hexsha (string, len 40) | size (int64, 2-991k) | ext (string, 2 classes) | lang (string, 1 class) | max_stars_repo_path (string, len 4-208) | max_stars_repo_name (string, len 6-106) | max_stars_repo_head_hexsha (string, len 40) | max_stars_repo_licenses (sequence) | max_stars_count (int64, 1-33.5k, nullable) | max_stars_repo_stars_event_min_datetime (string, len 24, nullable) | max_stars_repo_stars_event_max_datetime (string, len 24, nullable) | max_issues_repo_path (string, len 4-208) | max_issues_repo_name (string, len 6-106) | max_issues_repo_head_hexsha (string, len 40) | max_issues_repo_licenses (sequence) | max_issues_count (int64, 1-16.3k, nullable) | max_issues_repo_issues_event_min_datetime (string, len 24, nullable) | max_issues_repo_issues_event_max_datetime (string, len 24, nullable) | max_forks_repo_path (string, len 4-208) | max_forks_repo_name (string, len 6-106) | max_forks_repo_head_hexsha (string, len 40) | max_forks_repo_licenses (sequence) | max_forks_count (int64, 1-6.91k, nullable) | max_forks_repo_forks_event_min_datetime (string, len 24, nullable) | max_forks_repo_forks_event_max_datetime (string, len 24, nullable) | content (string, len 2-991k) | avg_line_length (float64, 1-36k) | max_line_length (int64, 1-977k) | alphanum_fraction (float64, 0-1)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
93e86a92bec79c9009c6adff93a1d91def7df978 | 330 | ex | Elixir | lib/encryption/encrypted_field.ex | danielberkompas/phoenix_ecto_encryption_sample | 1fd0f337eb214cb6a5b7348a70ad27e50968d851 | [
"MIT"
] | 4 | 2015-08-11T04:01:14.000Z | 2019-09-17T04:47:02.000Z | lib/encryption/encrypted_field.ex | danielberkompas/phoenix_ecto_encryption_sample | 1fd0f337eb214cb6a5b7348a70ad27e50968d851 | [
"MIT"
] | null | null | null | lib/encryption/encrypted_field.ex | danielberkompas/phoenix_ecto_encryption_sample | 1fd0f337eb214cb6a5b7348a70ad27e50968d851 | [
"MIT"
] | null | null | null | defmodule Encryption.EncryptedField do
  alias Encryption.AES

  @behaviour Ecto.Type

  def type, do: :binary

  def cast(value) do
    {:ok, to_string(value)}
  end

  def dump(value) do
    ciphertext = value |> to_string |> AES.encrypt
    {:ok, ciphertext}
  end

  def load(value) do
    {:ok, AES.decrypt(value)}
  end
end
| 15.714286 | 50 | 0.660606 |
93e8726d0bdc1d3a8e3573e292459a7584417e5a | 448 | ex | Elixir | lib/chess/auth/error_handler.ex | danbee/chess | c766ecb63ed15cd4a5c7ce4b503641d0222fb69d | [
"MIT"
] | 30 | 2018-06-16T00:41:59.000Z | 2021-03-05T08:39:02.000Z | lib/chess/auth/error_handler.ex | danbee/chess | c766ecb63ed15cd4a5c7ce4b503641d0222fb69d | [
"MIT"
] | 221 | 2018-01-15T20:33:13.000Z | 2022-02-16T00:53:35.000Z | lib/chess/auth/error_handler.ex | danbee/chess | c766ecb63ed15cd4a5c7ce4b503641d0222fb69d | [
"MIT"
] | 5 | 2018-02-20T17:25:19.000Z | 2018-12-29T00:09:30.000Z | defmodule Chess.Auth.ErrorHandler do
  @moduledoc false

  use ChessWeb, :controller

  import Plug.Conn

  def auth_error(conn, {_type, _reason}, _opts) do
    case get_format(conn) do
      "html" ->
        conn
        |> put_flash(:info, "You must be logged in")
        |> redirect(to: "/")
        |> halt()

      "json" ->
        conn
        |> put_status(403)
        |> json(%{status: 403, message: "Not authorized"})
    end
  end
end
| 20.363636 | 58 | 0.5625 |
93e88572f51370be0cb273657d55705e028460a6 | 2,033 | exs | Elixir | apps/mishka_html/config/prod.exs | mojtaba-naserei/mishka-cms | 1f31f61347bab1aae6ba0d47c5515a61815db6c9 | [
"Apache-2.0"
] | 35 | 2021-06-26T09:05:50.000Z | 2022-03-30T15:41:22.000Z | apps/mishka_html/config/prod.exs | iArazar/mishka-cms | 8b579101d607d91e80834527c1508fe5f4ceefef | [
"Apache-2.0"
] | 101 | 2021-01-01T09:54:07.000Z | 2022-03-28T10:02:24.000Z | apps/mishka_html/config/prod.exs | iArazar/mishka-cms | 8b579101d607d91e80834527c1508fe5f4ceefef | [
"Apache-2.0"
] | 8 | 2021-01-17T17:08:07.000Z | 2022-03-11T16:12:06.000Z | use Mix.Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :mishka_html, MishkaHtmlWeb.Endpoint,
  url: [host: "example.com", port: 80],
  cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :mishka_html, MishkaHtmlWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH"),
# transport_options: [socket_opts: [:inet6]]
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :mishka_html, MishkaHtmlWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# Finally import the config/prod.secret.exs which loads secrets
# and configuration from environment variables.
import_config "prod.secret.exs"
| 36.303571 | 66 | 0.717659 |
93e898475055ca0bc57f76eb603d854df28494cb | 1,449 | exs | Elixir | examples/mandelbrot.exs | xavier/ex_png | 58a23a705ace85e2351cbf0787e88df79b8f6494 | [
"Apache-2.0"
] | 10 | 2015-03-15T16:12:00.000Z | 2021-02-09T09:30:17.000Z | examples/mandelbrot.exs | xavier/ex_png | 58a23a705ace85e2351cbf0787e88df79b8f6494 | [
"Apache-2.0"
] | 1 | 2019-04-20T12:09:24.000Z | 2019-04-20T12:09:24.000Z | examples/mandelbrot.exs | xavier/ex_png | 58a23a705ace85e2351cbf0787e88df79b8f6494 | [
"Apache-2.0"
] | 5 | 2019-01-19T19:47:09.000Z | 2021-11-11T23:54:25.000Z | #
# Run as:
#
# mix run examples/mandelbrot.exs
#
defmodule Mandelbrot do
  require ExPNG.Color, as: Color
  require Bitwise

  @max_iterations 1000

  def draw(image) do
    scale_point = scaling_function(-2..2, -1.5..1.5, image.width, image.height)

    pixels = for y <- 0..(image.height-1), x <- 0..(image.width-1), into: <<>> do
      {sx, sy} = scale_point.(x, y)
      iteration = mandelbrot(sx, sy)
      tint = 255-round(iteration / @max_iterations * 255)
      if tint > 250 do
        r = Bitwise.band(Bitwise.bxor(x, y), 0xff)
        g = 0
        b = Bitwise.band(Bitwise.bor(div(x, 2), div(y, 2)), 0xff)
        Color.rgb(r, g, b)
      else
        Color.grayscale(tint)
      end
    end

    %{image | pixels: pixels}
  end

  defp scaling_function(x1..x2, y1..y2, width, height) do
    x_scale = abs(x1 - x2) / width
    y_scale = abs(y1 - y2) / height
    fn (x, y) ->
      {x1 + (((2*x)-1)*0.5)*x_scale, y1 + (((2*y)-1)*0.5)*y_scale}
    end
  end

  defp mandelbrot(x, y) do
    mandelbrot(x, y, 0, 0, 0)
  end

  defp mandelbrot(x0, y0, x, y, iter)
    when iter < @max_iterations and (x*x + y*y < 4), do:
      mandelbrot(x0, y0, x*x - y*y + x0, 2 *x*y + y0, iter + 1)
  defp mandelbrot(_, _, _, _, iter), do: iter
end
width = 1024
height = 768
path = Path.join([File.cwd!, "examples", "output", "mandelbrot.png"])
IO.puts "Writing to #{path}"
ExPNG.image(width, height)
|> Mandelbrot.draw
|> ExPNG.write(path)
| 22.292308 | 81 | 0.57764 |
93e8b3cbdaae59d5ce1284f3ce1ca956c8e0aa82 | 9,828 | exs | Elixir | test/ex_saga/stage_test.exs | naramore/ex_saga | 66c6b64867f28a1bbfb8ec2b6a786469b5f84e47 | [
"MIT"
] | null | null | null | test/ex_saga/stage_test.exs | naramore/ex_saga | 66c6b64867f28a1bbfb8ec2b6a786469b5f84e47 | [
"MIT"
] | 17 | 2019-02-06T03:51:03.000Z | 2019-10-22T10:15:48.000Z | test/ex_saga/stage_test.exs | naramore/ex_saga | 66c6b64867f28a1bbfb8ec2b6a786469b5f84e47 | [
"MIT"
] | null | null | null | defmodule ExSaga.StageTest do
@moduledoc false
use ExUnit.Case, async: true
use ExUnitProperties
import ExUnit.CaptureLog
doctest ExSaga.Stage
alias ExSaga.Generators, as: Gen
alias ExSaga.{Event, Stage, Step, Stepable, TestStage}
describe "ExSaga.Stage.Stepable.step_from/3" do
property "should return [:starting, :transaction] event given {:ok, effects}" do
check all stage <- Gen.stage(length: 0..3),
effects_so_far <- Gen.effects(length: 0..3),
opts <- Gen.stepable_opts(length: 0..3) do
result = Stepable.step_from(stage, {:ok, effects_so_far}, opts)
assert match?({:continue, %Event{name: [:starting, :transaction]}, %Stage{}}, result)
end
end
property "should return [:starting, :compensation] event given {:error, reason, effects}" do
check all stage <- Gen.stage(length: 0..3),
effects_so_far <- Gen.effects(length: 0..3),
reason <- Gen.reason(),
opts <- Gen.stepable_opts(length: 0..3) do
result = Stepable.step_from(stage, {:error, reason, effects_so_far}, opts)
assert match?({:continue, %Event{name: [:starting, :compensation]}, %Stage{}}, result)
end
end
end
describe "ExSaga.Stage.Stepable.step/3" do
@tag :skip
property "should return hook or next event given hook event" do
flunk("not implemented yet...")
end
@tag :skip
property "should return ??? given error handler event" do
flunk("not implemented yet...")
end
@tag :skip
property "should return ??? given [:starting, :transaction] event" do
flunk("not implemented yet...")
end
@tag :skip
property "should return ??? given [:completed, :transaction] event" do
flunk("not implemented yet...")
end
@tag :skip
property "should return ??? given [:starting, :compensation] event" do
flunk("not implemented yet...")
end
@tag :skip
property "should return ??? given [:completed, :compensation] event" do
flunk("not implemented yet...")
end
property "should return valid output for valid input" do
check all stage <- Gen.stage(length: 0..3),
event <- Gen.event(),
opts <- Gen.stepable_opts(length: 0..3) do
result = Stepable.step(stage, event, opts)
assert match?({:ok, %{}}, result) or match?({:error, _, %{}}, result) or match?({:continue, nil, %{}}, result) or
match?({:continue, %Event{}, %{}}, result)
end
end
end
describe "ExSaga.Step.mstep_from/3" do
property "should return valid result given %ExSaga.Stage{}" do
check all result <-
one_of([
tuple({constant(:ok), TestStage.test_effects()}),
tuple({member_of([:error, :abort]), Gen.reason(), TestStage.test_effects()})
]),
max_runs: 1000 do
capture_log(fn ->
{mstep_result, events} = Step.mstep_from(TestStage, result, [])
assert Enum.all?(events, fn e -> match?(%Event{}, e) end)
assert match?({:ok, %{}}, mstep_result) or
match?({status, _, %{}} when status in [:error, :abort], mstep_result)
end)
end
end
test "should successfully return when there are no problems" do
capture_log(fn ->
{mstep_result, events} = Step.mstep_from(TestStage, {:ok, %{}}, [])
assert match?({:ok, %{ExSaga.TestStage => :success!}}, mstep_result)
assert Enum.count(events) == 6
assert match?(
[
%Event{name: [:starting, :transaction]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:completed, :transaction]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]}
],
events
)
end)
end
test "should return failure on the raising of an error" do
capture_log(fn ->
{mstep_result, events} =
Step.mstep_from(TestStage, {:ok, %{txn: %{TestStage => {:raise, %ArgumentError{}}}}}, [])
assert match?({:error, _, %{}}, mstep_result)
assert Enum.count(events) == 12
assert match?(
[
%Event{name: [:starting, :transaction]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:completed, :transaction]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:starting, :compensation]},
%Event{name: [:completed, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:completed, :compensation]},
%Event{name: [:completed, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]}
],
events
)
end)
end
test "should return failure after retry" do
capture_log(fn ->
{mstep_result, events} =
Step.mstep_from(
TestStage,
{:ok, %{txn: %{TestStage => {:raise, %ArgumentError{}}}, cmp: %{TestStage => :retry}}},
[]
)
assert match?({:error, _, %{}}, mstep_result)
assert Enum.count(events) == 60
assert match?(
[
%Event{name: [:starting, :transaction]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:completed, :transaction]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:starting, :compensation]},
%Event{name: [:completed, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:completed, :compensation]},
%Event{name: [:completed, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:starting, :retry, :init]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:completed, :retry, :init]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:starting, :retry, :handler]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:completed, :retry, :handler]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:starting, :transaction]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:completed, :transaction]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:starting, :compensation]},
%Event{name: [:completed, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:completed, :compensation]},
%Event{name: [:completed, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:starting, :retry, :handler]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:completed, :retry, :handler]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:starting, :transaction]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:completed, :transaction]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:starting, :compensation]},
%Event{name: [:completed, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:completed, :compensation]},
%Event{name: [:completed, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:starting, :retry, :handler]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]},
%Event{name: [:completed, :retry, :handler]},
%Event{name: [:skipped, :hook, :log_compensation]},
%Event{name: [:completed, :hook, :log_event]}
],
events
)
end)
end
end
end
| 44.27027 | 121 | 0.517399 |
93e8efee99bfd77a71dcd3186bf0439e4dc9b455 | 959 | ex | Elixir | lib/iot/event.ex | enterprizing/iot | 1c8d71b2f779fabdad2a33b3ce3133ec2799eb9c | [
"0BSD"
] | 2 | 2019-07-27T13:29:35.000Z | 2019-07-28T08:56:46.000Z | lib/iot/event.ex | erpuno/iot | 1c8d71b2f779fabdad2a33b3ce3133ec2799eb9c | [
"0BSD"
] | 1 | 2019-07-29T22:37:22.000Z | 2019-07-29T22:37:22.000Z | lib/iot/event.ex | enterprizing/iot | 1c8d71b2f779fabdad2a33b3ce3133ec2799eb9c | [
"0BSD"
] | null | null | null | defmodule IOT.Rows.Event do
  use N2O, with: [:n2o, :nitro]
  use FORM, with: [:form]

  require IOT
  require Logger
  require Record

  def doc(),
    do:
      "Event from particular device."

  def id(), do: IOT."Event"()

  def new(name, IOT."Event"(id: id, code: code, type: type, kind: kind, date: date, time: time)) do
    panel(
      id: FORM.atom([:tr, name]),
      class: :td,
      body: [
        panel(
          class: :column6,
          body: NITRO.compact(id)
        ),
        panel(
          class: :column6,
          body: NITRO.compact(date)
        ),
        panel(
          class: :column6,
          body: NITRO.compact(time)
        ),
        panel(
          class: :column6,
          body: NITRO.compact(code)
        ),
        panel(
          class: :column6,
          body: NITRO.compact(type)
        ),
        panel(
          class: :column20,
          body: NITRO.compact(kind)
        )
      ]
    )
  end
end
| 20.404255 | 99 | 0.482795 |
93e8f58df8fe7537cd8a9e918e10c2f8825def45 | 340 | exs | Elixir | config/dev.exs | Zhenya2000perm/neko-achievements | a3851c4d41c0410f7821893b5d39e107b982560d | [
"MIT"
] | null | null | null | config/dev.exs | Zhenya2000perm/neko-achievements | a3851c4d41c0410f7821893b5d39e107b982560d | [
"MIT"
] | null | null | null | config/dev.exs | Zhenya2000perm/neko-achievements | a3851c4d41c0410f7821893b5d39e107b982560d | [
"MIT"
] | null | null | null | use Mix.Config
config :neko, :cowboy, listen_address: {127, 0, 0, 1}
config :neko, :cowboy, listen_port: 4004
if System.get_env("SHIKIMORI_LOCAL") == "true" do
config :neko, :shikimori, url: "https://shikimori.local/api/"
else
config :neko, :shikimori, url: "https://shikimori.one/api/"
end
config :appsignal, :config, active: false
| 26.153846 | 63 | 0.705882 |
93e90bcc74fa721f604185a554c4f098145214eb | 2,303 | ex | Elixir | lib/auto_api/states/wi_fi_state.ex | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | null | null | null | lib/auto_api/states/wi_fi_state.ex | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | null | null | null | lib/auto_api/states/wi_fi_state.ex | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | null | null | null | # AutoAPI
# The MIT License
#
# Copyright (c) 2018- High-Mobility GmbH (https://high-mobility.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
defmodule AutoApi.WiFiState do
@moduledoc """
WiFi state
"""
alias AutoApi.{CommonData, State}
use AutoApi.State, spec_file: "wi_fi.json"
@type t :: %__MODULE__{
status: State.property(CommonData.enabled_state()),
network_connected: State.property(CommonData.connection_state()),
network_ssid: State.property(String.t()),
network_security: State.property(CommonData.network_security()),
password: State.property(String.t())
}
@doc """
Build state based on binary value
iex> bin = <<1, 0, 4, 1, 0, 1, 1>>
iex> AutoApi.WiFiState.from_bin(bin)
%AutoApi.WiFiState{status: %AutoApi.PropertyComponent{data: :enabled}}
"""
@spec from_bin(binary) :: __MODULE__.t()
def from_bin(bin) do
parse_bin_properties(bin, %__MODULE__{})
end
@doc """
Parse state to bin
iex> state = %AutoApi.WiFiState{status: %AutoApi.PropertyComponent{data: :enabled}}
iex> AutoApi.WiFiState.to_bin(state)
<<1, 0, 4, 1, 0, 1, 1>>
"""
@spec to_bin(__MODULE__.t()) :: binary
def to_bin(%__MODULE__{} = state) do
parse_state_properties(state)
end
end
| 35.984375 | 87 | 0.715154 |
93e91edafbad5ee8a0b18471dc22de36e443369f | 5,494 | ex | Elixir | lib/he_broker/broker.ex | HackerExperience/HEBroker | 2a653a43b512c6392f55508dfc1be8463a845b31 | [
"BSD-3-Clause"
] | 5 | 2017-02-15T18:25:35.000Z | 2018-03-29T16:36:31.000Z | lib/he_broker/broker.ex | HackerExperience/HeBroker | 2a653a43b512c6392f55508dfc1be8463a845b31 | [
"BSD-3-Clause"
] | null | null | null | lib/he_broker/broker.ex | HackerExperience/HeBroker | 2a653a43b512c6392f55508dfc1be8463a845b31 | [
"BSD-3-Clause"
] | null | null | null | defmodule HeBroker.Broker do
  @moduledoc false

  use GenServer

  alias HeBroker.RouteMap

  @type topic :: String.t
  @type consumer_callbacks :: [cast: RouteMap.cast_fun, call: RouteMap.call_fun]

  @typep t :: %__MODULE__{}
  defstruct [:routes, :consumers]

  @spec start_link() :: GenServer.on_start
  def start_link,
    do: start_link([])

  @spec start_link(atom | [term]) :: GenServer.on_start
  def start_link(params) when is_list(params),
    do: GenServer.start_link(__MODULE__, params)
  def start_link(name) when is_atom(name),
    do: start_link(name, [])

  def start_link(name, params),
    do: GenServer.start_link(__MODULE__, [{:name, name}| params], name: name)

  @spec subscribe(pid | atom, topic, consumer_callbacks) :: :ok
  @doc false
  def subscribe(broker, topic, callbacks) when is_binary(topic) do
    cast = Keyword.get(callbacks, :cast)
    call = Keyword.get(callbacks, :call)

    if \
      (is_nil(cast) and is_nil(call))
      or (not is_nil(cast) and not is_function(cast, 4))
      or (not is_nil(call) and not is_function(call, 4))
    do
      raise HeBroker.InvalidCallbackError
    end

    GenServer.cast(broker, {:subscribe, :consumer, topic, callbacks, self()})
  end

  @spec subscribed?(pid | atom, pid) :: boolean
  def subscribed?(broker, pid),
    do: GenServer.call(broker, {:subscribed?, pid})

  @spec subscribed?(pid | atom, pid, topic) :: boolean
  def subscribed?(broker, pid, topic) do
    broker
    |> GenServer.call(:consumers_by_topic)
    |> :ets.lookup(topic)
    |> case do
      [{^topic, services}] ->
        MapSet.member?(services, pid)
      _ ->
        false
    end
  end

  @spec count_services_on_topic(pid | atom, topic) :: non_neg_integer
  def count_services_on_topic(broker, topic) do
    broker
    |> GenServer.call(:consumers_by_topic)
    |> :ets.lookup(topic)
    |> case do
      [{^topic, services}] ->
        MapSet.size(services)
      _ ->
        0
    end
  end

  @spec cast_callbacks(pid | atom, topic) :: [RouteMap.partial]
  @doc """
  Returns the callbacks from the consumers subscribed on `topic` on the `broker`
  """
  def cast_callbacks(broker, topic),
    do: callbacks(broker, topic, :cast)

  @spec call_callbacks(pid | atom, topic) :: [RouteMap.partial]
  @doc """
  Returns the callbacks from the consumers subscribed on `topic` on the `broker`
  """
  def call_callbacks(broker, topic),
    do: callbacks(broker, topic, :call)

  @spec callbacks(pid | atom, topic, :cast | :call) :: [RouteMap.partial]
  defp callbacks(broker, topic, type) do
    broker
    |> GenServer.call(:routes)
    |> RouteMap.services_on_topic(topic)
    |> Enum.map(&RouteMap.callback(&1, type))
    |> Enum.reject(&is_nil/1)
  end

  @doc false
  def init(route_options) do
    consumers_by_pid = :ets.new(:hebroker, [])
    consumers_by_topic = :ets.new(:hebroker, [])
    routes = RouteMap.new(route_options)

    {:ok, %__MODULE__{routes: routes, consumers: %{by_pid: consumers_by_pid, by_topic: consumers_by_topic}}}
  end

  @spec handle_call({:subscribed?, pid}, {pid, term}, t) :: {:reply, boolean, t}
  @spec handle_call({:subscribed?, topic, pid}, {pid, term}, t) :: {:reply, boolean, t}
  @doc false
  def handle_call({:subscribed?, pid}, _caller, state),
    do: {:reply, :ets.member(state.consumers.by_pid, pid), state}
  def handle_call(:routes, _caller, state),
    do: {:reply, state.routes, state}
  def handle_call(:consumers_by_topic, _caller, state),
    do: {:reply, state.consumers.by_topic, state}
  def handle_call(_, _, state),
    do: {:noreply, state}

  @spec handle_cast({:subscribe, :consumer, topic, consumer_callbacks, pid}, t) :: {:noreply, t}
  @doc false
  def handle_cast({:subscribe, :consumer, topic, callbacks, pid}, state) do
    cast = Keyword.get(callbacks, :cast)
    call = Keyword.get(callbacks, :call)

    monitor_consumer(state.consumers, topic, pid)
    RouteMap.upsert_topic(state.routes, topic, pid, cast, call)

    {:noreply, state}
  end
  def handle_cast(_, state),
    do: {:noreply, state}

  @doc false
  def handle_info({:DOWN, ref, _mod, pid, _reason}, state) do
    by_topic = state.consumers.by_topic
    by_pid = state.consumers.by_pid

    case :ets.lookup(by_pid, pid) do
      [{^pid, ^ref, topics}] ->
        Enum.each(topics, fn topic ->
          RouteMap.remove_consumer(state.routes, topic, pid)

          case :ets.lookup(by_topic, topic) do
            [{^topic, services}] ->
              s2 = MapSet.delete(services, pid)

              if 0 === MapSet.size(s2) do
                :ets.delete(by_topic, topic)
              else
                :ets.insert(by_topic, {topic, s2})
              end
            _ ->
              :ok
          end
        end)

        :ets.delete(by_pid, pid)
      _ ->
        :ok
    end

    {:noreply, state}
  end

  @spec monitor_consumer(%{by_pid: RouteMap.t, by_topic: RouteMap.t}, topic, pid) :: no_return
  defp monitor_consumer(%{by_pid: by_pid, by_topic: by_topic}, topic, pid) do
    case :ets.lookup(by_pid, pid) do
      [{^pid, ref, topics}] ->
        :ets.insert(by_pid, {pid, ref, MapSet.put(topics, topic)})
      [] ->
        topics = MapSet.new([topic])
        ref = Process.monitor(pid)

        :ets.insert(by_pid, {pid, ref, topics})
    end

    case :ets.lookup(by_topic, topic) do
      [{^topic, services}] ->
        :ets.insert(by_topic, {topic, MapSet.put(services, pid)})
      [] ->
        :ets.insert(by_topic, {topic, MapSet.new([pid])})
    end
  end
end | 29.697297 | 108 | 0.631052 |
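A minimal usage sketch for the HeBroker.Broker module above: the topic name and the no-op callback are illustrative assumptions, while start_link/0, subscribe/3 (which requires arity-4 :cast/:call functions) and subscribed?/3 are taken directly from the module itself.

# Illustrative only; "user.created" and the callback body are assumed, not from the dataset.
{:ok, broker} = HeBroker.Broker.start_link()

# Subscribes the calling process; the :cast callback must take four arguments.
:ok =
  HeBroker.Broker.subscribe(broker, "user.created", [
    cast: fn _topic, _from, _message, _meta -> :ok end
  ])

# subscribe/3 registers via GenServer.cast, so the subscription appears asynchronously.
HeBroker.Broker.subscribed?(broker, self(), "user.created")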
93e92a3d96fa1564c55f42585641e3ec42e98bda | 2,263 | exs | Elixir | apps/ewallet_config/mix.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet_config/mix.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet_config/mix.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | defmodule EWalletConfig.MixProject do
  use Mix.Project

  def project do
    [
      app: :ewallet_config,
      version: "2.0.0-dev",
      build_path: "../../_build",
      config_path: "../../config/config.exs",
      deps_path: "../../deps",
      lockfile: "../../mix.lock",
      elixir: "~> 1.8",
      elixirc_paths: elixirc_paths(Mix.env()),
      start_permanent: Mix.env() == :prod,
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.detail": :test,
        "coveralls.post": :test,
        "coveralls.html": :test
      ],
      aliases: aliases(),
      deps: deps()
    ]
  end

  # Specifies which paths to compile per environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:appsignal, :logger],
      mod: {EWalletConfig.Application, []},
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:activity_logger, in_umbrella: true},
      {:appsignal, "~> 1.9"},
      {:arc, "~> 0.11.0"},
      {:arc_ecto, github: "omisego/arc_ecto"},
      {:bcrypt_elixir, "~> 1.0"},
      {:cloak, "~> 0.9.1"},
      {:db, in_umbrella: true},
      {:deferred_config, "~> 0.1.0"},
      {:ecto_sql, "~> 3.0"},
      {:plug, "~> 1.0"},
      {:poison, "~> 3.1"},
      {:postgrex, ">= 0.0.0"},
      {:utils, in_umbrella: true},

      # arc GCS dependencies
      {:arc_gcs, github: "omisego/arc_gcs"},

      # arc AWS dependencies
      {:ex_aws, "~> 2.0"},
      {:ex_aws_s3, "~> 2.0"},
      {:hackney, "~> 1.6"},
      {:sweet_xml, "~> 0.6"},

      # Quantum scheduler dependencies
      {:quantum, ">= 2.2.6"},
      {:timex, "~> 3.0"},
    ]
  end

  # Aliases are shortcuts or tasks specific to the current project.
  # For example, to create, migrate and run the seeds file at once:
  #
  #     $ mix ecto.setup
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      "ecto.setup": ["ecto.create", "ecto.migrate"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      test: ["ecto.create --quiet", "ecto.migrate", "test"]
    ]
  end
end
| 26.623529 | 67 | 0.548387 |
93e9383a52d1b743ffa3108dc80c67d5a0fec815 | 427 | ex | Elixir | lib/text_based_fps/player_commands/leave_room.ex | guisehn/elixir-text-based-fps | 59a815da337309297f8b42ef3481277dd4d9b371 | [
"MIT"
] | 1 | 2022-03-02T12:18:07.000Z | 2022-03-02T12:18:07.000Z | lib/text_based_fps/player_commands/leave_room.ex | guisehn/elixir-text-based-fps | 59a815da337309297f8b42ef3481277dd4d9b371 | [
"MIT"
] | 12 | 2021-05-31T21:41:09.000Z | 2021-07-30T03:18:09.000Z | lib/text_based_fps/player_commands/leave_room.ex | guisehn/elixir-text-based-fps | 59a815da337309297f8b42ef3481277dd4d9b371 | [
"MIT"
] | null | null | null | defmodule TextBasedFPS.PlayerCommand.LeaveRoom do
  import TextBasedFPS.CommandHelper

  alias TextBasedFPS.{PlayerCommand, ServerState}

  @behaviour PlayerCommand

  @impl true
  def execute(state, player, _) do
    with {:ok, _room} <- require_room(state, player) do
      updated_state = ServerState.remove_player_from_current_room(state, player.key)
      {:ok, updated_state, "You have left the room."}
    end
  end
end
| 26.6875 | 84 | 0.744731 |
93e953291bae6b4cf4c43459d434dd66e82b41bf | 1,133 | exs | Elixir | heis_driver/config/config.exs | jornbh/Studass_sanntid | 8b55d01803d00b3b918abb6cf21ba02f0425c23e | [
"MIT"
] | null | null | null | heis_driver/config/config.exs | jornbh/Studass_sanntid | 8b55d01803d00b3b918abb6cf21ba02f0425c23e | [
"MIT"
] | null | null | null | heis_driver/config/config.exs | jornbh/Studass_sanntid | 8b55d01803d00b3b918abb6cf21ba02f0425c23e | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :heis_driver, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:heis_driver, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env()}.exs"
| 36.548387 | 73 | 0.751986 |
93e96518855ec974b7577a6431fc050f48832d39 | 378 | ex | Elixir | lib/blog_web/controllers/plug/set_user.ex | itorisaias/elxpro_blog_phx | 872f70a154c15976ed829c5fe6a9767ea28d67a5 | [
"MIT"
] | null | null | null | lib/blog_web/controllers/plug/set_user.ex | itorisaias/elxpro_blog_phx | 872f70a154c15976ed829c5fe6a9767ea28d67a5 | [
"MIT"
] | 9 | 2021-08-13T02:21:40.000Z | 2021-08-24T05:02:19.000Z | lib/blog_web/controllers/plug/set_user.ex | itorisaias/elxpro_blog_phx | 872f70a154c15976ed829c5fe6a9767ea28d67a5 | [
"MIT"
] | null | null | null | defmodule BlogWeb.Plug.SetUser do
@moduledoc """
Plug set user
"""
import Plug.Conn
alias Blog.Accounts
def init(_opts) do
end
def call(conn, _params) do
user_id = get_session(conn, :user_id)
user = user_id && Accounts.get_user!(user_id)
if user != nil do
assign(conn, :user, user)
else
assign(conn, :user, nil)
end
end
end
| 16.434783 | 49 | 0.632275 |
93e9728677d552327f2d06a77af8d3d0ba0e90f0 | 247 | ex | Elixir | lib/kanban_liveview_web/controllers/board_controller.ex | ivangsm/kanban_live | 1dbbdaf3bf8dbaa4261a981e36c90088de02bbe9 | [
"MIT"
] | null | null | null | lib/kanban_liveview_web/controllers/board_controller.ex | ivangsm/kanban_live | 1dbbdaf3bf8dbaa4261a981e36c90088de02bbe9 | [
"MIT"
] | null | null | null | lib/kanban_liveview_web/controllers/board_controller.ex | ivangsm/kanban_live | 1dbbdaf3bf8dbaa4261a981e36c90088de02bbe9 | [
"MIT"
] | null | null | null | defmodule KanbanLiveviewWeb.BoardController do
  use KanbanLiveviewWeb, :controller

  import Phoenix.LiveView.Controller

  def show(conn, %{"id" => id}) do
    live_render(conn, KanbanLiveviewWeb.PageLive, session: %{"board_id" => id})
  end
end
| 27.444444 | 79 | 0.744939 |
93e974313b04350ba882220f1aecdb03fe5fca0f | 674 | ex | Elixir | lib/erlef_web/controllers/stipend_controller.ex | pedrosnk/erlef-website | bb8da73d09930056c9d31bcc75a92b8fb3caf6da | [
"Apache-2.0"
] | null | null | null | lib/erlef_web/controllers/stipend_controller.ex | pedrosnk/erlef-website | bb8da73d09930056c9d31bcc75a92b8fb3caf6da | [
"Apache-2.0"
] | null | null | null | lib/erlef_web/controllers/stipend_controller.ex | pedrosnk/erlef-website | bb8da73d09930056c9d31bcc75a92b8fb3caf6da | [
"Apache-2.0"
] | null | null | null | defmodule ErlefWeb.StipendController do
  use ErlefWeb, :controller

  action_fallback ErlefWeb.FallbackController

  def index(conn, _params) do
    render(conn, errors: [], params: %{})
  end

  def create(%{private: %{phoenix_format: "html"}} = conn, params) do
    files = params["files"] || []

    case Erlef.StipendProposal.from_map(Map.put(params, "files", files)) do
      {:ok, proposal} ->
        Erlef.StipendMail.submission(proposal) |> Erlef.Mailer.send()
        Erlef.StipendMail.submission_copy(proposal) |> Erlef.Mailer.send()
        render(conn)

      {:error, errs} ->
        render(conn, "index.html", params: params, errors: errs)
    end
  end
end
| 29.304348 | 75 | 0.658754 |
93e97bdff7152b3bb113c6f48086859d95208a98 | 175 | exs | Elixir | config/config.exs | ConnorRigby/esqlite | df7e04bb7f21aae83ac4b5d6ed60d8a960277694 | [
"Apache-2.0"
] | 1 | 2018-04-09T03:11:46.000Z | 2018-04-09T03:11:46.000Z | config/config.exs | Sqlite-Ecto/esqlite | df7e04bb7f21aae83ac4b5d6ed60d8a960277694 | [
"Apache-2.0"
] | 1 | 2018-08-01T18:57:57.000Z | 2018-08-01T18:57:57.000Z | config/config.exs | Sqlite-Ecto/esqlite | df7e04bb7f21aae83ac4b5d6ed60d8a960277694 | [
"Apache-2.0"
] | 1 | 2018-04-18T18:52:40.000Z | 2018-04-18T18:52:40.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
config :sqlite, default_timeout: 5000
| 35 | 61 | 0.805714 |
93e98a9e93baaae4399e596779742c258faa2bf1 | 1,846 | exs | Elixir | mix.exs | aaroncolaco/conmon-elixir | 7e695421e413952c72fa927f3fb05a490f452762 | [
"MIT"
] | 3 | 2019-05-28T11:53:04.000Z | 2021-06-01T12:36:56.000Z | mix.exs | aaroncolaco/conmon-elixir | 7e695421e413952c72fa927f3fb05a490f452762 | [
"MIT"
] | null | null | null | mix.exs | aaroncolaco/conmon-elixir | 7e695421e413952c72fa927f3fb05a490f452762 | [
"MIT"
] | null | null | null | defmodule ConMon.MixProject do
  use Mix.Project

  @app :con_mon
  @all_targets [:rpi3]

  def project do
    [
      app: @app,
      version: "0.1.0",
      elixir: "~> 1.8",
      archives: [nerves_bootstrap: "~> 1.8"],
      start_permanent: Mix.env() == :prod,
      build_embedded: true,
      aliases: [loadconfig: [&bootstrap/1]],
      preferred_cli_target: [run: :host, test: :host],
      releases: [{@app, release()}],
      deps: deps()
    ]
  end

  def release do
    [
      overwrite: true,
      cookie: "#{@app}_cookie",
      include_erts: &Nerves.Release.erts/0,
      steps: [&Nerves.Release.init/1, :assemble],
      strip_beams: Mix.env() == :prod
    ]
  end

  # Starting nerves_bootstrap adds the required aliases to Mix.Project.config()
  # Aliases are only added if MIX_TARGET is set.
  def bootstrap(args) do
    Application.start(:nerves_bootstrap)
    Mix.Task.run("loadconfig", args)
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      mod: {ConMon.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      # Dependencies for all targets
      {:nerves, "~> 1.6.3", runtime: false},
      {:shoehorn, "~> 0.6"},
      {:ring_logger, "~> 0.8"},
      {:toolshed, "~> 0.2"},
      {:hackney, "~> 1.15.1"},
      {:poison, "~> 4.0"},
      {:plug_cowboy, "~> 2.2.1"},
      {:httpoison, "~> 1.6"},

      # Dependencies for all targets except :host
      {:nerves_runtime, "~> 0.11.1", targets: @all_targets},
      {:nerves_init_gadget, "~> 0.7", targets: @all_targets},
      {:nerves_time, "~> 0.4.1", targets: @all_targets},

      # Dependencies for specific targets
      {:nerves_system_rpi3, "~> 1.11.1", runtime: false, targets: :rpi3}
    ]
  end
end
| 26.371429 | 79 | 0.583424 |
93e9971ce4b5a2be24932875839ec80f7b7103aa | 1,829 | exs | Elixir | mix.exs | vsyrovat/awesome_elixir | cfac2de5402c13f86f99032477daee883b31c020 | [
"MIT"
] | 2 | 2019-12-17T11:40:54.000Z | 2019-12-22T17:15:29.000Z | mix.exs | vsyrovat/funbox_awesome_elixir | cfac2de5402c13f86f99032477daee883b31c020 | [
"MIT"
] | 5 | 2020-10-08T20:36:07.000Z | 2020-10-08T21:27:28.000Z | mix.exs | vsyrovat/awesome-elixir | cfac2de5402c13f86f99032477daee883b31c020 | [
"MIT"
] | null | null | null | defmodule App.MixProject do
  use Mix.Project

  def project do
    [
      app: :app,
      version: "0.1.0",
      elixir: "~> 1.5",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [:phoenix, :gettext] ++ Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps()
    ]
  end

  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      mod: {App.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end

  # Specifies which paths to compile per environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:phoenix, "~> 1.4.11"},
      {:phoenix_pubsub, "~> 1.1"},
      {:phoenix_ecto, "~> 4.0"},
      {:ecto_sql, "~> 3.1"},
      {:postgrex, ">= 0.0.0"},
      {:phoenix_html, "~> 2.11"},
      {:phoenix_live_reload, "~> 1.2", only: :dev},
      {:gettext, "~> 0.11"},
      {:jason, "~> 1.0"},
      {:plug_cowboy, "~> 2.0"},
      {:earmark, "~> 1.4"},
      {:typed_struct, "~> 0.1.4"},
      {:tesla, "~> 1.3"}
    ]
  end

  # Aliases are shortcuts or tasks specific to the current project.
  # For example, to create, migrate and run the seeds file at once:
  #
  #     $ mix ecto.setup
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      "ecto.migrate": ["ecto.migrate --log-sql"],
      "ecto.rollback": ["ecto.rollback --log-sql"],
      test: ["ecto.create --quiet", "ecto.migrate", "test"]
    ]
  end
end
| 26.897059 | 79 | 0.562603 |
93e9ae781d25ec823812806863e47e1db6e10738 | 8,045 | ex | Elixir | lib/hsluv.ex | kuon/ex-hsluv | 14bfe56c7890e0477c3b40329e1905671ec97f4f | [
"Apache-2.0",
"MIT"
] | null | null | null | lib/hsluv.ex | kuon/ex-hsluv | 14bfe56c7890e0477c3b40329e1905671ec97f4f | [
"Apache-2.0",
"MIT"
] | null | null | null | lib/hsluv.ex | kuon/ex-hsluv | 14bfe56c7890e0477c3b40329e1905671ec97f4f | [
"Apache-2.0",
"MIT"
] | null | null | null | defmodule HSLuv do
@moduledoc """
Convert colors between HSLuv and RGB color spaces
"""
import :math
@min_f 0.00000001
@max_f 99.9999999
@m {
{3.240969941904521, -1.537383177570093, -0.498610760293},
{-0.96924363628087, 1.87596750150772, 0.041555057407175},
{0.055630079696993, -0.20397695888897, 1.056971514242878}
}
@m_inv {
{0.41239079926595, 0.35758433938387, 0.18048078840183},
{0.21263900587151, 0.71516867876775, 0.072192315360733},
{0.019330818715591, 0.11919477979462, 0.95053215224966}
}
@ref_y 1.0
@ref_u 0.19783000664283
@ref_v 0.46831999493879
@kappa 903.2962962
@epsilon 0.0088564516
@enforce_keys [:h, :s, :l]
defstruct @enforce_keys
@doc """
Create an HSLuv color from values
Both integer and floats are supported.
- `h` must be between 0 and 360 included
- `s` must be between 0 and 100 included
- `l` must be between 0 and 100 included
"""
def new(h, s, l) do
%HSLuv{h: h, s: s, l: l}
end
@doc """
Create an HSLuv color from RGB values
Both integer and floats are supported.
- `r` must be between 0 and 255 included
- `g` must be between 0 and 255 included
- `b` must be between 0 and 255 included
## Examples
iex> HSLuv.rgb(200, 150, 20)
%HSLuv{h: 57.26077539223336, l: 65.07659371178795, s: 97.61326139925325}
"""
def rgb(r, g, b) do
{h, s, l} = rgb_to_hsluv({r / 255.0, g / 255.0, b / 255.0})
%HSLuv{h: h, s: s, l: l}
end
@doc """
Convert HSLuv to RGB.
- `h` must be between 0 and 360 included
- `s` must be between 0 and 100 included
- `l` must be between 0 and 100 included
Returned components are between 0 and 255 included
## Examples
iex> HSLuv.to_rgb(20, 50, 20)
{75, 38, 31}
"""
def to_rgb(h, s, l) do
new(h, s, l)
|> to_rgb()
end
def to_rgb(%HSLuv{h: h, s: s, l: l}) do
{r, g, b} = hsluv_to_rgb({h, s, l})
{round(r * 255.0), round(g * 255.0), round(b * 255.0)}
end
@doc """
Convert RGB to HSLuv.
## Examples
iex> HSLuv.to_hsluv(20, 50, 20)
{127.71501294923954, 67.94319276530133, 17.829530512200364}
"""
def to_hsluv(r, g, b) do
c = rgb(r, g, b)
{c.h, c.s, c.l}
end
def hsluv_to_rgb([h, s, l]), do: hsluv_to_rgb({h, s, l})
def hsluv_to_rgb({_h, _s, _l} = hsl) do
hsl
|> hsluv_to_lch()
|> lch_to_luv()
|> luv_to_xyz()
|> xyz_to_rgb()
end
def hpluv_to_rgb([h, s, l]), do: hpluv_to_rgb({h, s, l})
def hpluv_to_rgb({_h, _s, _l} = hsl) do
hsl
|> hpluv_to_lch()
|> lch_to_luv()
|> luv_to_xyz()
|> xyz_to_rgb()
end
def rgb_to_hsluv([r, g, b]), do: rgb_to_hsluv({r, g, b})
def rgb_to_hsluv({_r, _g, _b} = rgb) do
rgb
|> rgb_to_xyz()
|> xyz_to_luv()
|> luv_to_lch()
|> lch_to_hsluv()
end
def rgb_to_hpluv([r, g, b]), do: rgb_to_hpluv({r, g, b})
def rgb_to_hpluv({_r, _g, _b} = rgb) do
rgb
|> rgb_to_xyz()
|> xyz_to_luv()
|> luv_to_lch()
|> lch_to_hpluv()
end
def lch_to_luv({l, c, h}) do
h_rad = h / 360.0 * 2.0 * pi()
{l, cos(h_rad) * c, sin(h_rad) * c}
end
def lch_to_luv([l, c, h]), do: lch_to_luv({l, c, h})
def luv_to_lch({l, u, v}) do
c = sqrt(u * u + v * v)
h =
if c < @min_f do
0.0
else
atan2(v, u) * 180.0 / pi()
end
h =
if h < 0.0 do
360.0 + h
else
h
end
{l, c, h}
end
def luv_to_lch([l, u, v]), do: luv_to_lch({l, u, v})
def xyz_to_rgb({_x, _y, _z} = xyz) do
{m1, m2, m3} = @m
{a, b, c} = {dot(m1, xyz), dot(m2, xyz), dot(m3, xyz)}
{from_linear(a), from_linear(b), from_linear(c)}
end
def xyz_to_rgb([x, y, z]), do: xyz_to_rgb({x, y, z})
def rgb_to_xyz({r, g, b}) do
{m1, m2, m3} = @m_inv
rgb = {to_linear(r), to_linear(g), to_linear(b)}
{dot(m1, rgb), dot(m2, rgb), dot(m3, rgb)}
end
def rgb_to_xyz([r, g, b]), do: rgb_to_xyz({r, g, b})
def xyz_to_luv({x, y, z}) do
l = f(y)
if l == 0.0 || (x == 0.0 && y == 0.0 && z == 0.0) do
{0.0, 0.0, 0.0}
else
var_u = 4.0 * x / (x + 15.0 * y + 3.0 * z)
var_v = 9.0 * y / (x + 15.0 * y + 3.0 * z)
u = 13.0 * l * (var_u - @ref_u)
v = 13.0 * l * (var_v - @ref_v)
{l, u, v}
end
end
def xyz_to_luv([x, y, z]), do: xyz_to_luv({x, y, z})
def luv_to_xyz({l, u, v}) do
if l == 0.0 do
{0.0, 0.0, 0.0}
else
var_y = f_inv(l)
var_u = u / (13.0 * l) + @ref_u
var_v = v / (13.0 * l) + @ref_v
y = var_y * @ref_y
x = 0.0 - 9.0 * y * var_u / ((var_u - 4.0) * var_v - var_u * var_v)
z = (9.0 * y - 15.0 * var_v * y - var_v * x) / (3.0 * var_v)
{x, y, z}
end
end
def luv_to_xyz([l, u, v]), do: luv_to_xyz({l, u, v})
def hsluv_to_lch({h, s, l}) do
cond do
l > @max_f ->
{100.0, 0, h}
l < @min_f ->
{0.0, 0.0, h}
true ->
{l, max_safe_chroma_for_lh(l, h) / 100.0 * s, h}
end
end
def hsluv_to_lch([h, s, l]), do: hsluv_to_lch({h, s, l})
def lch_to_hsluv({l, c, h}) do
cond do
l > @max_f ->
{h, 0, 100.0}
l < @min_f ->
{h, 0.0, 0.0}
true ->
max_chroma = max_safe_chroma_for_lh(l, h)
{h, c / max_chroma * 100.0, l}
end
end
def lch_to_hsluv([l, c, h]), do: lch_to_hsluv({l, c, h})
def hpluv_to_lch({h, s, l}) do
cond do
l > @max_f ->
{100.0, 0, h}
l < @min_f ->
{0.0, 0.0, h}
true ->
{l, max_safe_chroma_for_l(l) / 100.0 * s, h}
end
end
def hpluv_to_lch([h, s, l]), do: hpluv_to_lch({h, s, l})
def lch_to_hpluv({l, c, h}) do
cond do
l > @max_f ->
{h, 0.0, 100.0}
l < @min_f ->
{h, 0.0, 0.0}
true ->
{h, c / max_safe_chroma_for_l(l) * 100.0, l}
end
end
def lch_to_hpluv([l, c, h]), do: lch_to_hpluv({l, c, h})
def get_bounds(l) do
sub = pow(l + 16.0, 3.0) / 1_560_896.0
sub =
if sub > @epsilon do
sub
else
l / @kappa
end
compute = fn {m1, m2, m3}, t ->
top1 = (284_517.0 * m1 - 94839.0 * m3) * sub
top2 =
(838_422.0 * m3 + 769_860.0 * m2 + 731_718.0 * m1) * l * sub -
769_860.0 * t * l
bottom = (632_260.0 * m3 - 126_452.0 * m2) * sub + 126_452.0 * t
{top1 / bottom, top2 / bottom}
end
{m1, m2, m3} = @m
[
compute.(m1, 0.0),
compute.(m1, 1.0),
compute.(m2, 0.0),
compute.(m2, 1.0),
compute.(m3, 0.0),
compute.(m3, 1.0)
]
end
def max_safe_chroma_for_l(l) do
val = 1.7976931348623157e308
l
|> get_bounds()
|> Enum.reduce(val, fn bound, val ->
length = distance_line_from_origin(bound)
if length >= 0.0 do
min(val, length)
else
val
end
end)
end
def max_safe_chroma_for_lh(l, h) do
h_rad = h / 360.0 * pi() * 2.0
val = 1.7976931348623157e308
l
|> get_bounds()
|> Enum.reduce(val, fn bound, val ->
length = length_of_ray_until_intersect(h_rad, bound)
if length >= 0.0 do
min(val, length)
else
val
end
end)
end
def distance_line_from_origin({slope, intercept}) do
abs(intercept) / sqrt(pow(slope, 2.0) + 1.0)
end
def length_of_ray_until_intersect(theta, {slope, intercept}) do
intercept / (sin(theta) - slope * cos(theta))
end
def dot({a0, a1, a2}, {b0, b1, b2}) do
a0 * b0 + a1 * b1 + a2 * b2
end
defp f(t) do
if t > @epsilon do
116.0 * pow(t / @ref_y, 1.0 / 3.0) - 16.0
else
t / @ref_y * @kappa
end
end
defp f_inv(t) do
if t > 8 do
@ref_y * pow((t + 16.0) / 116.0, 3.0)
else
@ref_y * t / @kappa
end
end
defp to_linear(c) do
if c > 0.04045 do
pow((c + 0.055) / 1.055, 2.4)
else
c / 12.92
end
end
defp from_linear(c) do
if c <= 0.0031308 do
12.92 * c
else
1.055 * pow(c, 1.0 / 2.4) - 0.055
end
end
end
| 20.315657 | 78 | 0.522561 |
93e9c9943b0585ad20f22d51b6d2d8a438c609c5 | 5,063 | ex | Elixir | lib/format/duration/formatters/default.ex | tarzan/timex | 6f7014659e62b07cff98152543c4456984a8173e | [
"MIT"
] | null | null | null | lib/format/duration/formatters/default.ex | tarzan/timex | 6f7014659e62b07cff98152543c4456984a8173e | [
"MIT"
] | null | null | null | lib/format/duration/formatters/default.ex | tarzan/timex | 6f7014659e62b07cff98152543c4456984a8173e | [
"MIT"
] | null | null | null | defmodule Timex.Format.Duration.Formatters.Default do
@moduledoc """
Handles formatting Duration values as ISO 8601 durations as described below.
Durations are represented by the format P[n]Y[n]M[n]DT[n]H[n]M[n]S.
In this representation, the [n] is replaced by the value for each of the
date and time elements that follow the [n]. Leading zeros are not required,
but the maximum number of digits for each element should be agreed to by the
communicating parties. The capital letters P, Y, M, W, D, T, H, M, and S are
designators for each of the date and time elements and are not replaced.
- P is the duration designator (historically called "period") placed at the start of the duration representation.
- Y is the year designator that follows the value for the number of years.
- M is the month designator that follows the value for the number of months.
- D is the day designator that follows the value for the number of days.
- T is the time designator that precedes the time components of the representation.
- H is the hour designator that follows the value for the number of hours.
- M is the minute designator that follows the value for the number of minutes.
- S is the second designator that follows the value for the number of seconds.
"""
use Timex.Format.Duration.Formatter
alias Timex.Translator
@minute 60
@hour @minute * 60
@day @hour * 24
@month @day * 30
@year @day * 365
@microsecond 1_000_000
@doc """
Return a human readable string representing the absolute value of duration (i.e. would
return the same output for both negative and positive representations of a given duration)
## Examples
iex> use Timex
...> Duration.from_erl({0, 1, 1_000_000}) |> #{__MODULE__}.format
"PT2S"
iex> use Timex
...> Duration.from_erl({0, 1, 1_000_100}) |> #{__MODULE__}.format
"PT2.0001S"
iex> use Timex
...> Duration.from_erl({0, 65, 0}) |> #{__MODULE__}.format
"PT1M5S"
iex> use Timex
...> Duration.from_erl({0, -65, 0}) |> #{__MODULE__}.format
"PT1M5S"
iex> use Timex
...> Duration.from_erl({1435, 180354, 590264}) |> #{__MODULE__}.format
"P45Y6M5DT21H12M34.590264S"
"""
@spec format(Duration.t()) :: String.t() | {:error, term}
def format(%Duration{} = duration), do: lformat(duration, Translator.current_locale())
def format(_), do: {:error, :invalid_timestamp}
def lformat(%Duration{} = duration, _locale) do
duration
|> deconstruct
|> do_format
end
def lformat(_, _locale), do: {:error, :invalid_duration}
defp do_format(components), do: do_format(components, <<?P>>)
defp do_format([], str), do: str
defp do_format([{unit, _} = component | rest], str) do
cond do
unit in [:hours, :minutes, :seconds] && String.contains?(str, "T") ->
do_format(rest, format_component(component, str))
unit in [:hours, :minutes, :seconds] ->
do_format(rest, format_component(component, str <> "T"))
true ->
do_format(rest, format_component(component, str))
end
end
defp format_component({_, 0}, str), do: str
defp format_component({:years, y}, str), do: str <> "#{y}Y"
defp format_component({:months, m}, str), do: str <> "#{m}M"
defp format_component({:days, d}, str), do: str <> "#{d}D"
defp format_component({:hours, h}, str), do: str <> "#{h}H"
defp format_component({:minutes, m}, str), do: str <> "#{m}M"
defp format_component({:seconds, s}, str), do: str <> "#{s}S"
defp deconstruct(duration) do
micros = Duration.to_microseconds(duration) |> abs
deconstruct({div(micros, @microsecond), rem(micros, @microsecond)}, [])
end
defp deconstruct({0, 0}, components),
do: Enum.reverse(components)
defp deconstruct({seconds, us}, components) do
cond do
seconds >= @year ->
deconstruct({rem(seconds, @year), us}, [{:years, div(seconds, @year)} | components])
seconds >= @month ->
deconstruct({rem(seconds, @month), us}, [{:months, div(seconds, @month)} | components])
seconds >= @day ->
deconstruct({rem(seconds, @day), us}, [{:days, div(seconds, @day)} | components])
seconds >= @hour ->
deconstruct({rem(seconds, @hour), us}, [{:hours, div(seconds, @hour)} | components])
seconds >= @minute ->
deconstruct({rem(seconds, @minute), us}, [{:minutes, div(seconds, @minute)} | components])
true ->
get_fractional_seconds(seconds, us, components)
end
end
defp get_fractional_seconds(seconds, 0, components),
do: deconstruct({0, 0}, [{:seconds, seconds} | components])
defp get_fractional_seconds(seconds, micro, components) do
millis =
micro
|> Duration.from_microseconds()
|> Duration.to_milliseconds()
cond do
millis >= 1.0 ->
deconstruct({0, 0}, [{:seconds, seconds + millis * :math.pow(10, -3)} | components])
true ->
deconstruct({0, 0}, [{:seconds, seconds + micro * :math.pow(10, -6)} | components])
end
end
end
| 35.405594 | 115 | 0.649812 |
93ea066f815b42c892612900e4ae5c9e2ff760f7 | 4,238 | ex | Elixir | lib/vintage_net/interfaces_monitor.ex | axelson/vintage_net | 68866ff223fa40e7ad6fd4d9fb2f7960cf51b1ca | [
"Apache-2.0"
] | 85 | 2019-05-09T14:54:38.000Z | 2022-02-08T16:52:04.000Z | lib/vintage_net/interfaces_monitor.ex | axelson/vintage_net | 68866ff223fa40e7ad6fd4d9fb2f7960cf51b1ca | [
"Apache-2.0"
] | 132 | 2019-05-09T15:57:59.000Z | 2022-02-28T16:31:22.000Z | lib/vintage_net/interfaces_monitor.ex | axelson/vintage_net | 68866ff223fa40e7ad6fd4d9fb2f7960cf51b1ca | [
"Apache-2.0"
] | 14 | 2019-07-08T19:18:23.000Z | 2022-02-08T16:52:05.000Z | defmodule VintageNet.InterfacesMonitor do
@moduledoc """
Monitor available interfaces
Currently this works by polling the system for what interfaces are visible.
They may or may not be configured.
"""
use GenServer
# require Logger
alias VintageNet.InterfacesMonitor.{HWPath, Info}
defmodule State do
@moduledoc false
defstruct port: nil,
interface_info: %{}
end
@spec start_link(any()) :: GenServer.on_start()
def start_link(args) do
GenServer.start_link(__MODULE__, args, name: __MODULE__)
end
@doc """
Force clear all addresses
This is useful to notify everyone that an address should not be used
immediately. This can be used to fix a race condition where the blip
for an address going away to coming back isn't reported.
"""
@spec force_clear_ipv4_addresses(VintageNet.ifname()) :: :ok
def force_clear_ipv4_addresses(ifname) do
GenServer.call(__MODULE__, {:force_clear_ipv4_addresses, ifname})
end
@impl GenServer
def init(_args) do
executable = :code.priv_dir(:vintage_net) ++ '/if_monitor'
case File.exists?(executable) do
true ->
port =
Port.open({:spawn_executable, executable}, [
{:packet, 2},
:use_stdio,
:binary,
:exit_status
])
{:ok, %State{port: port}}
false ->
# This is only done for testing on OSX
{:ok, %State{}}
end
end
@impl GenServer
def handle_call({:force_clear_ipv4_addresses, ifname}, _from, state) do
{ifindex, old_info} = get_by_ifname(state, ifname)
new_info = Info.delete_ipv4_addresses(old_info)
if old_info != new_info do
new_info = Info.update_address_properties(new_info)
new_state = %{state | interface_info: Map.put(state.interface_info, ifindex, new_info)}
{:reply, :ok, new_state}
else
{:reply, :ok, state}
end
end
@impl GenServer
def handle_info({_port, {:data, raw_report}}, state) do
report = :erlang.binary_to_term(raw_report)
# Logger.debug("if_monitor: #{inspect(report, limit: :infinity)}")
new_state = handle_report(state, report)
{:noreply, new_state}
end
defp handle_report(state, {:newlink, ifname, ifindex, link_report}) do
new_info =
get_or_create_info(state, ifindex, ifname)
|> Info.newlink(link_report)
|> Info.update_link_properties()
%{state | interface_info: Map.put(state.interface_info, ifindex, new_info)}
end
defp handle_report(state, {:dellink, ifname, ifindex, _link_report}) do
Info.clear_properties(ifname)
%{state | interface_info: Map.delete(state.interface_info, ifindex)}
end
defp handle_report(state, {:newaddr, ifindex, address_report}) do
new_info =
get_or_create_info(state, ifindex)
|> Info.newaddr(address_report)
|> Info.update_address_properties()
%{state | interface_info: Map.put(state.interface_info, ifindex, new_info)}
end
defp handle_report(state, {:deladdr, ifindex, address_report}) do
new_info =
get_or_create_info(state, ifindex)
|> Info.deladdr(address_report)
|> Info.update_address_properties()
%{state | interface_info: Map.put(state.interface_info, ifindex, new_info)}
end
defp get_by_ifname(state, ifname) do
Enum.find_value(state.interface_info, fn {ifindex, info} ->
case info do
%{ifname: ^ifname} -> {ifindex, info}
_ -> nil
end
end)
end
defp get_or_create_info(state, ifindex, ifname) do
case Map.fetch(state.interface_info, ifindex) do
{:ok, %{ifname: ^ifname} = info} ->
info
{:ok, %{ifname: old_ifname} = info} ->
Info.clear_properties(old_ifname)
%{info | ifname: ifname}
|> Info.update_present()
|> Info.update_address_properties()
_missing ->
hw_path = HWPath.query(ifname)
Info.new(ifname, hw_path)
|> Info.update_present()
end
end
defp get_or_create_info(state, ifindex) do
case Map.fetch(state.interface_info, ifindex) do
{:ok, info} ->
info
_missing ->
# Race between address and link notifications?
Info.new("__unknown")
end
end
end
| 26.4875 | 93 | 0.659981 |
93ea2bf5118e78848d9eb672addf24b2484ceccc | 775 | exs | Elixir | programming_elixir/test/list_and_recursion_5_test.exs | rsebescen/ProgrammingElixir | c2ec7bca4c2e326d99723bba6ae254d598c2c5ab | [
"MIT"
] | null | null | null | programming_elixir/test/list_and_recursion_5_test.exs | rsebescen/ProgrammingElixir | c2ec7bca4c2e326d99723bba6ae254d598c2c5ab | [
"MIT"
] | null | null | null | programming_elixir/test/list_and_recursion_5_test.exs | rsebescen/ProgrammingElixir | c2ec7bca4c2e326d99723bba6ae254d598c2c5ab | [
"MIT"
] | null | null | null | defmodule ListsAndRecursion5Test do
  use ExUnit.Case
  doctest ProgrammingElixir

  # Exercise: ListsAndRecursion-5
  test "all?" do
    assert MyEnum.all?([1,2,3,4,5], &(&1<6)) == true
    assert MyEnum.all?([1,2,3,4,5], &(&1<5)) == false
  end

  test "each" do
    assert MyEnum.each([1,2,3], &(&1+1)) == [2,3,4]
    assert MyEnum.each([1,2,3], &(&1*2)) == [2,4,6]
  end

  test "filter" do
    assert MyEnum.filter([1,2,3], &(&1!=2)) == [1,3]
    assert MyEnum.filter([1,2,3,4], &(rem(&1,2)==0)) == [2,4]
  end

  test "split" do
    assert MyEnum.split([1,2,3,4], 2) == [[1,2],[3,4]]
    assert MyEnum.split([1,2,3,4], 1) == [[1],[2,3,4]]
  end

  test "take" do
    assert MyEnum.take([1,2,3,4,5], 3) == [1,2,3]
    assert MyEnum.take([1,2,3,4,5], 1) == [1]
  end
end
| 25 | 61 | 0.544516 |
93ea2e451b3fb448f76960fc0e716c23d1bf0619 | 1,781 | ex | Elixir | clients/real_time_bidding/lib/google_api/real_time_bidding/v1/model/remove_targeted_sites_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/real_time_bidding/lib/google_api/real_time_bidding/v1/model/remove_targeted_sites_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/real_time_bidding/lib/google_api/real_time_bidding/v1/model/remove_targeted_sites_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.RealTimeBidding.V1.Model.RemoveTargetedSitesRequest do
@moduledoc """
A request to stop targeting sites in a specific pretargeting configuration. The pretargeting configuration itself specifies how these sites are targeted in PretargetingConfig.webTargeting.
## Attributes
* `sites` (*type:* `list(String.t)`, *default:* `nil`) - A list of site URLs to stop targeting in the pretargeting configuration. These values will be removed from the list of targeted URLs in PretargetingConfig.webTargeting.values.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:sites => list(String.t())
}
field(:sites, type: :list)
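  # Illustrative construction (the URL is a made-up example, not from the API docs):
  #
  #   %GoogleApi.RealTimeBidding.V1.Model.RemoveTargetedSitesRequest{
  #     sites: ["https://example.com"]
  #   }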
end
defimpl Poison.Decoder, for: GoogleApi.RealTimeBidding.V1.Model.RemoveTargetedSitesRequest do
def decode(value, options) do
GoogleApi.RealTimeBidding.V1.Model.RemoveTargetedSitesRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.RealTimeBidding.V1.Model.RemoveTargetedSitesRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.893617 | 236 | 0.763616 |
93ea3506774544fe450b0e2e0073410b54863f3f | 1,787 | exs | Elixir | deps/timex/mix.exs | luishendrix92/exrez | c9a001ff32c2081449ce190d151e7df09a0a42ee | [
"MIT"
] | null | null | null | deps/timex/mix.exs | luishendrix92/exrez | c9a001ff32c2081449ce190d151e7df09a0a42ee | [
"MIT"
] | null | null | null | deps/timex/mix.exs | luishendrix92/exrez | c9a001ff32c2081449ce190d151e7df09a0a42ee | [
"MIT"
] | null | null | null | defmodule Timex.Mixfile do
use Mix.Project
@version "3.3.0"
def project do
[ app: :timex,
version: @version,
elixir: "~> 1.4",
description: description(),
package: package(),
deps: deps(),
docs: docs(),
compilers: [:gettext] ++ Mix.compilers,
test_coverage: [tool: ExCoveralls] ]
end
def application do
[applications: [:logger, :tzdata, :gettext, :combine],
env: [local_timezone: nil, default_locale: "en"],
mod: {Timex, []}]
end
defp description do
"""
Timex is a rich, comprehensive Date/Time library for Elixir projects, with full timezone support via the :tzdata package.
If you need to manipulate dates, times, datetimes, timestamps, etc., then Timex is for you!
"""
end
defp package do
[ files: ["lib", "priv", "mix.exs", "README.md", "LICENSE.md"],
maintainers: ["Paul Schoenfelder"],
licenses: ["MIT"],
links: %{ "GitHub": "https://github.com/bitwalker/timex" } ]
end
def deps do
[{:tzdata, "~> 0.1.8 or ~> 0.5"},
{:combine, "~> 0.10"},
{:gettext, "~> 0.10"},
{:ex_doc, "~> 0.13", only: :dev},
{:benchfella, "~> 0.3", only: :dev},
{:dialyze, "~> 0.2", only: :dev},
{:excoveralls, "~> 0.4", only: [:dev, :test]}]
end
defp docs do
[main: "getting-started",
formatter_opts: [gfm: true],
source_ref: @version,
source_url: "https://github.com/bitwalker/timex",
extras: [
"docs/Getting Started.md",
"CHANGELOG.md",
"docs/Basic Usage.md",
"docs/Erlang Interop.md",
"docs/Formatting.md",
"docs/Parsing.md",
"docs/FAQ.md",
"docs/Using with Ecto.md",
"docs/Custom Parsers.md",
"docs/Custom Formatters.md"
]]
end
end
| 25.898551 | 125 | 0.56911 |
93ea717a92e2fb38b117c01a55706fba251e7bc2 | 6,698 | ex | Elixir | apps/omg_eth/test/support/dev_helper.ex | omgnetwork/omg-childchain-v1 | 1e2313029ece2282c22ce411edc078a17e6bba09 | [
"Apache-2.0"
] | 1 | 2020-10-06T03:07:47.000Z | 2020-10-06T03:07:47.000Z | apps/omg_eth/test/support/dev_helper.ex | omgnetwork/omg-childchain-v1 | 1e2313029ece2282c22ce411edc078a17e6bba09 | [
"Apache-2.0"
] | 9 | 2020-09-16T15:31:17.000Z | 2021-03-17T07:12:35.000Z | apps/omg_eth/test/support/dev_helper.ex | omgnetwork/omg-childchain-v1 | 1e2313029ece2282c22ce411edc078a17e6bba09 | [
"Apache-2.0"
] | 1 | 2020-09-30T17:17:27.000Z | 2020-09-30T17:17:27.000Z | # Copyright 2019-2020 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule Support.DevHelper do
@moduledoc """
Helpers used when setting up development environment and test fixtures, related to contracts and ethereum.
Run against `geth --dev` and similar.
"""
import OMG.Eth.Encoding, only: [to_hex: 1, from_hex: 1, int_from_hex: 1]
require Logger
alias OMG.Eth
alias OMG.Eth.Client
alias OMG.Eth.Configuration
alias OMG.Eth.RootChain
alias OMG.Eth.Transaction
alias Support.WaitFor
@one_hundred_eth trunc(:math.pow(10, 18) * 100)
# about 4 Ethereum blocks on "realistic" networks, use to timeout synchronous operations in demos on testnets
# NOTE: such timeout works only in dev setting; on mainnet one must track its transactions carefully
@about_4_blocks_time 60_000
@passphrase "ThisIsATestnetPassphrase"
@doc """
  Will take a map with eth-account information (from `&generate_entity/0`) and then
  import the private key, unlock the account, and fund it with test ETH.
Options:
- :faucet - the address to send the test ETH from, assumed to be unlocked and have the necessary funds
- :initial_funds_wei - the amount of test ETH that will be granted to every generated user
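
  Illustrative usage (`entity` comes from `&generate_entity/0`; the result is the
  funded account's address):

      {:ok, address} = import_unlock_fund(entity)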
"""
def import_unlock_fund(%{priv: account_priv}, opts \\ []) do
{:ok, account_enc} = create_account_from_secret(Configuration.eth_node(), account_priv, @passphrase)
{:ok, _} = fund_address_from_faucet(account_enc, opts)
{:ok, from_hex(account_enc)}
end
@doc """
  Use with contract-transacting functions that return `{:ok, txhash}`, e.g. `Eth.Token.mint`,
  to wait synchronously until the transaction has been mined successfully
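
  Illustrative call (`submit_some_transaction/0` is a placeholder for any function
  returning `{:ok, txhash}`):

      {:ok, %{"status" => "0x1"}} =
        submit_some_transaction()
        |> transact_sync!(timeout: 60_000)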
"""
@spec transact_sync!({:ok, Eth.hash()}, keyword()) :: {:ok, map}
def transact_sync!({:ok, txhash} = _transaction_submission_result, opts \\ []) when byte_size(txhash) == 32 do
timeout = Keyword.get(opts, :timeout, @about_4_blocks_time)
{:ok, _} =
txhash
|> WaitFor.eth_receipt(timeout)
|> case do
{:ok, %{"status" => "0x1"} = receipt} -> {:ok, Map.update!(receipt, "blockNumber", &int_from_hex(&1))}
{:ok, %{"status" => "0x0"} = receipt} -> {:error, Map.put(receipt, "reason", get_reason(txhash))}
other -> other
end
end
@doc """
Uses `transact_sync!` for synchronous deploy-transaction sending and extracts important data from the receipt
"""
@spec deploy_sync!({:ok, Eth.hash()}) :: {:ok, Eth.hash(), Eth.address()}
def deploy_sync!({:ok, txhash} = transaction_submission_result) do
{:ok, %{"contractAddress" => contract, "status" => "0x1", "gasUsed" => _gas_used}} =
transact_sync!(transaction_submission_result)
{:ok, txhash, from_hex(contract)}
end
def wait_for_root_chain_block(awaited_eth_height, timeout \\ 600_000) do
f = fn ->
{:ok, eth_height} = Client.get_ethereum_height()
if eth_height < awaited_eth_height, do: :repeat, else: {:ok, eth_height}
end
WaitFor.ok(f, timeout)
end
def wait_for_next_child_block(blknum) do
timeout = 10_000
f = fn ->
next_num = RootChain.next_child_block()
if next_num < blknum, do: :repeat, else: {:ok, next_num}
end
WaitFor.ok(f, timeout)
end
def create_account_from_secret(:ganache, secret, passphrase),
do: do_create_account_from_secret("personal_importRawKey", Eth.Encoding.to_hex(secret), passphrase)
def create_account_from_secret(:geth, secret, passphrase),
do: do_create_account_from_secret("personal_importRawKey", Base.encode16(secret), passphrase)
def create_account_from_secret(:parity, secret, passphrase) when byte_size(secret) == 64,
do: do_create_account_from_secret("parity_newAccountFromSecret", Eth.Encoding.to_hex(secret), passphrase)
# private
defp do_create_account_from_secret(method_name, secret, passphrase) do
{:ok, _} = Ethereumex.HttpClient.request(method_name, [secret, passphrase], [])
end
defp fund_address_from_faucet(account_enc, opts) do
{:ok, [default_faucet | _]} = Ethereumex.HttpClient.eth_accounts()
defaults = [faucet: default_faucet, initial_funds_wei: @one_hundred_eth]
%{faucet: faucet, initial_funds_wei: initial_funds_wei} =
defaults
|> Keyword.merge(opts)
|> Enum.into(%{})
unlock_if_possible(account_enc)
params = %{from: faucet, to: account_enc, value: to_hex(initial_funds_wei)}
{:ok, tx_fund} = Transaction.send(Configuration.eth_node(), params)
case Keyword.get(opts, :timeout) do
nil -> WaitFor.eth_receipt(tx_fund, @about_4_blocks_time)
timeout -> WaitFor.eth_receipt(tx_fund, timeout)
end
end
defp unlock_if_possible(account_enc) do
unlock_if_possible(account_enc, Configuration.eth_node())
end
# ganache works the same as geth in this aspect
defp unlock_if_possible(account_enc, :ganache), do: unlock_if_possible(account_enc, :geth)
defp unlock_if_possible(account_enc, :geth) do
{:ok, true} = Ethereumex.HttpClient.request("personal_unlockAccount", [account_enc, @passphrase, 0], [])
end
defp unlock_if_possible(_account_enc, :parity) do
:dont_bother_will_use_personal_sendTransaction
end
# gets the `revert` reason for a failed transaction by txhash
# based on https://gist.github.com/gluk64/fdea559472d957f1138ed93bcbc6f78a
defp get_reason(txhash) do
# we get the exact transaction details
{:ok, tx} = Ethereumex.HttpClient.eth_get_transaction_by_hash(to_hex(txhash))
# we use them (with minor tweak) to be called on the Ethereum client at the exact block of the original call
{:ok, call_result} = tx |> Map.put("data", tx["input"]) |> Ethereumex.HttpClient.eth_call(tx["blockNumber"])
# this call result is hex decoded and then additionally decoded with ABI, should yield a readable ascii-string
if call_result == "0x", do: "out of gas, reason is 0x", else: call_result |> from_hex() |> abi_decode_reason()
end
defp abi_decode_reason(result) do
bytes_to_throw_away = 2 * 32 + 4
# trimming the 4-byte function selector, 32 byte size of size and 32 byte size
result |> binary_part(bytes_to_throw_away, byte_size(result) - bytes_to_throw_away) |> String.trim(<<0>>)
end
end
| 38.716763 | 115 | 0.717528 |
93ea84132a8fa42866763d631df5a56416914979 | 2,646 | ex | Elixir | clients/docs/lib/google_api/docs/v1/model/weighted_font_family.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/docs/lib/google_api/docs/v1/model/weighted_font_family.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/docs/lib/google_api/docs/v1/model/weighted_font_family.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Docs.V1.Model.WeightedFontFamily do
@moduledoc """
Represents a font family and weight of text.
## Attributes
* `fontFamily` (*type:* `String.t`, *default:* `nil`) - The font family of the text. The font family can be any font from the Font menu in Docs or from [Google Fonts] (https://fonts.google.com/). If the font name is unrecognized, the text is rendered in `Arial`.
* `weight` (*type:* `integer()`, *default:* `nil`) - The weight of the font. This field can have any value that is a multiple of `100` between `100` and `900`, inclusive. This range corresponds to the numerical values described in the CSS 2.1 Specification, [section 15.6](https://www.w3.org/TR/CSS21/fonts.html#font-boldness), with non-numerical values disallowed. The default value is `400` ("normal"). The font weight makes up just one component of the rendered font weight. The rendered weight is determined by a combination of the `weight` and the text style's resolved `bold` value, after accounting for inheritance: * If the text is bold and the weight is less than `400`, the rendered weight is 400. * If the text is bold and the weight is greater than or equal to `400` but is less than `700`, the rendered weight is `700`. * If the weight is greater than or equal to `700`, the rendered weight is equal to the weight. * If the text is not bold, the rendered weight is equal to the weight.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:fontFamily => String.t() | nil,
:weight => integer() | nil
}
field(:fontFamily)
field(:weight)
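  # Illustrative only (not part of the generated client): the rendered-weight rule
  # described in the moduledoc could be expressed as
  #
  #   def rendered_weight(weight, bold?) do
  #     cond do
  #       bold? and weight < 400 -> 400
  #       bold? and weight < 700 -> 700
  #       true -> weight
  #     end
  #   end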
end
defimpl Poison.Decoder, for: GoogleApi.Docs.V1.Model.WeightedFontFamily do
def decode(value, options) do
GoogleApi.Docs.V1.Model.WeightedFontFamily.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Docs.V1.Model.WeightedFontFamily do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 52.92 | 1,002 | 0.733182 |
93ea8d73e702b05dffa0b14d3e4bafdbbdd50827 | 1,044 | ex | Elixir | test/support/conn_case.ex | sjahandideh/words | 542ce945a8f1bed8546ecf5df5b4770a6c76329e | [
"MIT"
] | null | null | null | test/support/conn_case.ex | sjahandideh/words | 542ce945a8f1bed8546ecf5df5b4770a6c76329e | [
"MIT"
] | null | null | null | test/support/conn_case.ex | sjahandideh/words | 542ce945a8f1bed8546ecf5df5b4770a6c76329e | [
"MIT"
] | null | null | null | defmodule Words.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
  import other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
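
  An illustrative test built on this case (route and assertion are examples only):

      defmodule Words.PageControllerTest do
        use Words.ConnCase

        test "GET /", %{conn: conn} do
          conn = get(conn, "/")
          assert html_response(conn, 200)
        end
      end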
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
alias Words.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query, only: [from: 1, from: 2]
import Words.Router.Helpers
# The default endpoint for testing
@endpoint Words.Endpoint
end
end
setup tags do
unless tags[:async] do
Ecto.Adapters.SQL.restart_test_transaction(Words.Repo, [])
end
{:ok, conn: Phoenix.ConnTest.conn()}
end
end
| 24.27907 | 64 | 0.700192 |
93eafe7744205b1429ad7ec857af6b7563896597 | 495 | ex | Elixir | apps/discovery_api/lib/discovery_api_web/controllers/data_json_controller.ex | AWHServiceAccount/smartcitiesdata | 6957afac12809288640b6ba6b576c3016e6033d7 | [
"Apache-2.0"
] | 1 | 2020-03-18T21:14:39.000Z | 2020-03-18T21:14:39.000Z | apps/discovery_api/lib/discovery_api_web/controllers/data_json_controller.ex | AWHServiceAccount/smartcitiesdata | 6957afac12809288640b6ba6b576c3016e6033d7 | [
"Apache-2.0"
] | null | null | null | apps/discovery_api/lib/discovery_api_web/controllers/data_json_controller.ex | AWHServiceAccount/smartcitiesdata | 6957afac12809288640b6ba6b576c3016e6033d7 | [
"Apache-2.0"
] | null | null | null | defmodule DiscoveryApiWeb.DataJsonController do
use DiscoveryApiWeb, :controller
alias DiscoveryApi.Services.DataJsonService
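  # The data.json catalog is served from a locally cached file when the service
  # can provide one; any error from the service is surfaced as a plain 500.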
def show(conn, _params) do
case DataJsonService.ensure_data_json_file() do
{:local, file_path} ->
conn
|> put_resp_header("content-type", "application/json")
|> send_file(200, file_path)
{:error, _} ->
conn
|> Plug.Conn.resp(500, "Internal Server Error")
|> Plug.Conn.send_resp()
end
end
end
| 26.052632 | 62 | 0.654545 |
93eb0aed6ffcc530f93615ee150c3d2affd701c0 | 7,637 | ex | Elixir | lib/game/environment.ex | NatTuck/ex_venture | 7a74d33025a580f1e3e93d3755f22258eb3e9127 | [
"MIT"
] | null | null | null | lib/game/environment.ex | NatTuck/ex_venture | 7a74d33025a580f1e3e93d3755f22258eb3e9127 | [
"MIT"
] | null | null | null | lib/game/environment.ex | NatTuck/ex_venture | 7a74d33025a580f1e3e93d3755f22258eb3e9127 | [
"MIT"
] | null | null | null | defmodule Game.Environment do
@moduledoc """
Look at your surroundings, whether a room or an overworld
"""
alias Game.Character
alias Game.Room
alias Game.Overworld
alias Game.Overworld.Sector
@type state :: Data.Room.t()
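  # Locations are addressed either by an integer room id (handled by a `Room`
  # process) or by an "overworld:<id>" string (handled by the owning zone's
  # `Sector` process); most functions below dispatch on that shape.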
defmacro __using__(_opts) do
quote do
@environment Application.get_env(:ex_venture, :game)[:environment]
end
end
@doc """
Get the type of room based on its id
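
  For example (the ids are illustrative):

      room_type("overworld:" <> some_id)
      #=> :overworld

      room_type(42)
      #=> :room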
"""
def room_type(room_id) do
case room_id do
"overworld:" <> _id ->
:overworld
_ ->
:room
end
end
@doc """
Look around your environment
"""
@spec look(integer() | String.t()) :: state()
def look("overworld:" <> overworld_id) do
{zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
case :global.whereis_name({Sector, zone_id, sector}) do
:undefined ->
{:error, :room_offline}
pid ->
GenServer.call(pid, {:look, overworld_id})
end
end
def look(id) do
case :global.whereis_name({Room, id}) do
:undefined ->
{:error, :room_offline}
pid ->
GenServer.call(pid, :look)
end
end
@doc """
Enter a room
Valid enter reasons: `:enter`, `:respawn`
"""
@spec enter(integer(), Character.t(), atom()) :: :ok
def enter("overworld:" <> overworld_id, character, reason) do
{zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
character = Character.to_simple(character)
GenServer.cast(Sector.pid(zone_id, sector), {:enter, overworld_id, character, reason})
end
def enter(id, character, reason) do
character = Character.to_simple(character)
GenServer.cast(Room.pid(id), {:enter, character, reason})
end
@doc """
Leave a room
Valid leave reasons: `:leave`, `:death`
"""
@spec leave(integer(), Character.t(), atom()) :: :ok
def leave("overworld:" <> overworld_id, character, reason) do
{zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
character = Character.to_simple(character)
GenServer.cast(Sector.pid(zone_id, sector), {:leave, overworld_id, character, reason})
end
def leave(id, character, reason) do
character = Character.to_simple(character)
GenServer.cast(Room.pid(id), {:leave, character, reason})
end
@doc """
Notify characters in a room of an event
"""
@spec notify(integer(), Character.t(), tuple()) :: :ok
def notify("overworld:" <> overworld_id, character, event) do
{zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
character = Character.to_simple(character)
GenServer.cast(Sector.pid(zone_id, sector), {:notify, overworld_id, character, event})
end
def notify(id, character, event) do
character = Character.to_simple(character)
GenServer.cast(Room.pid(id), {:notify, character, event})
end
@doc """
Say to the players in the room
"""
@spec say(integer(), pid(), Message.t()) :: :ok
def say("overworld:" <> overworld_id, sender, message) do
{zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
GenServer.cast(Sector.pid(zone_id, sector), {:say, overworld_id, sender, message})
end
def say(id, sender, message) do
GenServer.cast(Room.pid(id), {:say, sender, message})
end
@doc """
Emote to the players in the room
"""
@spec emote(integer(), pid(), Message.t()) :: :ok
def emote("overworld:" <> overworld_id, sender, message) do
{zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
sender = Character.to_simple(sender)
GenServer.cast(Sector.pid(zone_id, sector), {:emote, overworld_id, sender, message})
end
def emote(id, sender, message) do
sender = Character.to_simple(sender)
GenServer.cast(Room.pid(id), {:emote, sender, message})
end
@doc """
Pick up the item
"""
@spec pick_up(integer(), Item.t()) :: :ok
def pick_up("overworld:" <> overworld_id, item) do
{zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
GenServer.call(Sector.pid(zone_id, sector), {:pick_up, overworld_id, item})
end
def pick_up(id, item) do
GenServer.call(Room.pid(id), {:pick_up, item})
end
@doc """
Pick up currency
"""
@spec pick_up_currency(integer()) :: :ok
def pick_up_currency("overworld:" <> overworld_id) do
{zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
GenServer.call(Sector.pid(zone_id, sector), {:pick_up_currency, overworld_id})
end
def pick_up_currency(id) do
GenServer.call(Room.pid(id), :pick_up_currency)
end
@doc """
Drop an item into a room
"""
@spec drop(integer(), Character.t(), Item.t()) :: :ok
def drop("overworld:" <> overworld_id, character, item) do
{zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
character = Character.to_simple(character)
GenServer.cast(Sector.pid(zone_id, sector), {:drop, overworld_id, character, item})
end
def drop(id, character, item) do
character = Character.to_simple(character)
GenServer.cast(Room.pid(id), {:drop, character, item})
end
@doc """
Drop currency into a room
"""
@spec drop_currency(integer(), Character.t(), integer()) :: :ok
def drop_currency("overworld:" <> overworld_id, character, currency) do
{zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
character = Character.to_simple(character)
GenServer.cast(Sector.pid(zone_id, sector), {:drop_currency, overworld_id, character, currency})
end
def drop_currency(id, character, currency) do
character = Character.to_simple(character)
GenServer.cast(Room.pid(id), {:drop_currency, character, currency})
end
@doc """
Update the character after a stats change
"""
@spec update_character(integer(), tuple()) :: :ok
def update_character("overworld:" <> overworld_id, character) do
{zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
character = Character.to_simple(character)
GenServer.cast(Sector.pid(zone_id, sector), {:update_character, overworld_id, character})
end
def update_character(id, character) do
character = Character.to_simple(character)
GenServer.cast(Room.pid(id), {:update_character, character})
end
@doc """
Link the current process against the room's pid, finds by id
"""
def link("overworld:" <> overworld_id) do
{zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
case :global.whereis_name({Sector, zone_id, sector}) do
:undefined ->
{:error, :room_offline}
pid ->
Process.link(pid)
end
end
def link(id) do
case :global.whereis_name({Room, id}) do
:undefined ->
:ok
pid ->
Process.link(pid)
end
end
@doc """
Unlink the current process against the room's pid, finds by id
"""
def unlink("overworld:" <> overworld_id) do
{zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
case :global.whereis_name({Sector, zone_id, sector}) do
:undefined ->
{:error, :room_offline}
pid ->
Process.unlink(pid)
end
end
def unlink(id) do
case :global.whereis_name({Room, id}) do
:undefined ->
:ok
pid ->
Process.unlink(pid)
end
end
@doc """
Crash a room process with an unmatched cast
  There must never be a matching clause for this cast, so the process crashes on purpose
"""
def crash("overworld:" <> overworld_id) do
{zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
GenServer.cast(Sector.pid(zone_id, sector), :crash)
end
def crash(id) do
GenServer.cast(Room.pid(id), :crash)
end
end
| 28.285185 | 100 | 0.671599 |
93eb2caeb62c0dc6ac86b076f03153bfe7f23e0a | 547 | exs | Elixir | elixir/pangram/pangram.exs | jjdonov/Exercism | 3585420c5b0e74ea51a6fbd60e8263620061a1d9 | [
"MIT"
] | null | null | null | elixir/pangram/pangram.exs | jjdonov/Exercism | 3585420c5b0e74ea51a6fbd60e8263620061a1d9 | [
"MIT"
] | null | null | null | elixir/pangram/pangram.exs | jjdonov/Exercism | 3585420c5b0e74ea51a6fbd60e8263620061a1d9 | [
"MIT"
] | null | null | null | defmodule Pangram do
@doc """
Determines if a word or sentence is a pangram.
A pangram is a sentence using every letter of the alphabet at least once.
Returns a boolean.
## Examples
iex> Pangram.pangram?("the quick brown fox jumps over the lazy dog")
true
"""
@chars Enum.to_list(?A..?Z)
@spec pangram?(String.t()) :: boolean
def pangram?(sentence) do
sentence
|> String.upcase
|> has_all_chars?
end
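  # Subtracting the (upcased) sentence's characters from ?A..?Z leaves an empty
  # list exactly when every letter of the alphabet appears at least once.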
defp has_all_chars?(sentence) do
    (@chars -- String.to_charlist(sentence)) == []
end
end
| 19.535714 | 75 | 0.654479 |
93ebaaf15018739d4aaaad81b72e8fdc68c061d4 | 66 | exs | Elixir | episode10/crypto/config/dev.exs | paulfioravanti/learn_elixir | 8424b1a7a89cb9fd1dacb85bcca487601958b8fa | [
"MIT"
] | null | null | null | episode10/crypto/config/dev.exs | paulfioravanti/learn_elixir | 8424b1a7a89cb9fd1dacb85bcca487601958b8fa | [
"MIT"
] | null | null | null | episode10/crypto/config/dev.exs | paulfioravanti/learn_elixir | 8424b1a7a89cb9fd1dacb85bcca487601958b8fa | [
"MIT"
] | null | null | null | use Mix.Config
config :crypto, Crypto.Encryptor, prefix: "Dev: "
| 16.5 | 49 | 0.727273 |
93ebbd5635cea5980f85026aba9061d22ba29d10 | 1,511 | ex | Elixir | lib/ninescraft_web/controllers/world_controller.ex | drobertduke/ninescraft | 00a08e4f3433b1ffd8f0a8dc77927d043b4e3ce8 | [
"Apache-2.0"
] | null | null | null | lib/ninescraft_web/controllers/world_controller.ex | drobertduke/ninescraft | 00a08e4f3433b1ffd8f0a8dc77927d043b4e3ce8 | [
"Apache-2.0"
] | null | null | null | lib/ninescraft_web/controllers/world_controller.ex | drobertduke/ninescraft | 00a08e4f3433b1ffd8f0a8dc77927d043b4e3ce8 | [
"Apache-2.0"
] | null | null | null | defmodule NinescraftWeb.WorldController do
use NinescraftWeb, :controller
alias Ninescraft.Accounts
alias Ninescraft.Accounts.World
action_fallback NinescraftWeb.FallbackController
def index(conn, _params, user) do
worlds = Accounts.list_worlds(user)
render(conn, "index.json", worlds: worlds, user: user)
end
def create(conn, %{"world" => world_params}, user) do
world_params = world_params
|> Map.put("user_id", user.id)
with {:ok, %World{} = world} <- Accounts.create_world(world_params) do
conn
|> put_status(:created)
|> put_resp_header("location", user_world_path(conn, :show, user, world))
|> render("show.json", world: world, user: user)
end
end
def show(conn, %{"id" => id}, user) do
world = Accounts.get_world!(user, id)
render(conn, "show.json", world: world, user: user)
end
def update(conn, %{"id" => id, "world" => world_params}, user) do
world = Accounts.get_world!(user, id)
with {:ok, %World{} = world} <- Accounts.update_world(world, world_params) do
render(conn, "show.json", world: world, user: user)
end
end
def delete(conn, %{"id" => id}, user) do
world = Accounts.get_world!(user, id)
with {:ok, %World{}} <- Accounts.delete_world(world) do
send_resp(conn, :no_content, "")
end
end
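  # Overrides the Phoenix controller `action/2` callback so that every action
  # receives the parent user (looked up from the nested route's "user_id" param)
  # as a third argument.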
def action(conn, _) do
user = Accounts.get_user!(conn.params["user_id"])
args = [conn, conn.params, user]
apply(__MODULE__, action_name(conn), args)
end
end
| 28.509434 | 81 | 0.65321 |
93ebde157814a2e4f119f03db1ac7f47c7bcb308 | 11,728 | exs | Elixir | lib/ex_unit/test/ex_unit/capture_io_test.exs | aJunKobayashi/elixir | 14fde590a8222b845c975b594935b633eb26a115 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/test/ex_unit/capture_io_test.exs | aJunKobayashi/elixir | 14fde590a8222b845c975b594935b633eb26a115 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/test/ex_unit/capture_io_test.exs | aJunKobayashi/elixir | 14fde590a8222b845c975b594935b633eb26a115 | [
"Apache-2.0"
] | null | null | null | Code.require_file("../test_helper.exs", __DIR__)
defmodule ExUnit.CaptureIOTest do
use ExUnit.Case
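  # Minimal client of the Erlang I/O protocol used by the get_until tests below:
  # it sends a raw :get_until request whose callback accumulates characters until
  # a newline (or :eof) is seen.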
defmodule GetUntil do
def until_new_line(_, :eof, _) do
{:done, :eof, []}
end
def until_new_line(this_far, chars, stop_char) do
case Enum.split_while(chars, fn c -> c != stop_char end) do
{l, []} ->
{:more, this_far ++ l}
{l, [stop_char | rest]} ->
{:done, this_far ++ l ++ [stop_char], rest}
end
end
def get_line(device \\ Process.group_leader()) do
request = {:get_until, :unicode, "", __MODULE__, :until_new_line, [?\n]}
send(device, {:io_request, self(), device, request})
receive do
{:io_reply, _, data} -> data
end
end
end
import ExUnit.CaptureIO
doctest ExUnit.CaptureIO, import: true
test "no leakage on failures" do
group_leader = Process.group_leader()
test = self()
assert_raise ArgumentError, fn ->
capture_io(fn ->
send(test, {:string_io, Process.group_leader()})
raise ArgumentError
end)
end
receive do
{:string_io, pid} ->
ref = Process.monitor(pid)
assert_receive {:DOWN, ^ref, _, _, _}
end
assert Process.group_leader() == group_leader
end
test "with no output" do
assert capture_io(fn -> nil end) == ""
end
test "with put chars" do
assert capture_io(fn ->
:io.put_chars("")
end) == ""
assert capture_io(fn ->
:io.put_chars("a")
:io.put_chars("b")
end) == "ab"
assert capture_io(fn ->
:io.put_chars("josé")
end) == "josé"
assert capture_io(fn ->
spawn(fn -> :io.put_chars("a") end)
Process.sleep(10)
end) == "a"
assert capture_io(fn ->
assert :io.put_chars("a") == :ok
end)
end
test "with put chars to stderr" do
assert capture_io(:stderr, fn ->
:io.put_chars(:standard_error, "a")
end) == "a"
end
test "async capture_io works with put chars to stderr" do
parent = self()
[pid1, pid2, pid3] =
for num <- 1..3 do
pid =
spawn_link(fn ->
captured =
capture_io(:stderr, fn ->
:io.put_chars(:standard_error, "before:#{num}\n")
send(parent, {self(), :logged})
assert_receive :continue
:io.put_chars(:standard_error, "after:#{num}\n")
end)
send(parent, captured)
end)
assert_receive {^pid, :logged}
pid
end
send(pid3, :continue)
assert_receive "before:3\nafter:3\n"
send(pid2, :continue)
assert_receive "before:2\nbefore:3\nafter:3\nafter:2\n"
send(pid1, :continue)
assert_receive "before:1\nbefore:2\nbefore:3\nafter:3\nafter:2\nafter:1\n"
end
test "raises when async capturing a named device with a different encoding than the first" do
parent = self()
pid =
spawn_link(fn ->
output =
capture_io(:stderr, [encoding: :latin1], fn ->
:io.put_chars(:standard_error, "a")
send(parent, {self(), :logged})
assert_receive :continue
end)
send(parent, output)
end)
assert_receive {^pid, :logged}
assert_raise ArgumentError,
~r"attempted to change the encoding for a currently captured device :standard_error",
fn ->
capture_io(:stderr, [encoding: :unicode], fn ->
:io.put_chars(:standard_error, "b")
end)
end
assert capture_io(:stderr, [encoding: :latin1], fn ->
:io.put_chars(:standard_error, "c")
end) == "c"
send(pid, :continue)
assert_receive "ac"
end
test "raises when async capturing a named device with an input given to an already captured device" do
parent = self()
pid =
spawn_link(fn ->
capture_io(:stderr, [input: "first"], fn ->
send(parent, {self(), :logged})
Process.sleep(:infinity)
end)
end)
assert_receive {^pid, :logged}
message =
"attempted multiple captures on device :standard_error with input. If you need to give an input to a captured device, you cannot run your test asynchronously"
assert_raise ArgumentError, message, fn ->
capture_io(:stderr, [input: "second"], fn ->
:io.put_chars(:standard_error, "b")
end)
end
assert_raise ArgumentError, message, fn ->
capture_io(:stderr, [input: ""], fn ->
:io.put_chars(:standard_error, "b")
end)
end
end
test "monitors calling processes and releases the capture on exit" do
parent = self()
pid =
spawn(fn ->
capture_io(:stderr, [input: "a"], fn ->
send(parent, :ready)
Process.sleep(:infinity)
end)
end)
assert_receive :ready
ref = Process.monitor(pid)
# Kill the process and make sure the capture is released
Process.exit(pid, :shutdown)
# Make sure the process has exited before we try and start a new capture
assert_receive {:DOWN, ^ref, _, _, _}
assert capture_io(:stderr, [input: "b"], fn -> :ok end)
end
test "with fwrite" do
assert capture_io(fn ->
:io.fwrite(<<127, 128>>)
end) == <<127, 194, 128>>
assert capture_io([encoding: :latin1], fn ->
:io.fwrite(<<127, 128>>)
end) == <<127, 128>>
end
test "with get chars" do
assert capture_io(fn ->
:io.get_chars(">", 3)
end) == ">"
assert capture_io([capture_prompt: false], fn ->
:io.get_chars(">", 3)
end) == ""
capture_io(fn ->
assert :io.get_chars(">", 3) == :eof
end)
capture_io("", fn ->
assert :io.get_chars(">", 3) == :eof
end)
capture_io("abc\ndef", fn ->
assert :io.get_chars(">", 3) == "abc"
assert :io.get_chars(">", 5) == "\ndef"
assert :io.get_chars(">", 7) == :eof
end)
capture_io("あいう", fn ->
assert :io.get_chars(">", 2) == "あい"
assert :io.get_chars(">", 1) == "う"
assert :io.get_chars(">", 1) == :eof
end)
end
test "with get line" do
assert capture_io(fn ->
:io.get_line(">")
end) == ">"
assert capture_io([capture_prompt: false], fn ->
:io.get_line(">")
end) == ""
capture_io(fn ->
assert :io.get_line(">") == :eof
end)
capture_io("", fn ->
assert :io.get_line(">") == :eof
end)
capture_io("\n", fn ->
assert :io.get_line(">") == "\n"
assert :io.get_line(">") == :eof
end)
capture_io("a", fn ->
assert :io.get_line(">") == "a"
assert :io.get_line(">") == :eof
end)
capture_io("a\n", fn ->
assert :io.get_line(">") == "a\n"
assert :io.get_line(">") == :eof
end)
capture_io("a\nb", fn ->
assert :io.get_line(">") == "a\n"
assert :io.get_line(">") == "b"
assert :io.get_line(">") == :eof
end)
capture_io("あい\nう", fn ->
assert :io.get_line(">") == "あい\n"
assert :io.get_line(">") == "う"
assert :io.get_line(">") == :eof
end)
end
test "with get password" do
capture_io(fn ->
assert :io.get_password() == :eof
end)
capture_io("", fn ->
assert :io.get_password() == :eof
end)
capture_io("abc", fn ->
assert :io.get_password() == "abc"
assert :io.get_password() == :eof
end)
capture_io("abc\n", fn ->
assert :io.get_password() == "abc\n"
assert :io.get_password() == :eof
end)
capture_io("\n", fn ->
assert :io.get_password() == "\n"
assert :io.get_password() == :eof
end)
capture_io("a\nb", fn ->
assert :io.get_password() == "a\n"
assert :io.get_password() == "b"
assert :io.get_password() == :eof
end)
capture_io("あい\nう", fn ->
assert :io.get_password() == "あい\n"
assert :io.get_password() == "う"
assert :io.get_password() == :eof
end)
end
test "with get until" do
assert capture_io(fn ->
:io.scan_erl_form('>')
end) == ">"
assert capture_io("1.\n", fn ->
:io.scan_erl_form('>')
end) == ">"
assert capture_io("1\n.\n", fn ->
:io.scan_erl_form('>')
end) == ">>"
assert capture_io([capture_prompt: false], fn ->
:io.scan_erl_form('>')
end) == ""
capture_io(fn ->
assert :io.scan_erl_form('>') == {:eof, 1}
end)
capture_io("1", fn ->
assert :io.scan_erl_form('>') == {:ok, [{:integer, 1, 1}], 1}
assert :io.scan_erl_form('>') == {:eof, 1}
end)
capture_io("1\n.", fn ->
assert :io.scan_erl_form('>') == {:ok, [{:integer, 1, 1}, {:dot, 2}], 2}
assert :io.scan_erl_form('>') == {:eof, 1}
end)
capture_io("1.\n.", fn ->
assert :io.scan_erl_form('>') == {:ok, [{:integer, 1, 1}, {:dot, 1}], 2}
assert :io.scan_erl_form('>') == {:ok, [dot: 1], 1}
assert :io.scan_erl_form('>') == {:eof, 1}
end)
capture_io("\"a", fn ->
assert :io.scan_erl_form('>') == {:error, {1, :erl_scan, {:string, 34, 'a'}}, 1}
assert :io.scan_erl_form('>') == {:eof, 1}
end)
capture_io("\"a\n\"", fn ->
assert :io.scan_erl_form('>') == {:ok, [{:string, 1, 'a\n'}], 2}
assert :io.scan_erl_form('>') == {:eof, 1}
end)
capture_io(":erl. mof*,,l", fn ->
assert :io.scan_erl_form('>') == {:ok, [{:":", 1}, {:atom, 1, :erl}, {:dot, 1}], 1}
expected_tokens = [{:atom, 1, :mof}, {:*, 1}, {:",", 1}, {:",", 1}, {:atom, 1, :l}]
assert :io.scan_erl_form('>') == {:ok, expected_tokens, 1}
assert :io.scan_erl_form('>') == {:eof, 1}
end)
capture_io("a\nb\nc", fn ->
assert GetUntil.get_line() == "a\n"
assert GetUntil.get_line() == "b\n"
assert GetUntil.get_line() == :eof
end)
end
test "with setopts" do
assert capture_io(fn ->
assert :io.setopts({:encoding, :latin1}) == {:error, :enotsup}
end) == ""
end
test "with getopts" do
assert capture_io(fn ->
assert :io.getopts() == [binary: true, encoding: :unicode]
end) == ""
end
test "with columns" do
assert capture_io(fn ->
:io.columns()
end) == ""
capture_io(fn ->
assert :io.columns() == {:error, :enotsup}
end)
end
test "with rows" do
assert capture_io(fn ->
:io.rows()
end) == ""
capture_io(fn ->
assert :io.rows() == {:error, :enotsup}
end)
end
test "with multiple IO requests" do
requests = [{:put_chars, :unicode, "a"}, {:put_chars, :unicode, "b"}]
assert capture_io(fn ->
send_and_receive_io({:requests, requests})
end) == "ab"
capture_io(fn ->
assert send_and_receive_io({:requests, requests}) == :ok
end)
end
test "with unknown IO request" do
assert capture_io(fn ->
send_and_receive_io(:unknown)
end) == ""
capture_io(fn ->
assert send_and_receive_io(:unknown) == {:error, :request}
end)
end
test "with assert inside" do
try do
capture_io(fn ->
assert false
end)
rescue
error in [ExUnit.AssertionError] ->
assert error.message == "Expected truthy, got false"
end
end
defp send_and_receive_io(req) do
pid = self()
send(:erlang.group_leader(), {:io_request, pid, pid, req})
receive do
{:io_reply, ^pid, res} -> res
end
end
end
| 25.167382 | 164 | 0.528479 |
93ec0896cbd8b38522540d0b00802f240fa69575 | 1,982 | ex | Elixir | clients/dns/lib/google_api/dns/v1/model/managed_zone_forwarding_config.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dns/lib/google_api/dns/v1/model/managed_zone_forwarding_config.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dns/lib/google_api/dns/v1/model/managed_zone_forwarding_config.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DNS.V1.Model.ManagedZoneForwardingConfig do
@moduledoc """
## Attributes
* `kind` (*type:* `String.t`, *default:* `dns#managedZoneForwardingConfig`) - Identifies what kind of resource this is. Value: the fixed string "dns#managedZoneForwardingConfig".
* `targetNameServers` (*type:* `list(GoogleApi.DNS.V1.Model.ManagedZoneForwardingConfigNameServerTarget.t)`, *default:* `nil`) - List of target name servers to forward to. Cloud DNS will select the best available name server if more than one target is given.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:kind => String.t(),
:targetNameServers =>
list(GoogleApi.DNS.V1.Model.ManagedZoneForwardingConfigNameServerTarget.t())
}
field(:kind)
field(:targetNameServers,
as: GoogleApi.DNS.V1.Model.ManagedZoneForwardingConfigNameServerTarget,
type: :list
)
end
defimpl Poison.Decoder, for: GoogleApi.DNS.V1.Model.ManagedZoneForwardingConfig do
def decode(value, options) do
GoogleApi.DNS.V1.Model.ManagedZoneForwardingConfig.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DNS.V1.Model.ManagedZoneForwardingConfig do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.036364 | 262 | 0.745207 |
93ec19d58df3c284a48086cc6790a02aec3c1b2d | 27,217 | ex | Elixir | lib/mix/lib/mix/tasks/test.ex | moogle19/elixir | 91fa0e3adad1654a4fe2c8bd218e9e6d42b92b47 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/test.ex | moogle19/elixir | 91fa0e3adad1654a4fe2c8bd218e9e6d42b92b47 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/test.ex | moogle19/elixir | 91fa0e3adad1654a4fe2c8bd218e9e6d42b92b47 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Test do
use Mix.Task
alias Mix.Compilers.Test, as: CT
@compile {:no_warn_undefined, [ExUnit, ExUnit.Filters]}
@shortdoc "Runs a project's tests"
@recursive true
@preferred_cli_env :test
@moduledoc ~S"""
Runs the tests for a project.
This task starts the current application, loads up
`test/test_helper.exs` and then, requires all files matching the
`test/**/*_test.exs` pattern in parallel.
A list of files and/or directories can be given after the task
name in order to select the files to run:
mix test test/some/particular/file_test.exs
mix test test/some/particular/dir
Tests in umbrella projects can be run from the root by specifying
the full suite path, including `apps/my_app/test`, in which case
recursive tests for other child apps will be skipped completely:
# To run all tests for my_app from the umbrella root
mix test apps/my_app/test
# To run a given test file on my_app from the umbrella root
mix test apps/my_app/test/some/particular/file_test.exs
## Understanding test results
When you run your test suite, it prints results as they run with
a summary at the end, as seen below:
$ mix test
...
1) test greets the world (FooTest)
test/foo_test.exs:5
Assertion with == failed
code: assert Foo.hello() == :world!
left: :world
right: :world!
stacktrace:
test/foo_test.exs:6: (test)
........
Finished in 0.05 seconds (0.00s async, 0.05s sync)
1 doctest, 11 tests, 1 failure
Randomized with seed 646219
For each test, the test suite will print a dot. Failed tests
are printed immediately in the format described in the next
section.
After all tests run, we print the suite summary. The first
line contains the total time spent on the suite, followed
by how much time was spent on async tests (defined with
`use ExUnit.Case, async: true`) vs sync ones:
Finished in 0.05 seconds (0.00s async, 0.05s sync)
Developers want to minimize the time spent on sync tests
whenever possible, as sync tests run serially and async
tests run concurrently.
  Finally, we print how many tests were run, how many of them
  failed, how many were invalid, and so on.
### Understanding test failures
First, it contains the failure counter, followed by the test
name and the module the test was defined:
1) test greets the world (FooTest)
The next line contains the exact location of the test in the
`FILE:LINE` format:
test/foo_test.exs:5
If you want to re-run only this test, all you need to do is to
  copy the line above and paste it after `mix test`:
mix test test/foo_test.exs:5
Then we show the error message, code snippet, and general information
about the failed test:
Assertion with == failed
code: assert Foo.hello() == :world!
left: :world
right: :world!
If your terminal supports coloring (see the "Coloring" section below),
a diff is typically shown between `left` and `right` sides. Finally,
we print the stacktrace of the failure:
stacktrace:
test/foo_test.exs:6: (test)
## Command line options
* `--color` - enables color in the output
* `--cover` - runs coverage tool. See "Coverage" section below
* `--exclude` - excludes tests that match the filter
    * `--exit-status` - use an alternate exit status when the test suite
fails (default is 2).
* `--export-coverage` - the name of the file to export coverage results to.
Only has an effect when used with `--cover`
* `--failed` - runs only tests that failed the last time they ran
* `--force` - forces compilation regardless of modification times
* `--formatter` - sets the formatter module that will print the results.
Defaults to ExUnit's built-in CLI formatter
* `--include` - includes tests that match the filter
* `--listen-on-stdin` - runs tests, and then listens on stdin. It will
re-run tests once a newline is received. See the "File system watchers"
section below
* `--max-cases` - sets the maximum number of tests running asynchronously. Only tests from
different modules run in parallel. Defaults to twice the number of cores
* `--max-failures` - the suite stops evaluating tests when this number of test
failures is reached. It runs all tests if omitted
* `--no-archives-check` - does not check archives
* `--no-color` - disables color in the output
* `--no-compile` - does not compile, even if files require compilation
* `--no-deps-check` - does not check dependencies
* `--no-elixir-version-check` - does not check the Elixir version from `mix.exs`
* `--no-start` - does not start applications after compilation
* `--only` - runs only tests that match the filter
* `--partitions` - sets the amount of partitions to split tests in. It must be
      a number greater than zero. If set to one, it acts as a no-op. If more than one,
then you must also set the `MIX_TEST_PARTITION` environment variable with the
partition to use in the current test run. See the "Operating system process
partitioning" section for more information
* `--preload-modules` - preloads all modules defined in applications
* `--profile-require` - profiles the time spent to require test files.
Used only for debugging. The test suite does not run.
* `--raise` - raises if the test suite failed
* `--seed` - seeds the random number generator used to randomize the order of tests;
      `--seed 0` disables randomization so the tests in a single file will always be run
in the same order they were defined in
* `--slowest` - prints timing information for the N slowest tests.
Automatically sets `--trace` and `--preload-modules`
* `--stale` - runs only tests which reference modules that changed since the
last time tests were ran with `--stale`. You can read more about this option
in the "The --stale option" section below
* `--timeout` - sets the timeout for the tests
* `--trace` - runs tests with detailed reporting. Automatically sets `--max-cases` to `1`.
Note that in trace mode test timeouts will be ignored as timeout is set to `:infinity`
* `--warnings-as-errors` - (since v1.12.0) treats warnings as errors and returns a non-zero
exit status. This option only applies to test files. To treat warnings as errors during
compilation and during tests, run:
MIX_ENV=test mix do compile --warnings-as-errors + test --warnings-as-errors
## Configuration
These configurations can be set in the `def project` section of your `mix.exs`:
* `:test_coverage` - a set of options to be passed down to the coverage
mechanism. See the "Coverage" section for more information
* `:test_elixirc_options` - the compiler options to used when
loading/compiling test files. By default it disables the debug chunk
and docs chunk
* `:test_paths` - list of paths containing test files. Defaults to
`["test"]` if the `test` directory exists; otherwise, it defaults to `[]`.
It is expected that all test paths contain a `test_helper.exs` file
* `:test_pattern` - a pattern to load test files. Defaults to `*_test.exs`
* `:warn_test_pattern` - a pattern to match potentially misnamed test files
and display a warning. Defaults to `*_test.ex`
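
  For example (the extra test path below is purely illustrative), these settings
  could be combined as:

      def project() do
        [
          ...
          test_paths: ["test", "integration_test"],
          test_pattern: "*_test.exs"
          ...
        ]
      end
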
## Coloring
Coloring is enabled by default on most Unix terminals. They are also
available on Windows consoles from Windows 10, although it must be
explicitly enabled for the current user in the registry by running
the following command:
reg add HKCU\Console /v VirtualTerminalLevel /t REG_DWORD /d 1
After running the command above, you must restart your current console.
## Filters
ExUnit provides tags and filtering functionality that allow developers
to select which tests to run. The most common functionality is to exclude
some particular tests from running by default in your test helper file:
# Exclude all external tests from running
ExUnit.configure(exclude: [external: true])
Then, whenever desired, those tests could be included in the run via the
`--include` option:
mix test --include external:true
The example above will run all tests that have the external option set to
  `true`. It is also possible to include all tests that have a given tag,
regardless of its value:
mix test --include external
Note that all tests are included by default, so unless they are excluded
first (either in the test helper or via the `--exclude` option) the
`--include` option has no effect.
For this reason, Mix also provides an `--only` option that excludes all
tests and includes only the given ones:
mix test --only external
Which is similar to:
mix test --include external --exclude test
It differs in that the test suite will fail if no tests are executed when the `--only` option is used.
In case a single file is being tested, it is possible to pass one or more specific
line numbers to run only those given tests:
mix test test/some/particular/file_test.exs:12
Which is equivalent to:
mix test --exclude test --include line:12 test/some/particular/file_test.exs
Or:
mix test test/some/particular/file_test.exs:12:24
Which is equivalent to:
mix test --exclude test --include line:12 --include line:24 test/some/particular/file_test.exs
If a given line starts a `describe` block, that line filter runs all tests in it.
Otherwise, it runs the closest test on or before the given line number.
## Coverage
The `:test_coverage` configures the coverage tool and reports generated
by the `--cover` flag. It accepts the following options:
* `:output` - the output directory for cover results. Defaults to `"cover"`.
* `:tool` - a module specifying the coverage tool to use.
* `:summary` - at the end of each coverage run, a summary of each
module is printed, with results in red or green depending on whether
the percentage is below or above a given threshold. The task will
exit with status of 1 if the total coverage is below the threshold.
The `:summary` option allows you to customize the summary generation
and defaults to `[threshold: 90]`, but it may be set to `false` to
disable such reports.
* `:export` - a filename to export results to instead of generating
the coverage result on the fly. The `.coverdata` extension is
automatically added to the given file. This option is automatically
set via the `--export-coverage` option or when using process partitioning.
See `mix test.coverage` to compile a report from multiple exports.
* `:ignore_modules` - modules to ignore from generating reports and
in summaries. It is a list of module names as atoms and regular
expressions that are matched against the module names.
* `:local_only` - by default coverage only tracks local calls,
set this option to false if you plan to run coverage across nodes.
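
  For instance (the module name and regex below are illustrative), a project could
  lower the summary threshold and skip generated modules like this:

      def project() do
        [
          ...
          test_coverage: [
            summary: [threshold: 80],
            ignore_modules: [MyApp.Generated, ~r/\.Fixtures\./]
          ]
          ...
        ]
      end
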
By default, a wrapper around OTP's `cover` is used as the default coverage
tool. You can learn more about how it works in the docs for
`mix test.coverage`. Your tool of choice can be given as follows:
def project() do
[
...
test_coverage: [tool: CoverModule]
...
]
end
`CoverModule` can be any module that exports `start/2`, receiving the
compilation path and the `test_coverage` options as arguments.
It must return either `nil` or an anonymous function of zero arity that
will run after the test suite is done.
## Operating system process partitioning
While ExUnit supports the ability to run tests concurrently within the same
Elixir instance, it is not always possible to run all tests concurrently. For
example, some tests may rely on global resources.
For this reason, `mix test` supports partitioning the test files across
different Elixir instances. This is done by setting the `--partitions` option
to an integer, with the number of partitions, and setting the `MIX_TEST_PARTITION`
environment variable to control which test partition that particular instance
is running. This can also be useful if you want to distribute testing across
multiple machines.
For example, to split a test suite into 4 partitions and run them, you would
use the following commands:
MIX_TEST_PARTITION=1 mix test --partitions 4
MIX_TEST_PARTITION=2 mix test --partitions 4
MIX_TEST_PARTITION=3 mix test --partitions 4
MIX_TEST_PARTITION=4 mix test --partitions 4
The test files are sorted upfront in a round-robin fashion. Note the partition
itself is given as an environment variable so it can be accessed in config files
and test scripts. For example, it can be used to setup a different database instance
per partition in `config/test.exs`.
If partitioning is enabled and `--cover` is used, no cover reports are generated,
as they only contain a subset of the coverage data. Instead, the coverage data
is exported to files such as `cover/MIX_TEST_PARTITION.coverdata`. Once you have
the results of all partitions inside `cover/`, you can run `mix test.coverage` to
get the unified report.
## The --stale option
The `--stale` command line option attempts to run only the test files which
reference modules that have changed since the last time you ran this task with
`--stale`.
The first time this task is run with `--stale`, all tests are run and a manifest
is generated. On subsequent runs, a test file is marked "stale" if any modules it
references (and any modules those modules reference, recursively) were modified
since the last run with `--stale`. A test file is also marked "stale" if it has
been changed since the last run with `--stale`.
The `--stale` option is extremely useful for software iteration, allowing you to
run only the relevant tests as you perform changes to the codebase.
## File-system watchers
You can integrate `mix test` with filesystem watchers through the command line
via the `--listen-on-stdin` option. For example, you can use [fswatch](https://github.com/emcrisostomo/fswatch)
or similar to emit newlines whenever there is a change, which will cause your test
suite to re-run:
fswatch lib test | mix test --listen-on-stdin
This can be combined with the `--stale` option to re-run only the test files that
have changed as well as the tests that have gone stale due to changes in `lib`.
## Aborting the suite
It is possible to abort the test suite with `Ctrl+\ `, which sends a SIGQUIT
signal to the Erlang VM. ExUnit will intercept this signal to show all tests
that have been aborted and print the results collected so far.
This can be useful in case the suite gets stuck and you don't want to wait
until the timeout times passes (which defaults to 30 seconds).
"""
@switches [
force: :boolean,
color: :boolean,
cover: :boolean,
export_coverage: :string,
trace: :boolean,
max_cases: :integer,
max_failures: :integer,
include: :keep,
exclude: :keep,
seed: :integer,
only: :keep,
compile: :boolean,
start: :boolean,
timeout: :integer,
raise: :boolean,
deps_check: :boolean,
archives_check: :boolean,
elixir_version_check: :boolean,
failed: :boolean,
stale: :boolean,
listen_on_stdin: :boolean,
formatter: :keep,
slowest: :integer,
partitions: :integer,
preload_modules: :boolean,
warnings_as_errors: :boolean,
profile_require: :string,
exit_status: :integer
]
@cover [output: "cover", tool: Mix.Tasks.Test.Coverage]
@impl true
def run(args) do
{opts, files} = OptionParser.parse!(args, strict: @switches)
if not Mix.Task.recursing?() do
do_run(opts, args, files)
else
{files_in_apps_path, files_not_in_apps_path} =
files
|> Enum.map(&Path.expand/1)
|> Enum.map(&Path.relative_to_cwd/1)
|> Enum.split_with(&String.starts_with?(&1, "apps/"))
app = Mix.Project.config()[:app]
current_app_path = "apps/#{app}/"
files_in_current_app_path =
for file <- files_in_apps_path,
String.starts_with?(file, current_app_path) or not relative_app_file_exists?(file),
do: String.trim_leading(file, current_app_path)
files = files_in_current_app_path ++ files_not_in_apps_path
if files == [] and files_in_apps_path != [] do
:ok
else
do_run([test_location_relative_path: "apps/#{app}"] ++ opts, args, files)
end
end
end
defp relative_app_file_exists?(file) do
{file, _} = ExUnit.Filters.parse_path(file)
File.exists?(Path.join("../..", file))
end
defp do_run(opts, args, files) do
if opts[:listen_on_stdin] do
System.at_exit(fn _ ->
IO.gets(:stdio, "")
Mix.shell().info("Restarting...")
:init.restart()
Process.sleep(:infinity)
end)
end
unless System.get_env("MIX_ENV") || Mix.env() == :test do
Mix.raise("""
"mix test" is running in the \"#{Mix.env()}\" environment. If you are \
running tests from within another command, you can either:
1. set MIX_ENV explicitly:
MIX_ENV=test mix test.another
2. set the :preferred_cli_env for a command inside "def project" in your mix.exs:
preferred_cli_env: ["test.another": :test]
""")
end
# Load ExUnit before we compile anything in case we are compiling
# helper modules that depend on ExUnit.
Application.ensure_loaded(:ex_unit)
# --warnings-as-errors in test does not pass down to compile,
# if you need this, call compile explicitly before.
Mix.Task.run("compile", args -- ["--warnings-as-errors"])
project = Mix.Project.config()
{partitions, opts} = Keyword.pop(opts, :partitions)
partitioned? = is_integer(partitions) and partitions > 1
# Start cover after we load deps but before we start the app.
cover =
if opts[:cover] do
compile_path = Mix.Project.compile_path(project)
partition = partitioned? && System.get_env("MIX_TEST_PARTITION")
cover =
@cover
|> Keyword.put(:export, opts[:export_coverage] || partition)
|> Keyword.merge(project[:test_coverage] || [])
cover[:tool].start(compile_path, cover)
end
# Start the app and configure ExUnit with command line options
# before requiring test_helper.exs so that the configuration is
# available in test_helper.exs
Mix.shell().print_app
app_start_args = if opts[:slowest], do: ["--preload-modules" | args], else: args
Mix.Task.run("app.start", app_start_args)
# The test helper may change the Mix.shell(), so revert it whenever we raise and after suite
shell = Mix.shell()
# Configure ExUnit now and then again so the task options override test_helper.exs
{ex_unit_opts, allowed_files} = process_ex_unit_opts(opts)
ExUnit.configure(ex_unit_opts)
test_paths = project[:test_paths] || default_test_paths()
Enum.each(test_paths, &require_test_helper(shell, &1))
ExUnit.configure(merge_helper_opts(ex_unit_opts))
# Finally parse, require and load the files
test_elixirc_options = project[:test_elixirc_options] || []
test_files = parse_files(files, shell, test_paths)
test_pattern = project[:test_pattern] || "*_test.exs"
warn_test_pattern = project[:warn_test_pattern] || "*_test.ex"
matched_test_files =
test_files
|> Mix.Utils.extract_files(test_pattern)
|> filter_to_allowed_files(allowed_files)
|> filter_by_partition(shell, partitions)
display_warn_test_pattern(test_files, test_pattern, matched_test_files, warn_test_pattern)
case CT.require_and_run(matched_test_files, test_paths, test_elixirc_options, opts) do
{:ok, %{excluded: excluded, failures: failures, total: total}} ->
Mix.shell(shell)
cover && cover.()
cond do
failures > 0 and opts[:raise] ->
raise_with_shell(shell, "\"mix test\" failed")
failures > 0 ->
System.at_exit(fn _ ->
exit({:shutdown, Keyword.fetch!(ex_unit_opts, :exit_status)})
end)
excluded == total and Keyword.has_key?(opts, :only) ->
message = "The --only option was given to \"mix test\" but no test was executed"
raise_or_error_at_exit(shell, message, opts)
true ->
:ok
end
:noop ->
cond do
opts[:stale] ->
Mix.shell().info("No stale tests")
files == [] ->
Mix.shell().info("There are no tests to run")
true ->
message = "Paths given to \"mix test\" did not match any directory/file: "
raise_or_error_at_exit(shell, message <> Enum.join(files, ", "), opts)
end
:ok
end
end
defp raise_with_shell(shell, message) do
Mix.shell(shell)
Mix.raise(message)
end
defp raise_or_error_at_exit(shell, message, opts) do
cond do
opts[:raise] ->
raise_with_shell(shell, message)
Mix.Task.recursing?() ->
Mix.shell().info(message)
true ->
Mix.shell().error(message)
System.at_exit(fn _ -> exit({:shutdown, 1}) end)
end
end
defp display_warn_test_pattern(test_files, test_pattern, matched_test_files, warn_test_pattern) do
files = Mix.Utils.extract_files(test_files, warn_test_pattern) -- matched_test_files
for file <- files do
Mix.shell().info(
"warning: #{file} does not match #{inspect(test_pattern)} and won't be loaded"
)
end
end
@option_keys [
:trace,
:max_cases,
:max_failures,
:include,
:exclude,
:seed,
:timeout,
:formatters,
:colors,
:slowest,
:failures_manifest_file,
:only_test_ids,
:test_location_relative_path,
:exit_status
]
@doc false
def process_ex_unit_opts(opts) do
{opts, allowed_files} = manifest_opts(opts)
opts =
opts
|> filter_opts(:include)
|> filter_opts(:exclude)
|> filter_opts(:only)
|> formatter_opts()
|> color_opts()
|> exit_status_opts()
|> Keyword.take(@option_keys)
|> default_opts()
{opts, allowed_files}
end
defp merge_helper_opts(opts) do
# The only options that are additive from app env are the excludes
merge_opts(opts, :exclude)
end
defp merge_opts(opts, key) do
value = List.wrap(Application.get_env(:ex_unit, key, []))
Keyword.update(opts, key, value, &Enum.uniq(&1 ++ value))
end
defp default_opts(opts) do
# Set autorun to false because Mix
# automatically runs the test suite for us.
[autorun: false] ++ opts
end
defp parse_files([], _shell, test_paths) do
test_paths
end
defp parse_files([single_file], _shell, _test_paths) do
# Check if the single file path matches test/path/to_test.exs:123. If it does,
# apply "--only line:123" and trim the trailing :123 part.
{single_file, opts} = ExUnit.Filters.parse_path(single_file)
ExUnit.configure(opts)
[single_file]
end
defp parse_files(files, shell, _test_paths) do
if Enum.any?(files, &match?({_, [_ | _]}, ExUnit.Filters.parse_path(&1))) do
raise_with_shell(shell, "Line numbers can only be used when running a single test file")
else
files
end
end
defp parse_filters(opts, key) do
if Keyword.has_key?(opts, key) do
ExUnit.Filters.parse(Keyword.get_values(opts, key))
end
end
defp filter_opts(opts, :only) do
if filters = parse_filters(opts, :only) do
opts
|> Keyword.update(:include, filters, &(filters ++ &1))
|> Keyword.update(:exclude, [:test], &[:test | &1])
else
opts
end
end
defp filter_opts(opts, key) do
if filters = parse_filters(opts, key) do
Keyword.put(opts, key, filters)
else
opts
end
end
defp formatter_opts(opts) do
if Keyword.has_key?(opts, :formatter) do
formatters =
opts
|> Keyword.get_values(:formatter)
|> Enum.map(&Module.concat([&1]))
Keyword.put(opts, :formatters, formatters)
else
opts
end
end
@manifest_file_name ".mix_test_failures"
defp manifest_opts(opts) do
manifest_file = Path.join(Mix.Project.manifest_path(), @manifest_file_name)
opts = Keyword.put(opts, :failures_manifest_file, manifest_file)
if opts[:failed] do
if opts[:stale] do
Mix.raise("Combining --failed and --stale is not supported.")
end
{allowed_files, failed_ids} = ExUnit.Filters.failure_info(manifest_file)
{Keyword.put(opts, :only_test_ids, failed_ids), allowed_files}
else
{opts, nil}
end
end
defp filter_to_allowed_files(matched_test_files, nil), do: matched_test_files
defp filter_to_allowed_files(matched_test_files, %MapSet{} = allowed_files) do
Enum.filter(matched_test_files, &MapSet.member?(allowed_files, Path.expand(&1)))
end
defp filter_by_partition(files, _shell, total) when total in [nil, 1],
do: files
defp filter_by_partition(files, shell, total) when total > 1 do
partition = System.get_env("MIX_TEST_PARTITION")
case partition && Integer.parse(partition) do
{partition, ""} when partition in 1..total ->
partition = partition - 1
# We sort the files because Path.wildcard does not guarantee
# ordering, so different OSes could return a different order,
          # meaning a suite partitioned across different OSes could end up
          # running duplicate files.
for {file, index} <- Enum.with_index(Enum.sort(files)),
rem(index, total) == partition,
do: file
_ ->
raise_with_shell(
shell,
"The MIX_TEST_PARTITION environment variable must be set to an integer between " <>
"1..#{total} when the --partitions option is set, got: #{inspect(partition)}"
)
end
end
defp filter_by_partition(_files, shell, total) do
raise_with_shell(
shell,
"--partitions : expected to be positive integer, got #{total}"
)
end
defp color_opts(opts) do
case Keyword.fetch(opts, :color) do
{:ok, enabled?} ->
Keyword.put(opts, :colors, enabled: enabled?)
:error ->
opts
end
end
defp exit_status_opts(opts) do
Keyword.put_new(opts, :exit_status, 2)
end
defp require_test_helper(shell, dir) do
file = Path.join(dir, "test_helper.exs")
if File.exists?(file) do
Code.require_file(file)
else
raise_with_shell(
shell,
"Cannot run tests because test helper file #{inspect(file)} does not exist"
)
end
end
defp default_test_paths do
if File.dir?("test") do
["test"]
else
[]
end
end
end
| 33.518473 | 113 | 0.678951 |
93ec26972df0ec6b0e3e716b24f6e16bc7ac480a | 14,034 | ex | Elixir | clients/service_networking/lib/google_api/service_networking/v1/api/operations.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/service_networking/lib/google_api/service_networking/v1/api/operations.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/service_networking/lib/google_api/service_networking/v1/api/operations.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceNetworking.V1.Api.Operations do
@moduledoc """
API calls for all endpoints tagged `Operations`.
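  A minimal usage sketch (the OAuth token and operation name below are illustrative placeholders):

      connection = GoogleApi.ServiceNetworking.V1.Connection.new("oauth2-token")

      {:ok, operation} =
        GoogleApi.ServiceNetworking.V1.Api.Operations.servicenetworking_operations_get(
          connection,
          "operations/example-operation-id"
        )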
"""
alias GoogleApi.ServiceNetworking.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
## Parameters
* `connection` (*type:* `GoogleApi.ServiceNetworking.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The name of the operation resource to be cancelled.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.ServiceNetworking.V1.Model.CancelOperationRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.ServiceNetworking.V1.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec servicenetworking_operations_cancel(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.ServiceNetworking.V1.Model.Empty.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def servicenetworking_operations_cancel(connection, name, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/{+name}:cancel", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.ServiceNetworking.V1.Model.Empty{}])
end
@doc """
Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
## Parameters
* `connection` (*type:* `GoogleApi.ServiceNetworking.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The name of the operation resource to be deleted.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.ServiceNetworking.V1.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec servicenetworking_operations_delete(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.ServiceNetworking.V1.Model.Empty.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def servicenetworking_operations_delete(connection, name, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/v1/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.ServiceNetworking.V1.Model.Empty{}])
end
@doc """
Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
## Parameters
* `connection` (*type:* `GoogleApi.ServiceNetworking.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The name of the operation resource.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.ServiceNetworking.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec servicenetworking_operations_get(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.ServiceNetworking.V1.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def servicenetworking_operations_get(connection, name, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.ServiceNetworking.V1.Model.Operation{}])
end
@doc """
Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
## Parameters
* `connection` (*type:* `GoogleApi.ServiceNetworking.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The name of the operation's parent resource.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - The standard list filter.
* `:pageSize` (*type:* `integer()`) - The standard list page size.
* `:pageToken` (*type:* `String.t`) - The standard list page token.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.ServiceNetworking.V1.Model.ListOperationsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec servicenetworking_operations_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.ServiceNetworking.V1.Model.ListOperationsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def servicenetworking_operations_list(connection, name, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:filter => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.ServiceNetworking.V1.Model.ListOperationsResponse{}]
)
end
end
| 50.482014 | 607 | 0.641015 |
93ec78fa6905e4cd3b5d6a0a9251cb9db56a6769 | 3,953 | ex | Elixir | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/user.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/user.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/user.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AndroidEnterprise.V1.Model.User do
@moduledoc """
A Users resource represents an account associated with an enterprise. The account may be specific to a device or to an individual user (who can then use the account across multiple devices). The account may provide access to managed Google Play only, or to other Google services, depending on the identity model: - The Google managed domain identity model requires synchronization to Google account sources (via primaryEmail). - The managed Google Play Accounts identity model provides a dynamic means for enterprises to create user or device accounts as needed. These accounts provide access to managed Google Play.
## Attributes
- accountIdentifier (String.t): A unique identifier you create for this user, such as \"user342\" or \"asset#44418\". Do not use personally identifiable information (PII) for this property. Must always be set for EMM-managed users. Not set for Google-managed users. Defaults to: `null`.
- accountType (String.t): The type of account that this user represents. A userAccount can be installed on multiple devices, but a deviceAccount is specific to a single device. An EMM-managed user (emmManaged) can be either type (userAccount, deviceAccount), but a Google-managed user (googleManaged) is always a userAccount. Defaults to: `null`.
- displayName (String.t): The name that will appear in user interfaces. Setting this property is optional when creating EMM-managed users. If you do set this property, use something generic about the organization (such as \"Example, Inc.\") or your name (as EMM). Not used for Google-managed user accounts. Defaults to: `null`.
- id (String.t): The unique ID for the user. Defaults to: `null`.
- kind (String.t): Identifies what kind of resource this is. Value: the fixed string \"androidenterprise#user\". Defaults to: `null`.
- managementType (String.t): The entity that manages the user. With googleManaged users, the source of truth is Google so EMMs have to make sure a Google Account exists for the user. With emmManaged users, the EMM is in charge. Defaults to: `null`.
  - primaryEmail (String.t): The user's primary email address, for example, \"user@example.com\". Will always be set for Google managed users and not set for EMM managed users. Defaults to: `null`.
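  For example, an EMM-managed user could be represented as (field values are illustrative):

      %GoogleApi.AndroidEnterprise.V1.Model.User{
        accountIdentifier: "user342",
        accountType: "userAccount",
        managementType: "emmManaged"
      }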
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:accountIdentifier => any(),
:accountType => any(),
:displayName => any(),
:id => any(),
:kind => any(),
:managementType => any(),
:primaryEmail => any()
}
field(:accountIdentifier)
field(:accountType)
field(:displayName)
field(:id)
field(:kind)
field(:managementType)
field(:primaryEmail)
end
defimpl Poison.Decoder, for: GoogleApi.AndroidEnterprise.V1.Model.User do
def decode(value, options) do
GoogleApi.AndroidEnterprise.V1.Model.User.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AndroidEnterprise.V1.Model.User do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 59.893939 | 620 | 0.743992 |
93ec7d05b4ef8b73fc195ff4a47f9fe6b6776847 | 1,683 | ex | Elixir | clients/home_graph/lib/google_api/home_graph/v1/model/state_and_notification_payload.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/home_graph/lib/google_api/home_graph/v1/model/state_and_notification_payload.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/home_graph/lib/google_api/home_graph/v1/model/state_and_notification_payload.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HomeGraph.V1.Model.StateAndNotificationPayload do
@moduledoc """
Payload containing the state and notification information for devices.
## Attributes
* `devices` (*type:* `GoogleApi.HomeGraph.V1.Model.ReportStateAndNotificationDevice.t`, *default:* `nil`) - The devices for updating state and sending notifications.
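  For example (a minimal sketch; a real payload would populate the nested device struct with state data):

      %GoogleApi.HomeGraph.V1.Model.StateAndNotificationPayload{
        devices: %GoogleApi.HomeGraph.V1.Model.ReportStateAndNotificationDevice{}
      }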
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:devices => GoogleApi.HomeGraph.V1.Model.ReportStateAndNotificationDevice.t()
}
field(:devices, as: GoogleApi.HomeGraph.V1.Model.ReportStateAndNotificationDevice)
end
defimpl Poison.Decoder, for: GoogleApi.HomeGraph.V1.Model.StateAndNotificationPayload do
def decode(value, options) do
GoogleApi.HomeGraph.V1.Model.StateAndNotificationPayload.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.HomeGraph.V1.Model.StateAndNotificationPayload do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.808511 | 169 | 0.767083 |
93ec838d3aff9acca60ecad88983b7c300a8eb61 | 2,237 | ex | Elixir | lib/central/logging/lib/startup.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 4 | 2021-07-29T16:23:20.000Z | 2022-02-23T05:34:36.000Z | lib/central/logging/lib/startup.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 14 | 2021-08-01T02:36:14.000Z | 2022-01-30T21:15:03.000Z | lib/central/logging/lib/startup.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 7 | 2021-05-13T12:55:28.000Z | 2022-01-14T06:39:06.000Z | defmodule Central.Logging.Startup do
@moduledoc false
use CentralWeb, :startup
def startup do
add_permission_set("logging", "page_view", ~w(show delete report))
add_permission_set("logging", "aggregate", ~w(show delete report))
add_permission_set("logging", "audit", ~w(show delete report))
add_permission_set("logging", "error", ~w(show delete report))
add_permission_set("logging", "live", ~w(show))
QuickAction.add_items([
%{
label: "Live view dashboard",
icons: ["far fa-tachometer-alt", :list],
url: "/logging/live/dashboard",
permissions: "logging.live"
},
%{
label: "Error logs",
icons: [Central.Logging.ErrorLogLib.icon(), :list],
url: "/logging/error_logs",
permissions: "logging.error"
},
%{
label: "Audit logs",
icons: [Central.Logging.AuditLogLib.icon(), :list],
url: "/logging/audit",
permissions: "logging.audit"
},
%{
label: "Aggregate logs",
icons: [Central.Logging.AggregateViewLogLib.icon(), :list],
url: "/logging/aggregate_views",
permissions: "logging.agggregate"
},
%{
label: "Page view logs",
icons: [Central.Logging.PageViewLogLib.icon(), :list],
url: "/logging/page_views",
permissions: "logging.page_view"
}
])
# HookLib.register_events([
# %Event{
# name: "logging.Page view log",
# description: "Triggered when a user within your admin group loads a page",
# permissions: ["logging.page_view"],
# icons: [
# CentralWeb.Logging.GeneralHelper.icon(),
# Central.Logging.PageViewLogHelper.icon(),
# "far fa-plus"
# ],
# colour: elem(Central.Logging.PageViewLogHelper.colours(), 0),
# onload: nil,#CentralWeb.Bedrock.PolicyHook.latest_policies,
# onload_defaults: %{},
# outputs: [:page_view_log],
# example: %{
# ip: "127.0.0.1",
# log_id: 101,
# path: "/dashboard/displays/1",
# timestamp: "18:06:50",
# user_id: 1,
# username: "Test user"
# }},
# ])
end
end
| 30.22973 | 84 | 0.569066 |
93ecba0ccbefeff350a97399d82d0f5c84be066a | 2,288 | exs | Elixir | apps/omg_watcher/test/db/txoutput_test.exs | hoardexchange/elixir-omg | 423528699d467f1cc0d02c596290ab907af38c2c | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/test/db/txoutput_test.exs | hoardexchange/elixir-omg | 423528699d467f1cc0d02c596290ab907af38c2c | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/test/db/txoutput_test.exs | hoardexchange/elixir-omg | 423528699d467f1cc0d02c596290ab907af38c2c | [
"Apache-2.0"
] | 2 | 2020-06-07T11:14:54.000Z | 2020-08-02T07:36:32.000Z | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Watcher.DB.TxOutputTest do
use ExUnitFixtures
use ExUnit.Case, async: false
use OMG.API.Fixtures
alias OMG.API
alias OMG.API.Utxo
alias OMG.Watcher.DB
require Utxo
@eth OMG.Eth.RootChain.eth_pseudo_address()
@tag fixtures: [:initial_blocks]
test "compose_utxo_exit should return proper proof format" do
{:ok,
%{
utxo_pos: _utxo_pos,
txbytes: _txbytes,
proof: proof,
sigs: _sigs
}} = DB.TxOutput.compose_utxo_exit(Utxo.position(3000, 0, 1))
assert <<_proof::bytes-size(512)>> = proof
end
@tag fixtures: [:initial_blocks]
test "compose_utxo_exit should return error when there is no txs in specfic block" do
{:error, :no_deposit_for_given_blknum} = DB.TxOutput.compose_utxo_exit(Utxo.position(1001, 1, 0))
end
@tag fixtures: [:initial_blocks]
test "compose_utxo_exit should return error when there is no tx in specfic block" do
{:error, :utxo_not_found} = DB.TxOutput.compose_utxo_exit(Utxo.position(2000, 1, 0))
end
@tag fixtures: [:phoenix_ecto_sandbox, :alice]
test "transaction output schema handles big numbers properly", %{alice: alice} do
power_of_2 = fn n -> :lists.duplicate(n, 2) |> Enum.reduce(&(&1 * &2)) end
assert 16 == power_of_2.(4)
big_amount = power_of_2.(260)
DB.Transaction.update_with(%{
transactions: [
API.TestHelper.create_recovered([], @eth, [{alice, big_amount}])
],
blknum: 11_000,
blkhash: <<?#::256>>,
timestamp: :os.system_time(:second),
eth_height: 10
})
utxo = DB.TxOutput.get_by_position(Utxo.position(11_000, 0, 0))
assert not is_nil(utxo)
assert utxo.amount == big_amount
end
end
| 31.342466 | 101 | 0.698427 |
93ecc73ae0320719424a1d10371167fe156d8935 | 17,966 | ex | Elixir | lib/prom_ex/plugins/phoenix.ex | cschmatzler/prom_ex | ca899bdc92d7372609b46685127faba7f02d8102 | [
"MIT"
] | 354 | 2020-10-21T06:27:15.000Z | 2022-03-29T13:22:46.000Z | lib/prom_ex/plugins/phoenix.ex | cschmatzler/prom_ex | ca899bdc92d7372609b46685127faba7f02d8102 | [
"MIT"
] | 111 | 2020-11-25T21:27:13.000Z | 2022-03-28T10:42:59.000Z | lib/prom_ex/plugins/phoenix.ex | cschmatzler/prom_ex | ca899bdc92d7372609b46685127faba7f02d8102 | [
"MIT"
] | 45 | 2020-12-31T20:37:11.000Z | 2022-03-18T13:12:21.000Z | if Code.ensure_loaded?(Phoenix) do
defmodule PromEx.Plugins.Phoenix do
@moduledoc """
This plugin captures metrics emitted by Phoenix. Specifically, it captures HTTP request metrics and
Phoenix channel metrics.
## Plugin options
This plugin supports the following options:
- `metric_prefix`: This option is OPTIONAL and is used to override the default metric prefix of
`[otp_app, :prom_ex, :phoenix]`. If this changes you will also want to set `phoenix_metric_prefix`
in your `dashboard_assigns` to the snakecase version of your prefix, the default
`phoenix_metric_prefix` is `{otp_app}_prom_ex_phoenix`.
### Single Endpoint/Router
- `router`: This option is REQUIRED and is the full module name of your Phoenix Router (e.g MyAppWeb.Router).
- `endpoint`: This is a REQUIRED option and is the full module name of your Phoenix Endpoint (e.g MyAppWeb.Endpoint).
    - `event_prefix`: This option is OPTIONAL and allows you to set the event prefix for the Telemetry events. This
      value should align with what you pass to `Plug.Telemetry` in your `endpoint.ex` file (see the plug docs
      for more information https://hexdocs.pm/plug/Plug.Telemetry.html).
    - `additional_routes`: This option is OPTIONAL and allows you to specify route path labels for application routes
      not defined in your Router module. For example, if you want to track telemetry events for a plug in your
      `endpoint.ex` file, you can provide a keyword list with the structure `[some_route: ~r(\/some-path)]` and any
      time that the route is called and the plug handles the call, the path label for this particular Prometheus metric
      will be set to `some_route`. You can pass in either a regular expression or a string to match the incoming
      request (see the example below).
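    For example (the route names and paths below are illustrative):

    ```elixir
    {
      PromEx.Plugins.Phoenix,
      endpoint: MyApp.Endpoint,
      router: MyAppWeb.Router,
      additional_routes: [health: "/health", assets: ~r(/assets/.*)]
    }
    ```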
#### Example plugin configuration
```elixir
{
PromEx.Plugins.Phoenix,
endpoint: MyApp.Endpoint,
router: MyAppWeb.Public.Router,
event_prefix: [:admin, :endpoint]
}
```
### Multiple Endpoints/Router
- `endpoints`: This accepts a list of per Phoenix Endpoint options `{endpoint_name, endpoint_opts}`
- `endpoint_name`: This option is REQUIRED and is the full module name of your Phoenix Endpoint (e.g MyAppWeb.Endpoint).
- `endpoint_opts`: Per endpoint plugin options:
- `:routers`: This option is REQUIRED and lists all of routers modules for the endpoint, the HTTP metrics will
be augmented with controller/action/path information from the routers.
- `:event_prefix`: This option is OPTIONAL and allows you to set the event prefix for the Telemetry events. This
value should align with what you pass to `Plug.Telemetry` in the corresponding endpoint module (see the plug docs
for more information https://hexdocs.pm/plug/Plug.Telemetry.html)
      - `:additional_routes`: This option is OPTIONAL and allows you to specify route path labels for application routes
not defined in your Router modules for the corresponding endpoint.
#### Example plugin configuration
```elixir
{
PromEx.Plugins.Phoenix,
endpoints: [
{MyApp.Endpoint, routers: [MyAppWeb.Public.Router]},
{MyApp.Endpoint2, routers: [MyAppWeb.Admin.Router], event_prefix: [:admin, :endpoint]}
]
}
```
## Metric Groups
This plugin exposes the following metric groups:
- `:phoenix_http_event_metrics`
- `:phoenix_channel_event_metrics`
- `:phoenix_socket_event_metrics`
- `:phoenix_endpoint_manual_metrics`
## Usage
To use plugin in your application, add the following to your PromEx module:
```elixir
defmodule WebApp.PromEx do
use PromEx, otp_app: :web_app
@impl true
def plugins do
[
...
{
PromEx.Plugins.Phoenix,
endpoint: MyApp.Endpoint,
router: MyAppWeb.Public.Router
}
]
end
@impl true
def dashboards do
[
...
{:prom_ex, "phoenix.json"}
]
end
end
```
When working with multiple Phoenix routers use the `endpoints` option instead:
```elixir
defmodule WebApp.PromEx do
use PromEx, otp_app: :web_app
@impl true
def plugins do
[
...
{
PromEx.Plugins.Phoenix,
endpoints: [
{MyApp.Endpoint, routers: [MyAppWeb.Public.Router]},
{MyApp.Endpoint2, routers: [MyAppWeb.Admin.Router], event_prefix: [:admin, :endpoint]}
]
}
]
end
@impl true
def dashboards do
[
...
{:prom_ex, "phoenix.json"}
]
end
end
```
"""
use PromEx.Plugin
require Logger
alias Phoenix.Socket
alias Plug.Conn
@stop_event [:prom_ex, :plugin, :phoenix, :stop]
@impl true
def event_metrics(opts) do
otp_app = Keyword.fetch!(opts, :otp_app)
metric_prefix = Keyword.get(opts, :metric_prefix, PromEx.metric_prefix(otp_app, :phoenix))
phoenix_event_prefixes = fetch_event_prefixes!(opts)
set_up_telemetry_proxy(phoenix_event_prefixes)
# Event metrics definitions
[
http_events(metric_prefix, opts),
channel_events(metric_prefix),
socket_events(metric_prefix)
]
end
@impl true
def manual_metrics(opts) do
otp_app = Keyword.fetch!(opts, :otp_app)
metric_prefix = PromEx.metric_prefix(otp_app, :phoenix)
[
endpoint_info(metric_prefix, opts)
]
end
defp endpoint_info(metric_prefix, opts) do
# Fetch user options
phoenix_endpoint = Keyword.get(opts, :endpoint) || Keyword.get(opts, :endpoints)
Manual.build(
:phoenix_endpoint_manual_metrics,
{__MODULE__, :execute_phoenix_endpoint_info, [phoenix_endpoint]},
[
last_value(
metric_prefix ++ [:endpoint, :url, :info],
event_name: [:prom_ex, :plugin, :phoenix, :endpoint_url],
description: "The configured URL of the Endpoint module.",
measurement: :status,
tags: [:url, :endpoint]
),
last_value(
metric_prefix ++ [:endpoint, :port, :info],
event_name: [:prom_ex, :plugin, :phoenix, :endpoint_port],
description: "The configured port of the Endpoint module.",
measurement: :status,
tags: [:port, :endpoint]
)
]
)
end
@doc false
def execute_phoenix_endpoint_info(endpoint) do
# TODO: This is a bit of a hack until Phoenix supports an init telemetry event to
# reliably get the configuration.
endpoint_init_checker = fn
count, endpoint_module, endpoint_init_checker_function when count < 10 ->
case Process.whereis(endpoint_module) do
pid when is_pid(pid) ->
measurements = %{status: 1}
url_metadata = %{url: endpoint_module.url(), endpoint: normalize_module_name(endpoint_module)}
:telemetry.execute([:prom_ex, :plugin, :phoenix, :endpoint_url], measurements, url_metadata)
%URI{port: port} = endpoint_module.struct_url()
port_metadata = %{port: port, endpoint: normalize_module_name(endpoint_module)}
:telemetry.execute([:prom_ex, :plugin, :phoenix, :endpoint_port], measurements, port_metadata)
_ ->
Process.sleep(1_000)
endpoint_init_checker_function.(count + 1, endpoint_module, endpoint_init_checker_function)
end
_, _, _ ->
:noop
end
if is_list(endpoint) do
endpoint
|> Enum.each(fn {endpoint_module, _} ->
Task.start(fn ->
endpoint_init_checker.(0, endpoint_module, endpoint_init_checker)
end)
end)
else
Task.start(fn ->
endpoint_init_checker.(0, endpoint, endpoint_init_checker)
end)
end
end
defp http_events(metric_prefix, opts) do
routers = fetch_routers!(opts)
additional_routes = fetch_additional_routes!(opts)
http_metrics_tags = [:status, :method, :path, :controller, :action]
Event.build(
:phoenix_http_event_metrics,
[
# Capture request duration information
distribution(
metric_prefix ++ [:http, :request, :duration, :milliseconds],
event_name: @stop_event,
measurement: :duration,
description: "The time it takes for the application to respond to HTTP requests.",
reporter_options: [
buckets: exponential!(1, 2, 12)
],
tag_values: get_conn_tags(routers, additional_routes),
tags: http_metrics_tags,
unit: {:native, :millisecond}
),
# Capture response payload size information
distribution(
metric_prefix ++ [:http, :response, :size, :bytes],
event_name: @stop_event,
description: "The size of the HTTP response payload.",
reporter_options: [
buckets: exponential!(1, 4, 12)
],
measurement: fn _measurements, metadata ->
case metadata.conn.resp_body do
nil -> 0
_ -> :erlang.iolist_size(metadata.conn.resp_body)
end
end,
tag_values: get_conn_tags(routers, additional_routes),
tags: http_metrics_tags,
unit: :byte
),
# Capture the number of requests that have been serviced
counter(
metric_prefix ++ [:http, :requests, :total],
event_name: @stop_event,
description: "The number of requests have been serviced.",
tag_values: get_conn_tags(routers, additional_routes),
tags: http_metrics_tags
)
]
)
end
defp channel_events(metric_prefix) do
Event.build(
:phoenix_channel_event_metrics,
[
# Capture the number of channel joins that have occurred
counter(
metric_prefix ++ [:channel, :joined, :total],
event_name: [:phoenix, :channel_joined],
description: "The number of channel joins that have occurred.",
tag_values: fn %{result: result, socket: %Socket{transport: transport, endpoint: endpoint}} ->
%{
transport: transport,
result: result,
endpoint: normalize_module_name(endpoint)
}
end,
tags: [:result, :transport, :endpoint]
),
# Capture channel handle_in duration
distribution(
metric_prefix ++ [:channel, :handled_in, :duration, :milliseconds],
event_name: [:phoenix, :channel_handled_in],
measurement: :duration,
description: "The time it takes for the application to respond to channel messages.",
reporter_options: [
buckets: exponential!(1, 2, 12)
],
tag_values: fn %{socket: %Socket{endpoint: endpoint}} ->
%{
endpoint: normalize_module_name(endpoint)
}
end,
tags: [:endpoint],
unit: {:native, :millisecond}
)
]
)
end
defp socket_events(metric_prefix) do
Event.build(
:phoenix_socket_event_metrics,
[
# Capture socket connection duration
distribution(
metric_prefix ++ [:socket, :connected, :duration, :milliseconds],
event_name: [:phoenix, :socket_connected],
measurement: :duration,
description: "The time it takes for the application to establish a socket connection.",
reporter_options: [
buckets: exponential!(1, 2, 12)
],
tag_values: fn %{result: result, endpoint: endpoint, transport: transport} ->
%{
transport: transport,
result: result,
endpoint: normalize_module_name(endpoint)
}
end,
tags: [:result, :transport, :endpoint],
unit: {:native, :millisecond}
)
]
)
end
defp get_conn_tags(routers, []) do
fn
%{conn: %Conn{} = conn} ->
default_route_tags = %{
path: "Unknown",
controller: "Unknown",
action: "Unknown"
}
conn
|> do_get_router_info(routers, default_route_tags)
|> Map.merge(%{
status: conn.status,
method: conn.method
})
_ ->
# TODO: Change this to warning as warn is deprecated as of Elixir 1.11
Logger.warn("Could not resolve path for request")
end
end
defp get_conn_tags(routers, additional_routes) do
fn
%{conn: %Conn{} = conn} ->
default_route_tags = handle_additional_routes_check(conn, additional_routes)
conn
|> do_get_router_info(routers, default_route_tags)
|> Map.merge(%{
status: conn.status,
method: conn.method
})
_ ->
# TODO: Change this to warning as warn is deprecated as of Elixir 1.11
Logger.warn("Could not resolve path for request")
end
end
defp do_get_router_info(conn, routers, default_route_tags) do
routers
|> Enum.find_value(default_route_tags, fn router ->
case Phoenix.Router.route_info(router, conn.method, conn.request_path, "") do
:error ->
false
%{route: path, plug: controller, plug_opts: action} ->
%{
path: path,
controller: normalize_module_name(controller),
action: normalize_action(action)
}
end
end)
end
defp handle_additional_routes_check(%Conn{request_path: request_path}, additional_routes) do
default_tags = %{
path: "Unknown",
controller: "Unknown",
action: "Unknown"
}
additional_routes
|> Enum.find_value(default_tags, fn {path_label, route_check} ->
cond do
is_binary(route_check) and route_check == request_path ->
%{
path: path_label,
controller: "NA",
action: "NA"
}
match?(%Regex{}, route_check) and Regex.match?(route_check, request_path) ->
%{
path: path_label,
controller: "NA",
action: "NA"
}
true ->
false
end
end)
end
defp set_up_telemetry_proxy(phoenix_event_prefixes) do
phoenix_event_prefixes
|> Enum.each(fn telemetry_prefix ->
stop_event = telemetry_prefix ++ [:stop]
:telemetry.attach(
[:prom_ex, :phoenix, :proxy] ++ telemetry_prefix,
stop_event,
&__MODULE__.handle_proxy_phoenix_event/4,
%{}
)
end)
end
@doc false
def handle_proxy_phoenix_event(_event_name, event_measurement, event_metadata, _config) do
:telemetry.execute(@stop_event, event_measurement, event_metadata)
end
defp normalize_module_name(name) when is_atom(name) do
name
|> Atom.to_string()
|> String.trim_leading("Elixir.")
end
defp normalize_module_name(name), do: name
defp normalize_action(action) when is_atom(action), do: action
defp normalize_action(_action), do: "Unknown"
defp fetch_additional_routes!(opts) do
opts
|> fetch_either!(:router, :endpoints)
|> case do
endpoints when is_list(endpoints) ->
endpoints
|> Enum.flat_map(fn
{_endpoint, endpoint_opts} ->
Keyword.get(endpoint_opts, :additional_routes, [])
end)
|> MapSet.new()
|> MapSet.to_list()
_router ->
Keyword.get(opts, :additional_routes, [])
end
end
defp fetch_event_prefixes!(opts) do
opts
|> fetch_either!(:router, :endpoints)
|> case do
endpoints when is_list(endpoints) ->
endpoints
|> Enum.map(fn
{_endpoint, endpoint_opts} ->
Keyword.get(endpoint_opts, :event_prefix, [:phoenix, :endpoint])
end)
_router ->
[Keyword.get(opts, :event_prefix, [:phoenix, :endpoint])]
end
|> MapSet.new()
|> MapSet.to_list()
end
defp fetch_routers!(opts) do
opts
|> fetch_either!(:router, :endpoints)
|> case do
endpoints when is_list(endpoints) ->
endpoints
|> Enum.flat_map(fn
{_endpoint, endpoint_opts} ->
endpoint_opts
|> Keyword.fetch!(:routers)
end)
|> MapSet.new()
|> MapSet.to_list()
router ->
[router]
end
end
defp fetch_either!(keywordlist, key1, key2) do
case {Keyword.has_key?(keywordlist, key1), Keyword.has_key?(keywordlist, key2)} do
{true, _} ->
keywordlist[key1]
{false, true} ->
keywordlist[key2]
{false, false} ->
raise KeyError, "Neither #{inspect(key1)} nor #{inspect(key2)} found in #{inspect(keywordlist)}"
end
end
end
else
defmodule PromEx.Plugins.Phoenix do
@moduledoc false
use PromEx.Plugin
@impl true
def event_metrics(_opts) do
PromEx.Plugin.no_dep_raise(__MODULE__, "Phoenix")
end
end
end
| 32.139535 | 126 | 0.593009 |
93ed02800f2c366cf9d10883bf3de68d24c21cfc | 534 | exs | Elixir | test/fake_server/server/access_test.exs | TakteS/fake_server | c982457977acb1aee491a5d5102e68eed84873f2 | [
"Apache-2.0"
] | 72 | 2016-09-20T21:45:33.000Z | 2021-06-10T17:35:35.000Z | test/fake_server/server/access_test.exs | TakteS/fake_server | c982457977acb1aee491a5d5102e68eed84873f2 | [
"Apache-2.0"
] | 44 | 2016-09-20T18:31:36.000Z | 2020-05-31T17:31:00.000Z | test/fake_server/server/access_test.exs | TakteS/fake_server | c982457977acb1aee491a5d5102e68eed84873f2 | [
"Apache-2.0"
] | 21 | 2016-09-22T08:27:07.000Z | 2021-11-09T21:29:39.000Z | defmodule FakeServer.Server.AccessTest do
use ExUnit.Case
alias FakeServer.Server.Access
test "compute access for a route" do
{:ok, server} = Access.start_link()
assert Access.compute_access(server, %FakeServer.Request{
path: "/test",
method: "PUT",
headers: %{},
body: ""
})
assert Access.access_list(server) == [
%FakeServer.Request{path: "/test", method: "PUT", headers: %{}, body: ""}
]
Access.stop(server)
end
end
| 24.272727 | 86 | 0.565543 |
93ed1970b5aa7d7ad3fba29daed2f85a4aaa8ead | 3,080 | ex | Elixir | lib/ibmcloud/crn.ex | Bhaskers-Blu-Org1/elixir-ibmcloud | 28639020a3e5c222356329917b3fb37c5da3eb42 | [
"Apache-2.0"
] | 1 | 2020-01-13T19:30:37.000Z | 2020-01-13T19:30:37.000Z | lib/ibmcloud/crn.ex | IBM/elixir-ibmcloud | b57888b18006ffaadb6fbc2adf9685e30c86c082 | [
"Apache-2.0"
] | null | null | null | lib/ibmcloud/crn.ex | IBM/elixir-ibmcloud | b57888b18006ffaadb6fbc2adf9685e30c86c082 | [
"Apache-2.0"
] | 1 | 2020-06-29T15:04:34.000Z | 2020-06-29T15:04:34.000Z | defmodule IBMCloud.CRN do
@moduledoc """
Cloud Resource Name.
- [Cloud Resource Names](https://cloud.ibm.com/docs/overview/crn.html)
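  A minimal sketch (the segment values below are illustrative):

      {:ok, crn} = IBMCloud.CRN.parse("crn:v1:bluemix:public:cloudantnosqldb:us-south:a/abc123:::")
      crn.scope
      #=> {:account, "abc123"}

      IBMCloud.CRN.to_string(crn)
      #=> "crn:v1:bluemix:public:cloudantnosqldb:us-south:a/abc123:::"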
"""
@type scope() :: {:account | :organization | :space, binary()} | nil
@type t :: %__MODULE__{
version: binary(),
cname: binary(),
ctype: binary(),
service_name: binary() | nil,
location: binary() | nil,
          scope: scope(),
service_instance: binary() | nil,
resource_type: binary() | nil,
resource: binary() | nil
}
defstruct version: "v1",
cname: "bluemix",
ctype: "public",
service_name: nil,
location: nil,
scope: nil,
service_instance: nil,
resource_type: nil,
resource: nil
def parse(binary) when is_binary(binary) do
case String.split(binary, ":") do
[
"crn",
version,
cname,
ctype,
service_name,
location,
scope,
service_instance,
resource_type,
resource
] ->
{:ok,
%__MODULE__{
version: parse_raw(:version, version),
cname: parse_raw(:cname, cname),
ctype: parse_raw(:ctype, ctype),
service_name: parse_raw(:service_name, service_name),
location: parse_raw(:location, location),
scope: parse_raw(:scope, scope),
service_instance: parse_raw(:service_instance, service_instance),
resource_type: parse_raw(:resource_type, resource_type),
resource: parse_raw(:resource, resource)
}}
_ ->
{:error, "invalid CRN"}
end
end
def to_string(%__MODULE__{
version: version,
cname: cname,
ctype: ctype,
service_name: service_name,
location: location,
scope: scope,
service_instance: service_instance,
resource_type: resource_type,
resource: resource
}) do
[
"crn",
raw_to_string(:version, version),
raw_to_string(:cname, cname),
raw_to_string(:ctype, ctype),
raw_to_string(:service_name, service_name),
raw_to_string(:location, location),
raw_to_string(:scope, scope),
raw_to_string(:service_instance, service_instance),
raw_to_string(:resource_type, resource_type),
raw_to_string(:resource, resource)
]
|> Enum.join(":")
end
def parse_scope("a/" <> val), do: {:account, val}
def parse_scope("o/" <> val), do: {:organization, val}
def parse_scope("s/" <> val), do: {:space, val}
defp parse_raw(_field, ""), do: nil
defp parse_raw(:scope, val), do: parse_scope(val)
defp parse_raw(_field, val), do: val
def scope_to_string({:account, val}), do: "a/" <> val
def scope_to_string({:organization, val}), do: "o/" <> val
def scope_to_string({:space, val}), do: "s/" <> val
defp raw_to_string(_field, nil), do: ""
defp raw_to_string(:scope, val), do: scope_to_string(val)
defp raw_to_string(_field, val), do: val
end
| 29.615385 | 76 | 0.57987 |
93ed1a10f6304b9abbaf13624a7a0505d6d5f442 | 376 | exs | Elixir | examples/profiling/mix.exs | SergeTupchiy/nonuniform_rand | c6ddd4be2b6c06707ca54f4a98cdbe401e4968b2 | [
"MIT"
] | null | null | null | examples/profiling/mix.exs | SergeTupchiy/nonuniform_rand | c6ddd4be2b6c06707ca54f4a98cdbe401e4968b2 | [
"MIT"
] | null | null | null | examples/profiling/mix.exs | SergeTupchiy/nonuniform_rand | c6ddd4be2b6c06707ca54f4a98cdbe401e4968b2 | [
"MIT"
] | null | null | null | defmodule Profiling.MixProject do
use Mix.Project
def project do
[
app: :profiling,
version: "0.1.0",
elixir: "~> 1.7",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
def application do
[
extra_applications: [:logger]
]
end
defp deps do
[
{:nonuniform_rand, path: "../../"}
]
end
end
| 14.461538 | 42 | 0.529255 |
93ed4887c8ddf58de1ab5457a29d6610d917f382 | 642 | ex | Elixir | web/router.ex | chrhsmt/hello_phoenix | 18c719cab56827847ddcf676df5141d1cf5e420b | [
"MIT"
] | null | null | null | web/router.ex | chrhsmt/hello_phoenix | 18c719cab56827847ddcf676df5141d1cf5e420b | [
"MIT"
] | 1 | 2018-12-14T08:36:11.000Z | 2018-12-14T08:36:11.000Z | web/router.ex | chrhsmt/hello_phoenix | 18c719cab56827847ddcf676df5141d1cf5e420b | [
"MIT"
] | null | null | null | defmodule HelloPhoenix.Router do
use HelloPhoenix.Web, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", HelloPhoenix do
pipe_through :browser # Use the default browser stack
get "/", PageController, :index
get "/info", PageController, :info
end
scope "/api", HelloPhoenix do
pipe_through :api
resources "/contacts", ContactController
end
# Other scopes may use custom stacks.
# scope "/api", HelloPhoenix do
# pipe_through :api
# end
end
| 20.0625 | 57 | 0.676012 |
93ed6dab18adbfe7b5dbe4b097555db752faabe8 | 1,902 | ex | Elixir | lib/livebook/system_resources.ex | gpopides/livebook | ec6de91674b894c0585adc893133735a590ba8c8 | [
"Apache-2.0"
] | null | null | null | lib/livebook/system_resources.ex | gpopides/livebook | ec6de91674b894c0585adc893133735a590ba8c8 | [
"Apache-2.0"
] | null | null | null | lib/livebook/system_resources.ex | gpopides/livebook | ec6de91674b894c0585adc893133735a590ba8c8 | [
"Apache-2.0"
] | null | null | null | defmodule Livebook.SystemResources do
@moduledoc false
# Periodically computes system resource usage.
@type memory :: %{total: non_neg_integer(), free: non_neg_integer()}
use GenServer
@name __MODULE__
@doc """
Returns system memory.
"""
@spec memory() :: memory()
def memory do
:ets.lookup_element(@name, :memory, 2)
end
@doc """
Subscribes to resource usage updates.
## Messages
* `{:memory_update, memory}`
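  For example (a sketch):

      :ok = Livebook.SystemResources.subscribe()

      receive do
        {:memory_update, %{total: total, free: free}} -> {total, free}
      end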
"""
@spec subscribe() :: :ok | {:error, term()}
def subscribe() do
Phoenix.PubSub.subscribe(Livebook.PubSub, "system_resources")
end
@doc """
Updates the resources kept by this process.
"""
@spec update() :: :ok
def update do
GenServer.cast(@name, :update)
end
@doc false
def start_link(_opts) do
GenServer.start_link(__MODULE__, :ok, name: @name)
end
@impl true
def init(:ok) do
:ets.new(@name, [:set, :named_table, :protected])
measure()
schedule()
{:ok, %{}}
end
@impl true
def handle_info(:measure, state) do
measure()
schedule()
{:noreply, state}
end
@impl true
def handle_cast(:update, state) do
memory = measure()
Phoenix.PubSub.local_broadcast(Livebook.PubSub, "system_resources", {:memory_update, memory})
{:noreply, state}
end
defp measure() do
memory_data = :memsup.get_system_memory_data()
free_memory = free_memory(Map.new(memory_data))
memory = %{total: memory_data[:total_memory], free: free_memory}
:ets.insert(@name, {:memory, memory})
memory
end
defp free_memory(%{available_memory: available}), do: available
defp free_memory(%{cached_memory: cached, buffered_memory: buffered, free_memory: free}) do
cached + buffered + free
end
defp free_memory(%{free_memory: free}), do: free
defp free_memory(_), do: 0
defp schedule() do
Process.send_after(self(), :measure, 15000)
end
end
| 21.613636 | 97 | 0.662986 |
93ed81732e5b6a1e09a907ac74d58fb1865f02cf | 1,475 | exs | Elixir | test/support/test_tcp_tunnel.exs | lucaong/tortoise | fd2f83527937ba39b47f58eb8d392a1aa927e28f | [
"Apache-2.0"
] | 272 | 2018-04-22T22:47:35.000Z | 2022-03-01T05:22:02.000Z | test/support/test_tcp_tunnel.exs | lucaong/tortoise | fd2f83527937ba39b47f58eb8d392a1aa927e28f | [
"Apache-2.0"
] | 120 | 2018-04-22T20:42:04.000Z | 2022-01-20T23:12:13.000Z | test/support/test_tcp_tunnel.exs | lucaong/tortoise | fd2f83527937ba39b47f58eb8d392a1aa927e28f | [
"Apache-2.0"
] | 58 | 2018-04-24T06:28:36.000Z | 2022-02-09T06:55:42.000Z | defmodule Tortoise.Integration.TestTCPTunnel do
@moduledoc """
Create a TCP-tunnel making it possible to use :gen_tcp.send/2-3 to
send to the client_socket and assert on the received data on the
server_socket.
  This works for our Transmitter module, which is handed a TCP socket
  from the Receiver.
"""
use GenServer
defstruct [:socket, :ip, :port]
# Client API
def start_link() do
initial_state = %__MODULE__{}
GenServer.start_link(__MODULE__, initial_state, name: __MODULE__)
end
def new() do
{ref, {ip, port}} = GenServer.call(__MODULE__, :create)
{:ok, client_socket} = :gen_tcp.connect(ip, port, [:binary, active: false])
receive do
{:server_socket, ^ref, server_socket} ->
{:ok, client_socket, server_socket}
after
1000 ->
throw("Could not create TCP test tunnel")
end
end
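  # A minimal usage sketch, assuming the tunnel process was started via
  # `start_link/0` (the payload below is an illustrative assumption):
  #
  #     {:ok, client, server} = Tortoise.Integration.TestTCPTunnel.new()
  #     :ok = :gen_tcp.send(client, "hello")
  #     {:ok, "hello"} = :gen_tcp.recv(server, 0, 500)
  #
  # Both sockets are owned by the calling test process, so data written to
  # `client` can be asserted on by reading from `server`.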
# Server callbacks
def init(state) do
{:ok, socket} = :gen_tcp.listen(0, [:binary, active: false])
{:ok, {ip, port}} = :inet.sockname(socket)
{:ok, %{state | socket: socket, ip: ip, port: port}}
end
def handle_call(:create, {process_pid, ref} = from, state) do
GenServer.reply(from, {ref, {state.ip, state.port}})
    # reply first; this process then blocks in accept/2 until the caller connects
{:ok, server} = :gen_tcp.accept(state.socket, 200)
:ok = :gen_tcp.controlling_process(server, process_pid)
send(process_pid, {:server_socket, ref, server})
{:noreply, state}
end
end
| 30.102041 | 79 | 0.669153 |
93ed95cea1d44da210566289770df2ec96b689d5 | 109 | ex | Elixir | web/views/admin/page_view.ex | soleo/changelog.com | 621c7471b23379e1cdd4a0c960b66ed98d8d1a53 | [
"MIT"
] | null | null | null | web/views/admin/page_view.ex | soleo/changelog.com | 621c7471b23379e1cdd4a0c960b66ed98d8d1a53 | [
"MIT"
] | null | null | null | web/views/admin/page_view.ex | soleo/changelog.com | 621c7471b23379e1cdd4a0c960b66ed98d8d1a53 | [
"MIT"
] | null | null | null | defmodule Changelog.Admin.PageView do
use Changelog.Web, :view
alias Changelog.Admin.NewsletterView
end
| 18.166667 | 38 | 0.807339 |
93edc1e20cf8036abfd0edea71db9f4bda56c97e | 3,203 | exs | Elixir | exercises/concept/file-sniffer/test/file_sniffer_test.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/concept/file-sniffer/test/file_sniffer_test.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/concept/file-sniffer/test/file_sniffer_test.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule FileSnifferTest do
use ExUnit.Case
@bmp_file File.read!(Path.join("assets", "bmp.bmp"))
@gif_file File.read!(Path.join("assets", "gif.gif"))
@jpg_file File.read!(Path.join("assets", "jpeg.jpg"))
@png_file File.read!(Path.join("assets", "png-transparent.png"))
@exe_file File.read!(Path.join("assets", "elf.o"))
describe "get type from extension:" do
@tag task_id: 1
test "bmp" do
assert FileSniffer.type_from_extension("bmp") == "image/bmp"
end
@tag task_id: 1
test "gif" do
assert FileSniffer.type_from_extension("gif") == "image/gif"
end
@tag task_id: 1
test "jpg" do
assert FileSniffer.type_from_extension("jpg") == "image/jpg"
end
@tag task_id: 1
test "png" do
assert FileSniffer.type_from_extension("png") == "image/png"
end
@tag task_id: 1
test "exe" do
assert FileSniffer.type_from_extension("exe") == "application/octet-stream"
end
end
describe "get type from binary:" do
@tag task_id: 2
test "bmp" do
assert FileSniffer.type_from_binary(@bmp_file) == "image/bmp"
end
@tag task_id: 2
test "gif" do
assert FileSniffer.type_from_binary(@gif_file) == "image/gif"
end
@tag task_id: 2
test "jpg" do
assert FileSniffer.type_from_binary(@jpg_file) == "image/jpg"
end
@tag task_id: 2
test "png" do
assert FileSniffer.type_from_binary(@png_file) == "image/png"
end
@tag task_id: 2
test "exe" do
assert FileSniffer.type_from_binary(@exe_file) == "application/octet-stream"
end
end
describe "verify valid files" do
@tag task_id: 3
test "bmp" do
assert FileSniffer.verify(@bmp_file, "bmp") == {:ok, "image/bmp"}
end
@tag task_id: 3
test "gif" do
assert FileSniffer.verify(@gif_file, "gif") == {:ok, "image/gif"}
end
@tag task_id: 3
test "jpg" do
assert FileSniffer.verify(@jpg_file, "jpg") == {:ok, "image/jpg"}
end
@tag task_id: 3
test "png" do
assert FileSniffer.verify(@png_file, "png") == {:ok, "image/png"}
end
@tag task_id: 3
test "exe" do
assert FileSniffer.verify(@exe_file, "exe") == {:ok, "application/octet-stream"}
end
end
describe "reject invalid files" do
@tag task_id: 3
test "bmp" do
assert FileSniffer.verify(@exe_file, "bmp") ==
{:error, "Warning, file format and file extension do not match."}
end
@tag task_id: 3
test "gif" do
assert FileSniffer.verify(@exe_file, "gif") ==
{:error, "Warning, file format and file extension do not match."}
end
@tag task_id: 3
test "jpg" do
assert FileSniffer.verify(@exe_file, "jpg") ==
{:error, "Warning, file format and file extension do not match."}
end
@tag task_id: 3
test "png" do
assert FileSniffer.verify(@exe_file, "png") ==
{:error, "Warning, file format and file extension do not match."}
end
@tag task_id: 3
test "exe" do
assert FileSniffer.verify(@png_file, "exe") ==
{:error, "Warning, file format and file extension do not match."}
end
end
end
| 26.04065 | 86 | 0.614112 |
93ededdf358f60808ae2510d572ebd9a3f1c67dc | 1,559 | ex | Elixir | clients/retail/lib/google_api/retail/v2/model/google_cloud_retail_v2beta_remove_fulfillment_places_metadata.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/retail/lib/google_api/retail/v2/model/google_cloud_retail_v2beta_remove_fulfillment_places_metadata.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/retail/lib/google_api/retail/v2/model/google_cloud_retail_v2beta_remove_fulfillment_places_metadata.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Retail.V2.Model.GoogleCloudRetailV2betaRemoveFulfillmentPlacesMetadata do
@moduledoc """
Metadata related to the progress of the RemoveFulfillmentPlaces operation. Currently empty because there is no meaningful metadata populated from the RemoveFulfillmentPlaces method.
## Attributes
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{}
end
defimpl Poison.Decoder,
for: GoogleApi.Retail.V2.Model.GoogleCloudRetailV2betaRemoveFulfillmentPlacesMetadata do
def decode(value, options) do
GoogleApi.Retail.V2.Model.GoogleCloudRetailV2betaRemoveFulfillmentPlacesMetadata.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Retail.V2.Model.GoogleCloudRetailV2betaRemoveFulfillmentPlacesMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.170213 | 183 | 0.782553 |
93edf095a75c7301151283ff1a6f0bc07c170abf | 702 | ex | Elixir | lib/iona/document.ex | mad42/iona | 784cb4884f8dd158f618c15fa56d914f9be786c2 | [
"Apache-2.0"
] | 12 | 2019-11-26T14:52:53.000Z | 2022-03-15T07:54:26.000Z | lib/iona/document.ex | mad42/iona | 784cb4884f8dd158f618c15fa56d914f9be786c2 | [
"Apache-2.0"
] | 12 | 2019-11-25T17:24:35.000Z | 2021-07-19T17:50:52.000Z | lib/iona/document.ex | mad42/iona | 784cb4884f8dd158f618c15fa56d914f9be786c2 | [
"Apache-2.0"
] | 7 | 2019-11-25T17:25:39.000Z | 2021-05-22T05:07:24.000Z | defmodule Iona.Document do
@moduledoc false
defstruct [:format, :output_path]
@type t :: %__MODULE__{format: Iona.supported_format_t(), output_path: Path.t()}
@spec read(document :: t) :: {:ok, binary} | {:error, File.posix()}
def read(%{output_path: path}), do: File.read(path)
@spec read!(document :: t) :: binary
def read!(%{output_path: path}), do: File.read!(path)
@spec write(document :: t, destination :: Path.t()) :: :ok | {:error, File.posix()}
def write(%{output_path: path}, destination), do: File.cp(path, destination)
@spec write!(document :: t, destination :: Path.t()) :: :ok
def write!(%{output_path: path}, destination), do: File.cp!(path, destination)
end
| 35.1 | 85 | 0.653846 |
93ee150386a58e7c39ea480b5309d9b0a8f76832 | 4,369 | exs | Elixir | test/lib/kitten_blue/jwk/google_test.exs | enerick/elixir-kitten-blue | 4e7884d65c188dfc9afe0efa93fc21deb4237b26 | [
"MIT"
] | null | null | null | test/lib/kitten_blue/jwk/google_test.exs | enerick/elixir-kitten-blue | 4e7884d65c188dfc9afe0efa93fc21deb4237b26 | [
"MIT"
] | null | null | null | test/lib/kitten_blue/jwk/google_test.exs | enerick/elixir-kitten-blue | 4e7884d65c188dfc9afe0efa93fc21deb4237b26 | [
"MIT"
] | null | null | null | defmodule KittenBlue.JWK.GoogleTest do
use ExUnit.Case
doctest KittenBlue.JWK.Google
import Mock
@google_jwks_body """
{
\"keys\": [
{
\"kty\": \"RSA\",
\"alg\": \"RS256\",
\"use\": \"sig\",
\"kid\": \"4ef5118b0800bd60a4194186dcb538fc66e5eb34\",
\"n\": \"4ZSPB8TO7y3xZF_GxB_JSx_yBEtNs0mDilLvesSLLypYmxt4U7Dxk-vLAf1IVRwaZeeqQRIhrKJjljIqd33tVwfAp5PinjUm7lHi-ufZ_VNQw3uJA5_3tmkMWaLcvdRcILFMlVfBcESp-R5mcF6-bMeYH0n3D5CCJKspIqDERD1gQxfVxWDzafyrqkIROXKEtv3rMe7Z9Yc4mBsL02G6dDKVbjSxvkZ14wMykXEnGkfIiTUSiH8Qm1rdniZigPv2Pa2uSnJ94V-tIDHigjkXR7Cfun4Z38KZdSDRNgJr-m41Pu-plX98j59iGvVyaKP24ZbukGIJRPHYn06xkQeoWw\",
\"e\": \"AQAB\"
},
{
\"kty\": \"RSA\",
\"alg\": \"RS256\",
\"use\": \"sig\",
\"kid\": \"4129db2ea1860d2e871ee48506287fb05b04ca3f\",
\"n\": \"sxorUSxfZZjQL1mDr1rtbNGJE9lbVMiBmNZFqLhnQaefTfqMO3YgSlb_cptw5wS2Dn4phGNzjBaO1Hg5572mEqsmPl5z9MmybIOuqWXxYyIiCGWH3hoR2VPJ-1bN-SdszHb4ZWadXCCYqnHS216nrvHZK8vJyQ7XCchw43O00LC5Iwi2eKspQEj8YDQSZFsd7Mp2ULhKXVPyKeLH06aenBZZFwgjw8bow7MXS4uUkg4NOeH2iHNxclOYycg6Z87QrTVzHGBo9r-6s1XRTFh-rqcZC8RnR62wkPqB2AEHctOof_ZtaaDTZ1Xw7db8dRhhCnFkpiK_1d8c9N2Vm7Frxw\",
\"e\": \"AQAB\"
}
]
}
"""
test "fetch!" do
with_mock HTTPoison,
get: fn _ ->
{:ok, %HTTPoison.Response{status_code: 200, body: @google_jwks_body}}
end do
assert KittenBlue.JWK.Google.fetch!() ==
[
%KittenBlue.JWK{
alg: "RS256",
key: %JOSE.JWK{
fields: %{
"alg" => "RS256",
"kid" => "4ef5118b0800bd60a4194186dcb538fc66e5eb34",
"use" => "sig"
},
keys: :undefined,
kty:
{:jose_jwk_kty_rsa,
{:RSAPublicKey,
28_476_875_648_721_430_364_188_748_069_991_806_407_446_391_450_373_045_237_923_762_311_151_009_162_921_226_253_790_824_442_505_385_585_760_732_916_607_116_438_838_248_229_723_204_601_135_715_042_657_593_479_636_219_565_745_251_068_995_383_455_309_324_246_029_645_697_720_081_638_829_054_979_172_310_837_551_569_227_970_489_185_383_795_840_331_251_273_817_798_301_830_005_422_761_396_312_919_579_380_879_507_526_326_553_332_110_468_129_972_850_911_213_822_427_291_482_233_788_412_930_127_022_336_316_623_384_602_807_587_333_085_533_862_008_937_303_422_811_962_712_539_911_685_812_228_824_633_770_949_283_643_459_223_554_618_469_656_343_403_152_537_435_626_750_336_345_544_118_558_104_593_194_249_902_094_334_930_123_144_035_611_712_340_631_611_229_262_692_299_252_575_582_560_206_205_278_645_742_069_502_946_607_521_835_099,
65537}}
},
kid: "4ef5118b0800bd60a4194186dcb538fc66e5eb34"
},
%KittenBlue.JWK{
alg: "RS256",
key: %JOSE.JWK{
fields: %{
"alg" => "RS256",
"kid" => "4129db2ea1860d2e871ee48506287fb05b04ca3f",
"use" => "sig"
},
keys: :undefined,
kty:
{:jose_jwk_kty_rsa,
{:RSAPublicKey,
22_609_561_106_030_035_864_482_994_811_877_141_824_726_126_803_777_462_187_648_248_944_200_098_073_331_236_741_294_232_586_553_300_034_895_012_108_018_434_924_729_133_961_311_183_119_141_914_600_651_954_926_309_301_332_274_897_870_471_122_898_299_742_307_430_511_282_554_878_657_777_308_136_016_225_973_120_369_034_252_221_550_856_774_547_365_225_662_288_681_658_668_758_322_854_479_413_570_330_389_061_522_515_472_701_665_508_175_326_183_008_659_994_223_993_772_082_779_679_322_909_193_619_920_243_402_323_372_013_460_399_491_079_488_825_891_466_897_860_506_499_022_502_474_569_809_346_311_328_649_517_115_778_556_011_555_295_770_068_761_196_334_945_203_754_564_406_086_391_916_223_828_979_710_434_236_708_178_064_890_005_597_548_004_735_093_378_516_718_512_005_576_664_304_324_911_503_655_030_914_082_941_717_001_104_327,
65537}}
},
kid: "4129db2ea1860d2e871ee48506287fb05b04ca3f"
}
]
end
end
end
| 58.253333 | 848 | 0.700847 |
93ee21dbf8c744fba86ab611acadd6bc7ff804bb | 1,359 | ex | Elixir | clients/api_gateway/lib/google_api/api_gateway/v1alpha2/model/apigateway_cancel_operation_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/api_gateway/lib/google_api/api_gateway/v1alpha2/model/apigateway_cancel_operation_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/api_gateway/lib/google_api/api_gateway/v1alpha2/model/apigateway_cancel_operation_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.APIGateway.V1alpha2.Model.ApigatewayCancelOperationRequest do
@moduledoc """
The request message for Operations.CancelOperation.
## Attributes
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{}
end
defimpl Poison.Decoder, for: GoogleApi.APIGateway.V1alpha2.Model.ApigatewayCancelOperationRequest do
def decode(value, options) do
GoogleApi.APIGateway.V1alpha2.Model.ApigatewayCancelOperationRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.APIGateway.V1alpha2.Model.ApigatewayCancelOperationRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.357143 | 100 | 0.779249 |
93ee24a1b909cda5fd035422d9e5e35e243d06eb | 1,960 | ex | Elixir | deps/plug_wait1/lib/plug/adapters/wait1/handler.ex | conorfoley/dota_hero_combos | d75a3f0673fc1f1d0845c9d5c692c0605d3b445d | [
"MIT"
] | null | null | null | deps/plug_wait1/lib/plug/adapters/wait1/handler.ex | conorfoley/dota_hero_combos | d75a3f0673fc1f1d0845c9d5c692c0605d3b445d | [
"MIT"
] | null | null | null | deps/plug_wait1/lib/plug/adapters/wait1/handler.ex | conorfoley/dota_hero_combos | d75a3f0673fc1f1d0845c9d5c692c0605d3b445d | [
"MIT"
] | null | null | null | defmodule Plug.Adapters.Wait1.Handler do
@moduledoc false
alias :cowboy_req, as: Request
alias Plug.Adapters.Wait1.Protocol
def init({transport, :http}, req, {plug, opts, onconnection}) when transport in [:tcp, :ssl] do
case Request.header("upgrade", req) do
{upgrade, _} when upgrade in ["Websocket", "websocket"] ->
case onconnection.(req) do
{:ok, req} ->
case Request.parse_header("sec-websocket-protocol", req) do
{:ok, protocols, req} when is_list(protocols) ->
req = select_protocol(protocols, req)
{:upgrade, :protocol, :cowboy_websocket, req, {plug, opts}}
{:ok, _, req} ->
{:upgrade, :protocol, :cowboy_websocket, req, {plug, opts}}
end
{:halt, req} ->
{:shutdown, req, {plug, opts, onconnection}}
end
_ ->
{:upgrade, :protocol, Protocol, req, {transport, plug, opts}}
end
end
defdelegate websocket_init(transport, req, opts), to: Protocol
defdelegate websocket_handle(msg, req, state), to: Protocol
defdelegate websocket_info(msg, req, state), to: Protocol
defdelegate websocket_terminate(reason, req, state), to: Protocol
defp select_protocol([], req) do
req
end
defp select_protocol(["wait1" | rest], req) do
select_protocol(rest, Request.set_resp_header("sec-websocket-protocol", "wait1", req))
end
defp select_protocol([<<"wait1|t", token :: binary>> | rest], req) do
headers = [{"authorization", "Bearer " <> token} | Request.get(:headers, req)]
select_protocol(rest, Request.set([headers: headers], req))
end
defp select_protocol([<<"wait1|b", basic :: binary>> | rest], req) do
headers = [{"authorization", "Basic " <> basic} | Request.get(:headers, req)]
select_protocol(rest, Request.set([headers: headers], req))
end
defp select_protocol([_ | rest], req) do
select_protocol(rest, req)
end
end
| 39.2 | 97 | 0.632143 |
93ee95b8183a37a33f39b383756a035c214e0f41 | 1,546 | ex | Elixir | clients/elixir/generated/lib/swaggy_jenkins/model/branch_impl.ex | PankTrue/swaggy-jenkins | aca35a7cca6e1fcc08bd399e05148942ac2f514b | [
"MIT"
] | 23 | 2017-08-01T12:25:26.000Z | 2022-01-25T03:44:11.000Z | clients/elixir/generated/lib/swaggy_jenkins/model/branch_impl.ex | PankTrue/swaggy-jenkins | aca35a7cca6e1fcc08bd399e05148942ac2f514b | [
"MIT"
] | 35 | 2017-06-14T03:28:15.000Z | 2022-02-14T10:25:54.000Z | clients/elixir/generated/lib/swaggy_jenkins/model/branch_impl.ex | PankTrue/swaggy-jenkins | aca35a7cca6e1fcc08bd399e05148942ac2f514b | [
"MIT"
] | 11 | 2017-08-31T19:00:20.000Z | 2021-12-19T12:04:12.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule SwaggyJenkins.Model.BranchImpl do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:"_class",
:"displayName",
:"estimatedDurationInMillis",
:"fullDisplayName",
:"fullName",
:"name",
:"organization",
:"parameters",
:"permissions",
:"weatherScore",
:"pullRequest",
:"_links",
:"latestRun"
]
@type t :: %__MODULE__{
:"_class" => String.t,
:"displayName" => String.t,
:"estimatedDurationInMillis" => integer(),
:"fullDisplayName" => String.t,
:"fullName" => String.t,
:"name" => String.t,
:"organization" => String.t,
:"parameters" => [StringParameterDefinition],
:"permissions" => BranchImplpermissions,
:"weatherScore" => integer(),
:"pullRequest" => String.t,
:"_links" => BranchImpllinks,
:"latestRun" => PipelineRunImpl
}
end
defimpl Poison.Decoder, for: SwaggyJenkins.Model.BranchImpl do
import SwaggyJenkins.Deserializer
def decode(value, options) do
value
|> deserialize(:"parameters", :list, SwaggyJenkins.Model.StringParameterDefinition, options)
|> deserialize(:"permissions", :struct, SwaggyJenkins.Model.BranchImplpermissions, options)
|> deserialize(:"_links", :struct, SwaggyJenkins.Model.BranchImpllinks, options)
|> deserialize(:"latestRun", :struct, SwaggyJenkins.Model.PipelineRunImpl, options)
end
end
| 28.109091 | 96 | 0.666235 |
93ee9b1b8b1dfaa8b2f644750e4f9c60f80df423 | 195 | exs | Elixir | emporium/test/controllers/page_controller_test.exs | sbezugliy/couchdb_elixir | 2fdcb1617918324315a2217b2d1567e9a2c68558 | [
"Apache-2.0"
] | null | null | null | emporium/test/controllers/page_controller_test.exs | sbezugliy/couchdb_elixir | 2fdcb1617918324315a2217b2d1567e9a2c68558 | [
"Apache-2.0"
] | null | null | null | emporium/test/controllers/page_controller_test.exs | sbezugliy/couchdb_elixir | 2fdcb1617918324315a2217b2d1567e9a2c68558 | [
"Apache-2.0"
] | null | null | null | defmodule Emporium.PageControllerTest do
use Emporium.ConnCase
test "GET /", %{conn: conn} do
conn = get conn, "/"
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 21.666667 | 60 | 0.676923 |
93eecae4ef424ee097728827d0d59bd3de692718 | 1,606 | ex | Elixir | clients/cloud_debugger/lib/google_api/cloud_debugger/v2/model/git_source_context.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/cloud_debugger/lib/google_api/cloud_debugger/v2/model/git_source_context.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/cloud_debugger/lib/google_api/cloud_debugger/v2/model/git_source_context.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudDebugger.V2.Model.GitSourceContext do
@moduledoc """
A GitSourceContext denotes a particular revision in a third party Git
repository (e.g. GitHub).
## Attributes
* `revisionId` (*type:* `String.t`, *default:* `nil`) - Git commit hash.
required.
* `url` (*type:* `String.t`, *default:* `nil`) - Git repository URL.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:revisionId => String.t(),
:url => String.t()
}
field(:revisionId)
field(:url)
end
defimpl Poison.Decoder, for: GoogleApi.CloudDebugger.V2.Model.GitSourceContext do
def decode(value, options) do
GoogleApi.CloudDebugger.V2.Model.GitSourceContext.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudDebugger.V2.Model.GitSourceContext do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 30.884615 | 81 | 0.720423 |
93eee1a8516030e979216d3b0ee14804820f0395 | 2,029 | ex | Elixir | apps/gitgud_web/lib/gitgud_web/views/helpers/navigation_helpers.ex | chulkilee/gitgud | 7a9b1023ff986ca08fb821a5e7658904a6061ba3 | [
"MIT"
] | null | null | null | apps/gitgud_web/lib/gitgud_web/views/helpers/navigation_helpers.ex | chulkilee/gitgud | 7a9b1023ff986ca08fb821a5e7658904a6061ba3 | [
"MIT"
] | null | null | null | apps/gitgud_web/lib/gitgud_web/views/helpers/navigation_helpers.ex | chulkilee/gitgud | 7a9b1023ff986ca08fb821a5e7658904a6061ba3 | [
"MIT"
] | null | null | null | defmodule GitGud.Web.NavigationHelpers do
@moduledoc """
Conveniences for routing and navigation.
"""
import Phoenix.HTML.Tag
import Phoenix.Controller, only: [controller_module: 1, action_name: 1]
import GitGud.Web.Router, only: [__routes__: 0]
@doc """
  Returns `true` if `conn` matches the given route `helper`; otherwise returns `false`.
"""
@spec current_route?(Plug.Conn.t, atom, []) :: boolean
def current_route?(conn, helper, action \\ [])
def current_route?(conn, helper, []) do
controller_module(conn) == helper_controller(helper)
end
@spec current_route?(Plug.Conn.t, atom, [only: [atom]]) :: boolean
def current_route?(conn, helper, only: actions) when is_list(actions) do
current_route?(conn, helper) && action_name(conn) in actions
end
@spec current_route?(Plug.Conn.t, atom, [except: [atom]]) :: boolean
def current_route?(conn, helper, except: actions) when is_list(actions) do
current_route?(conn, helper) && action_name(conn) not in actions
end
@spec current_route?(Plug.Conn.t, atom, atom) :: boolean
def current_route?(conn, helper, action) when is_atom(action) do
current_route?(conn, helper) && action_name(conn) == action
end
@doc """
Renders a navigation item for the given `helper` and `action`.
"""
@spec navigation_item(Plug.Conn.t, atom, keyword | atom, atom, keyword, [do: term]) :: binary
def navigation_item(conn, helper, action \\ [], tag \\ :li, attrs \\ [], [do: block]) do
class = "is-active"
attrs = if current_route?(conn, helper, action),
do: Keyword.update(attrs, :class, class, &("#{&1} #{class}")),
else: attrs
content_tag(tag, block, attrs)
end
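  # A minimal usage sketch in a template; the helper name and label are
  # illustrative assumptions:
  #
  #     <%= navigation_item @conn, :user_profile do %>
  #       Profile
  #     <% end %>
  #
  # The item renders as an `<li>` and gains the "is-active" class whenever
  # `current_route?/3` matches the current request.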
#
# Helpers
#
for route <- Enum.uniq_by(Enum.filter(__routes__(), &is_binary(&1.helper)), &(&1.helper)) do
helper = String.to_atom(route.helper)
defp helper_controller(unquote(helper)), do: unquote(route.plug)
end
defp helper_controller(helper), do: raise ArgumentError, message: "invalid helper #{inspect helper}"
end
| 34.982759 | 102 | 0.682602 |
93eef701d66b8e527ed607552fe447c72b05d419 | 323 | ex | Elixir | lib/jumubase_web/views/generators/pdf_generator/engine.ex | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
] | 2 | 2019-01-20T07:03:30.000Z | 2019-04-11T10:20:14.000Z | lib/jumubase_web/views/generators/pdf_generator/engine.ex | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
] | 6 | 2018-09-20T05:52:14.000Z | 2019-04-23T19:27:39.000Z | lib/jumubase_web/views/generators/pdf_generator/engine.ex | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
] | null | null | null | defmodule JumubaseWeb.PDFGenerator.Engine do
@type performance :: Jumubase.Showtime.Performance.t()
@type contest :: Jumubase.Foundation.Contest.t()
@callback jury_sheets([performance], integer) :: binary
@callback jury_table([performance]) :: binary
@callback certificates([performance], contest) :: binary
end
| 35.888889 | 58 | 0.758514 |
93ef107198c5750b2cb1d60af76b67e092690635 | 274 | exs | Elixir | test/tic_tac_toe_web/views/layout_view_test.exs | bryanchun/tic_tac_toe | cad397ce72857669a685337911c7ae692cbd1695 | [
"MIT"
] | null | null | null | test/tic_tac_toe_web/views/layout_view_test.exs | bryanchun/tic_tac_toe | cad397ce72857669a685337911c7ae692cbd1695 | [
"MIT"
] | 1 | 2021-05-22T02:09:13.000Z | 2021-05-22T02:09:13.000Z | test/tic_tac_toe_web/views/layout_view_test.exs | devrafaelantunes/tic_tac_toe_liveview | 9055aff27b45e5d34898bb57018c5a90ea6a8c9e | [
"MIT"
] | null | null | null | defmodule TicTacToeWeb.LayoutViewTest do
use TicTacToeWeb.ConnCase, async: true
# When testing helpers, you may want to import Phoenix.HTML and
# use functions such as safe_to_string() to convert the helper
# result into an HTML string.
# import Phoenix.HTML
end
| 30.444444 | 65 | 0.770073 |
93ef1c8f946a4e30c3ef18571a4af0368514445c | 1,437 | ex | Elixir | lib/cartel/pusher/gcm.ex | lucacorti/cartel | da86c1428b2c5ce1d8859c40e74e3499bf09dfb4 | [
"MIT"
] | 17 | 2016-03-09T19:44:55.000Z | 2022-01-31T20:23:28.000Z | lib/cartel/pusher/gcm.ex | lucacorti/cartel | da86c1428b2c5ce1d8859c40e74e3499bf09dfb4 | [
"MIT"
] | 18 | 2016-03-09T21:10:52.000Z | 2020-11-17T09:05:17.000Z | lib/cartel/pusher/gcm.ex | lucacorti/cartel | da86c1428b2c5ce1d8859c40e74e3499bf09dfb4 | [
"MIT"
] | 3 | 2016-03-11T22:08:11.000Z | 2016-05-19T05:32:17.000Z | defmodule Cartel.Pusher.Gcm do
@moduledoc """
Google GCM interface worker
"""
use GenServer
use Cartel.Pusher, message_module: Cartel.Message.Gcm
alias Cartel.HTTP
alias HTTP.{Request, Response}
@gcm_server_url "https://gcm-http.googleapis.com/gcm/send"
@doc """
Starts the pusher
"""
@spec start_link(%{key: String.t()}) :: GenServer.on_start()
def start_link(args), do: GenServer.start_link(__MODULE__, args, [])
@impl Cartel.Pusher
def handle_push(pid, message, payload) do
GenServer.call(pid, {:push, message, payload})
end
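  # A minimal usage sketch; the API key is an illustrative assumption, with
  # `message` and `payload` built via `Cartel.Message.Gcm`:
  #
  #     {:ok, pid} = Cartel.Pusher.Gcm.start_link(%{key: "GCM_API_KEY"})
  #     :ok = Cartel.Pusher.Gcm.handle_push(pid, message, payload)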
@impl GenServer
def init(conf), do: {:ok, conf}
@impl GenServer
def handle_call({:push, _message, payload}, _from, state) do
request =
@gcm_server_url
|> Request.new("POST")
|> Request.set_body(payload)
|> Request.put_header({"content-type", "application/json"})
|> Request.put_header({"authorization", "key=" <> state[:key]})
case HTTP.request(%HTTP{}, request) do
{:ok, _, %Response{status: code}} when code >= 400 ->
{:reply, {:error, :unauthorized}, state}
{:ok, _, %Response{body: body}} ->
case Jason.decode!(body) do
%{"results" => [%{"message_id" => _id}]} ->
{:reply, :ok, state}
%{"results" => [%{"error" => error}]} ->
{:reply, {:error, error}, state}
end
      {:error, reason} ->
        # Reply with the error tuple; a bare `{:error, reason}` is not a valid
        # `handle_call/3` return value and would crash the pusher.
        {:reply, {:error, reason}, state}
end
end
end
| 26.127273 | 70 | 0.597773 |
93ef237ab4a31d8e5b333fb2184fa4c9cecbf95f | 244 | ex | Elixir | lib/hl7/2.5/datatypes/vid.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.5/datatypes/vid.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.5/datatypes/vid.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | defmodule HL7.V2_5.DataTypes.Vid do
@moduledoc false
alias HL7.V2_5.{DataTypes}
use HL7.DataType,
fields: [
version_id: nil,
internationalization_code: DataTypes.Ce,
international_version_id: DataTypes.Ce
]
end
| 20.333333 | 46 | 0.704918 |
93ef2c2ad232639e459d0de0890be53ef33513f3 | 2,255 | ex | Elixir | lib/ex_venture/release_tasks.ex | christhekeele/ex_venture | 4f4b329f50a133e219969f9823144a4cb9bf738d | [
"MIT"
] | 610 | 2017-08-09T15:20:25.000Z | 2022-03-27T15:49:07.000Z | lib/ex_venture/release_tasks.ex | christhekeele/ex_venture | 4f4b329f50a133e219969f9823144a4cb9bf738d | [
"MIT"
] | 69 | 2017-09-23T04:02:30.000Z | 2022-03-19T21:08:21.000Z | lib/ex_venture/release_tasks.ex | christhekeele/ex_venture | 4f4b329f50a133e219969f9823144a4cb9bf738d | [
"MIT"
] | 85 | 2017-09-23T04:07:11.000Z | 2021-11-20T06:44:56.000Z | # Loosely from https://github.com/bitwalker/distillery/blob/master/docs/Running%20Migrations.md
defmodule ExVenture.ReleaseTasks do
@moduledoc false
@start_apps [
:crypto,
:ssl,
:postgrex,
:ecto,
:ecto_sql,
:bamboo,
:ranch,
:phoenix,
:gettext
]
@repos [
ExVenture.Repo
]
def startup() do
IO.puts("Loading ex_venture...")
# Load the code for ex_venture, but don't start it
Application.load(:ex_venture)
IO.puts("Starting dependencies..")
# Start apps necessary for executing migrations
Enum.each(@start_apps, &Application.ensure_all_started/1)
# Start the Repo(s) for ex_venture
IO.puts("Starting repos..")
Enum.each(@repos, & &1.start_link(pool_size: 2))
end
def startup_extra() do
{:ok, _pid} = Web.Endpoint.start_link()
{:ok, _pid} = ExVenture.Config.Cache.start_link([])
end
end
defmodule ExVenture.ReleaseTasks.Migrate do
@moduledoc """
Migrate the database
"""
alias ExVenture.ReleaseTasks
alias ExVenture.Repo
@apps [
:ex_venture
]
@doc """
Migrate the database
"""
def run() do
ReleaseTasks.startup()
Enum.each(@apps, &run_migrations_for/1)
IO.puts("Success!")
end
def priv_dir(app), do: "#{:code.priv_dir(app)}"
defp run_migrations_for(app) do
IO.puts("Running migrations for #{app}")
Ecto.Migrator.run(Repo, migrations_path(app), :up, all: true)
end
defp migrations_path(app), do: Path.join([priv_dir(app), "repo", "migrations"])
end
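# A minimal invocation sketch from a release shell; the binary name and
# `eval` support depend on the release tooling (Distillery or mix release)
# and are assumptions here:
#
#     bin/ex_venture eval "ExVenture.ReleaseTasks.Migrate.run()"
#     bin/ex_venture eval "ExVenture.ReleaseTasks.Seeds.run()"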
defmodule ExVenture.ReleaseTasks.Seeds do
@moduledoc """
Seed the database
NOTE: This should only be used in docker compose
"""
alias ExVenture.ReleaseTasks
@apps [
:ex_venture
]
@doc """
  Seed the database
"""
def run() do
ReleaseTasks.startup()
ReleaseTasks.startup_extra()
Enum.each(@apps, &run_seeds_for/1)
IO.puts("Success!")
end
def priv_dir(app), do: :code.priv_dir(app)
defp run_seeds_for(app) do
# Run the seed script if it exists
seed_script = seeds_path(app)
if File.exists?(seed_script) do
IO.puts("Running seed script..")
Code.eval_file(seed_script)
end
end
defp seeds_path(app), do: Path.join([priv_dir(app), "repo", "seeds.exs"])
end
| 20.5 | 95 | 0.661641 |
93ef3661d022dc84104f1350f72a17de1a07f326 | 198 | ex | Elixir | lib/phone/gy.ex | net/phone | 18e1356d2f8d32fe3f95638c3c44bceab0164fb2 | [
"Apache-2.0"
] | null | null | null | lib/phone/gy.ex | net/phone | 18e1356d2f8d32fe3f95638c3c44bceab0164fb2 | [
"Apache-2.0"
] | null | null | null | lib/phone/gy.ex | net/phone | 18e1356d2f8d32fe3f95638c3c44bceab0164fb2 | [
"Apache-2.0"
] | null | null | null | defmodule Phone.GY do
@moduledoc false
use Helper.Country
def regex, do: ~r/^(592)()(.{8})/
def country, do: "Guyana"
def a2, do: "GY"
def a3, do: "GUY"
matcher :regex, ["592"]
end
| 15.230769 | 35 | 0.59596 |
93ef3ecbfec490ce996c675eb24d01b11d169727 | 2,103 | ex | Elixir | lib/ex_twilio/config.ex | john-griffin/ex_twilio | 75c018094fbfafdd140c9a90d6306b8d952c1a75 | [
"MIT"
] | null | null | null | lib/ex_twilio/config.ex | john-griffin/ex_twilio | 75c018094fbfafdd140c9a90d6306b8d952c1a75 | [
"MIT"
] | null | null | null | lib/ex_twilio/config.ex | john-griffin/ex_twilio | 75c018094fbfafdd140c9a90d6306b8d952c1a75 | [
"MIT"
] | null | null | null | defmodule ExTwilio.Config do
@moduledoc """
Stores configuration variables used to communicate with Twilio's API.
All settings also accept `{:system, "ENV_VAR_NAME"}` to read their
values from environment variables at runtime.
"""
@doc """
Returns the Twilio Account SID. Set it in `mix.exs`:
config :ex_twilio, account_sid: "YOUR_ACCOUNT_SID"
"""
def account_sid, do: from_env(:ex_twilio, :account_sid)
@doc """
Returns the Twilio Auth Token for your account. Set it in `mix.exs`:
config :ex_twilio, auth_token: "YOUR_AUTH_TOKEN"
"""
def auth_token, do: from_env(:ex_twilio, :auth_token)
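  # A minimal configuration sketch; the environment variable names are
  # assumptions, and any setting may use the `{:system, "VAR"}` form
  # described in the moduledoc:
  #
  #     config :ex_twilio,
  #       account_sid: {:system, "TWILIO_ACCOUNT_SID"},
  #       auth_token: {:system, "TWILIO_AUTH_TOKEN"}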
@doc """
Returns the domain of the Twilio API. This will default to "api.twilio.com",
but can be overridden using the following setting in `mix.exs`:
config :ex_twilio, api_domain: "other.twilio.com"
"""
def api_domain, do: from_env(:ex_twilio, :api_domain, "api.twilio.com")
@doc """
Returns the version of the API that ExTwilio is going to talk to. Set it in
`mix.exs`:
config :ex_twilio, api_version: "2015-05-06"
"""
def api_version, do: Application.get_env(:ex_twilio, :api_version) || "2010-04-01"
def workspace_sid, do: Application.get_env(:ex_twilio, :workspace_sid) || "12345"
@doc """
Return the combined base URL of the Twilio API, using the configuration
settings given.
"""
def base_url(), do: "https://#{api_domain()}/#{api_version()}"
def task_router_url(), do: "https://taskrouter.twilio.com/v1"
def task_router_websocket_base_url(), do: "https://event-bridge.twilio.com/v1/wschannels"
def programmable_chat_url(), do: "https://chat.twilio.com/v2"
@doc """
A light wrapper around `Application.get_env/2`, providing automatic support for
`{:system, "VAR"}` tuples.
"""
def from_env(otp_app, key, default \\ nil)
def from_env(otp_app, key, default) do
otp_app
|> Application.get_env(key, default)
|> read_from_system(default)
end
defp read_from_system({:system, env}, default), do: System.get_env(env) || default
defp read_from_system(value, _default), do: value
end
| 31.863636 | 91 | 0.697575 |
93ef505c9b69022d8ae668dcba199d35a184568b | 364 | exs | Elixir | .credo.exs | facto/tgdb | 35f1fa4e9c78c7bb8d3138314bfd0dd07e49b16d | [
"MIT"
] | 2 | 2018-04-09T07:34:37.000Z | 2020-03-08T06:50:03.000Z | .credo.exs | tomasz-tomczyk/igdb | 98d777798503f427e6c302da0251e819b9583c65 | [
"MIT"
] | null | null | null | .credo.exs | tomasz-tomczyk/igdb | 98d777798503f427e6c302da0251e819b9583c65 | [
"MIT"
] | null | null | null | %{
configs: [
%{
name: "default",
files: %{
included: ~w{config lib test}
},
strict: true,
color: true,
checks: [
{Credo.Check.Readability.MaxLineLength, max_length: 80},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs, false},
{Credo.Check.Refactor.MapInto, false}
]
}
]
}
| 20.222222 | 68 | 0.543956 |
93ef5e3a5c6bc1563556d7b0d73d70cfcb319be1 | 1,804 | exs | Elixir | clients/slides/mix.exs | EVLedger/elixir-google-api | 61edef19a5e2c7c63848f7030c6d8d651e4593d4 | [
"Apache-2.0"
] | null | null | null | clients/slides/mix.exs | EVLedger/elixir-google-api | 61edef19a5e2c7c63848f7030c6d8d651e4593d4 | [
"Apache-2.0"
] | null | null | null | clients/slides/mix.exs | EVLedger/elixir-google-api | 61edef19a5e2c7c63848f7030c6d8d651e4593d4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Slides.Mixfile do
use Mix.Project
@version "0.12.1"
def project() do
[
app: :google_api_slides,
version: @version,
elixir: "~> 1.6",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/slides"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:google_gax, "~> 0.2"},
{:ex_doc, "~> 0.16", only: :dev}
]
end
defp description() do
"""
Google Slides API client library. Reads and writes Google Slides presentations.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching", "Daniel Azuma"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/slides",
"Homepage" => "https://developers.google.com/slides/"
}
]
end
end
| 26.925373 | 97 | 0.65133 |
93ef7df14658e3f5f044706a290a913cf08ab819 | 942 | ex | Elixir | lib/ex_mustang/responders/urban_dictionary.ex | theodowling/ex_mustang | 0bf0d96c0e33e8291eb4d911bfdcdc57902fa78d | [
"Apache-2.0"
] | 69 | 2016-08-17T06:50:58.000Z | 2021-01-26T00:43:34.000Z | lib/ex_mustang/responders/urban_dictionary.ex | onixus74/ex_mustang | e7c8ae51027d717c20784abad76b846198a980b9 | [
"Apache-2.0"
] | 55 | 2016-08-16T01:05:39.000Z | 2020-10-08T15:53:35.000Z | lib/ex_mustang/responders/urban_dictionary.ex | onixus74/ex_mustang | e7c8ae51027d717c20784abad76b846198a980b9 | [
"Apache-2.0"
] | 10 | 2017-01-20T19:45:43.000Z | 2020-09-30T20:32:23.000Z | defmodule ExMustang.Responders.UrbanDictionary do
@moduledoc """
  Grabs the word of the day from Urban Dictionary
"""
use Hedwig.Responder
import ExMustang.Utils, only: [useragent: 0]
@feed_url "http://feeds.urbandictionary.com/UrbanWordOfTheDay"
@usage """
urbandictionary | udict - Gets word of the day from urbandictionary
"""
hear ~r/^udict$/i, msg do
reply(msg, get_ub_wotd())
end
hear ~r/^urbandictionary$/i, msg do
reply(msg, get_ub_wotd())
end
def get_ub_wotd() do
case HTTPoison.get(@feed_url, [useragent()]) do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
case Fiet.parse(body) do
{:ok, %Fiet.Feed{items: items}} ->
"Word of the day is `#{hd(items).title}`\n#{hd(items).link}"
_ ->
"Urban Dictionary not available right now"
end
_ ->
"Urban Dictionary not available right now"
end
end
end
| 24.789474 | 72 | 0.629512 |
93ef97fb9616aaed57134471bde81a793eb4ca9a | 65 | ex | Elixir | lib/store_card_web/views/page_view.ex | manojsamanta/stripe-store-card | 5acc474240fe0eb85cfa8a3ef1d696d12694ad14 | [
"MIT"
] | null | null | null | lib/store_card_web/views/page_view.ex | manojsamanta/stripe-store-card | 5acc474240fe0eb85cfa8a3ef1d696d12694ad14 | [
"MIT"
] | null | null | null | lib/store_card_web/views/page_view.ex | manojsamanta/stripe-store-card | 5acc474240fe0eb85cfa8a3ef1d696d12694ad14 | [
"MIT"
] | null | null | null | defmodule StoreCardWeb.PageView do
use StoreCardWeb, :view
end
| 16.25 | 34 | 0.815385 |
93efd00aa2939e9e06fda16ded99042416dec058 | 406 | ex | Elixir | lib/lightbulb.ex | thomasvolk/lightbulb | f8d512c62073f814846762093076cf16a1e100be | [
"Apache-2.0"
] | null | null | null | lib/lightbulb.ex | thomasvolk/lightbulb | f8d512c62073f814846762093076cf16a1e100be | [
"Apache-2.0"
] | null | null | null | lib/lightbulb.ex | thomasvolk/lightbulb | f8d512c62073f814846762093076cf16a1e100be | [
"Apache-2.0"
] | null | null | null | defmodule Lightbulb do
use Application
def start(_type, []) do
Lightbulb.Supervisor.start_link()
end
def get_nodes() do
Lightbulb.Registry.get_nodes()
end
def subscribe() do
Lightbulb.Registry.subscribe()
end
def unsubscribe() do
Lightbulb.Registry.unsubscribe()
end
end
defmodule Lightbulb.IpAddress do
def to_string(ip), do: Kernel.to_string(:inet.ntoa(ip))
end
| 16.24 | 57 | 0.714286 |
93f00466c9fa3df6ddf323e612dd148f33158f95 | 1,859 | ex | Elixir | lib/ratatouille/renderer/element/progress_bar.ex | CyberFlameGO/ratatouille | cc7b6a37e0b1757cd89cc5084343814a79dd86dc | [
"MIT"
] | 504 | 2019-01-13T21:53:21.000Z | 2022-03-31T20:58:21.000Z | lib/ratatouille/renderer/element/progress_bar.ex | iboard/ratatouille | cc7b6a37e0b1757cd89cc5084343814a79dd86dc | [
"MIT"
] | 28 | 2019-01-26T21:00:23.000Z | 2021-12-28T19:06:15.000Z | lib/ratatouille/renderer/element/progress_bar.ex | iboard/ratatouille | cc7b6a37e0b1757cd89cc5084343814a79dd86dc | [
"MIT"
] | 21 | 2019-02-21T09:08:27.000Z | 2021-12-20T15:51:10.000Z | defmodule Ratatouille.Renderer.Element.ProgressBar do
@moduledoc false
@behaviour Ratatouille.Renderer
alias ExTermbox.Position
alias Ratatouille.Renderer.{Box, Canvas, Element, Text}
@fill_char "█"
def render(%Canvas{render_box: box} = canvas, %Element{attributes: attrs}, _render_fn) do
width = Box.width(box)
bar_pieces = build_bar(width, attrs)
bar_pieces
|> Enum.reduce(canvas, fn {shift, str, attrs}, %Canvas{render_box: box} = canvas ->
position = Position.translate_x(box.top_left, shift)
Text.render(canvas, position, str, attrs)
end)
|> Canvas.consume_rows(1)
end
defp build_bar(width, %{percentage: percentage} = attrs) do
percentage =
percentage
|> max(0)
|> min(100)
text_position = Map.get(attrs, :text_position, :right)
text_color = Map.get(attrs, :text_color, :default)
on_color = Map.get(attrs, :on_color, :default)
off_color = Map.get(attrs, :off_color, :default)
width = if(text_position == :none, do: width, else: width - 6)
on = ceil(width / 100.0 * percentage)
off = width - on
on_string = String.duplicate(@fill_char, on)
off_string =
case off_color do
:default -> ""
_ -> String.duplicate(@fill_char, off)
end
case text_position do
:none ->
[
{0, on_string, color: on_color},
{on, off_string, color: off_color}
]
:left ->
[
{0, String.pad_leading("#{percentage} % ", 6), color: text_color},
{6, on_string, color: on_color},
{6 + on, off_string, color: off_color}
]
_ ->
[
{0, on_string, color: on_color},
{on, off_string, color: off_color},
{on + off, String.pad_leading("#{percentage} %", 6), color: text_color}
]
end
end
end
| 26.557143 | 91 | 0.602474 |
93f00ccea7a58cdfaf0786d4527b9201a3d39f8a | 95 | exs | Elixir | apps/tai/test/tai/venue_adapters/bitmex/transform_position_test.exs | chrism2671/tai-1 | 847827bd23908adfad4a82c83d5295bdbc022796 | [
"MIT"
] | 1 | 2019-12-19T05:16:26.000Z | 2019-12-19T05:16:26.000Z | apps/tai/test/tai/venue_adapters/bitmex/transform_position_test.exs | CalebOts/tai | 1603bfefa390eac89085ba18fd259b2e9fcac6c0 | [
"MIT"
] | null | null | null | apps/tai/test/tai/venue_adapters/bitmex/transform_position_test.exs | CalebOts/tai | 1603bfefa390eac89085ba18fd259b2e9fcac6c0 | [
"MIT"
] | 1 | 2020-05-03T23:32:11.000Z | 2020-05-03T23:32:11.000Z | defmodule Tai.VenueAdapters.Bitmex.TransformPositionTest do
use ExUnit.Case, async: true
end
| 23.75 | 59 | 0.831579 |
93f01317963d0a6b0062b527ba6f09c74d86de98 | 2,323 | exs | Elixir | triangle/test/triangle_test.exs | rapidfireworks/exercism.ex | 7739c60db0510099fe8d37fd6bd76eee37623d05 | [
"MIT"
] | null | null | null | triangle/test/triangle_test.exs | rapidfireworks/exercism.ex | 7739c60db0510099fe8d37fd6bd76eee37623d05 | [
"MIT"
] | null | null | null | triangle/test/triangle_test.exs | rapidfireworks/exercism.ex | 7739c60db0510099fe8d37fd6bd76eee37623d05 | [
"MIT"
] | null | null | null | defmodule TriangleTest do
use ExUnit.Case
# @tag :pending
test "equilateral triangles have equal sides" do
assert Triangle.kind(2, 2, 2) == {:ok, :equilateral}
end
# @tag :pending
test "larger equilateral triangles also have equal sides" do
assert Triangle.kind(10, 10, 10) == {:ok, :equilateral}
end
# @tag :pending
test "isosceles triangles have last two sides equal" do
assert Triangle.kind(3, 4, 4) == {:ok, :isosceles}
end
# @tag :pending
test "isosceles triangles have first and last sides equal" do
assert Triangle.kind(4, 3, 4) == {:ok, :isosceles}
end
# @tag :pending
test "isosceles triangles have two first sides equal" do
assert Triangle.kind(4, 4, 3) == {:ok, :isosceles}
end
# @tag :pending
test "isosceles triangles have in fact exactly two sides equal" do
assert Triangle.kind(10, 10, 2) == {:ok, :isosceles}
end
# @tag :pending
test "scalene triangles have no equal sides" do
assert Triangle.kind(3, 4, 5) == {:ok, :scalene}
end
# @tag :pending
test "scalene triangles have no equal sides at a larger scale too" do
assert Triangle.kind(10, 11, 12) == {:ok, :scalene}
end
# @tag :pending
test "scalene triangles have no equal sides in descending order either" do
assert Triangle.kind(5, 4, 2) == {:ok, :scalene}
end
# @tag :pending
test "very small triangles are legal" do
assert Triangle.kind(0.4, 0.6, 0.3) == {:ok, :scalene}
end
# @tag :pending
test "triangles with no size are illegal" do
assert Triangle.kind(0, 0, 0) == {:error, "all side lengths must be positive"}
end
# @tag :pending
test "triangles with negative sides are illegal" do
assert Triangle.kind(3, 4, -5) == {:error, "all side lengths must be positive"}
end
# @tag :pending
test "triangles violating triangle inequality are illegal" do
assert Triangle.kind(1, 1, 3) == {:error, "side lengths violate triangle inequality"}
end
# @tag :pending
test "triangles violating triangle inequality are illegal 2" do
assert Triangle.kind(2, 4, 2) == {:error, "side lengths violate triangle inequality"}
end
# @tag :pending
test "triangles violating triangle inequality are illegal 3" do
assert Triangle.kind(7, 3, 2) == {:error, "side lengths violate triangle inequality"}
end
end
| 29.405063 | 89 | 0.671976 |
93f014ca1f138cc4bf31fe2842491b3ef8b61413 | 1,630 | exs | Elixir | src/074/problem074.exs | fredericojordan/project-euler | 75c3f519d5a6ad610362b6904f8fa4d1cde05448 | [
"MIT"
] | 9 | 2018-05-06T04:43:08.000Z | 2020-12-01T20:51:34.000Z | src/074/problem074.exs | fredericojordan/project-euler | 75c3f519d5a6ad610362b6904f8fa4d1cde05448 | [
"MIT"
] | null | null | null | src/074/problem074.exs | fredericojordan/project-euler | 75c3f519d5a6ad610362b6904f8fa4d1cde05448 | [
"MIT"
] | null | null | null | #!/usr/bin/env elixir
defmodule Problem074 do
@moduledoc """
The number 145 is well known for the property that the sum of the factorial of its digits is equal to 145:
1! + 4! + 5! = 1 + 24 + 120 = 145
Perhaps less well known is 169, in that it produces the longest chain of numbers that link back to 169; it turns out
that there are only three such loops that exist:
169 → 363601 → 1454 → 169
871 → 45361 → 871
872 → 45362 → 872
It is not difficult to prove that EVERY starting number will eventually get stuck in a loop. For example,
69 → 363600 → 1454 → 169 → 363601 (→ 1454)
78 → 45360 → 871 → 45361 (→ 871)
540 → 145 (→ 145)
Starting with 69 produces a chain of five non-repeating terms, but the longest non-repeating chain with a starting
number below one million is sixty terms.
How many chains, with a starting number below one million, contain exactly sixty non-repeating terms?
"""
defp factorial(0), do: 1
defp factorial(x) when is_integer(x) and x > 0, do: x * factorial(x-1)
defp factorial_digit_sum(x) do
x
|> Integer.digits()
|> Enum.map(&factorial/1)
|> Enum.sum()
end
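  # Worked example for the chain given in the moduledoc: for 169,
  #   1! + 6! + 9! = 1 + 720 + 362880 = 363601
  # and for 363601,
  #   3! + 6! + 3! + 6! + 0! + 1! = 6 + 720 + 6 + 720 + 1 + 1 = 1454,
  # whose digit factorials sum back to 169 (1! + 4! + 5! + 4! = 169).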
defp factorial_digit_sum_loop(x) do
x
|> Stream.iterate(&factorial_digit_sum/1)
|> Enum.reduce_while([], fn x, acc ->
if Enum.member?(acc, x) do
{:halt, acc}
else
{:cont, acc ++ [x]}
end
end)
end
def solve do
1..1_000_000
|> Stream.map(&factorial_digit_sum_loop/1)
|> Stream.map(&Enum.count/1)
|> Stream.filter(&(&1 == 60))
|> Enum.count()
end
end
IO.puts Problem074.solve
| 27.627119 | 118 | 0.641718 |
93f03b054b3a8fc0c7b0ef14d838b0f47ad3c429 | 591 | exs | Elixir | bench/shortuuid_bench.exs | gpedic/ecto_shortuuid | 8e6be9b3853b1bf27173a6359aecd23776f4b112 | [
"MIT"
] | 23 | 2019-02-22T09:22:45.000Z | 2022-03-18T15:27:43.000Z | bench/shortuuid_bench.exs | gpedic/ecto_shortuuid | 8e6be9b3853b1bf27173a6359aecd23776f4b112 | [
"MIT"
] | 1 | 2019-12-12T14:45:10.000Z | 2020-02-20T21:21:53.000Z | bench/shortuuid_bench.exs | gpedic/ecto_shortuuid | 8e6be9b3853b1bf27173a6359aecd23776f4b112 | [
"MIT"
] | 3 | 2019-11-28T18:31:07.000Z | 2021-02-21T17:09:06.000Z | defmodule EctoShortUUIDBench do
use Benchfella
bench "generate/0" do
Ecto.ShortUUID.generate()
nil
end
# bench "bingenerate/0" do
# Ecto.ShortUUID.bingenerate()
# nil
# end
bench "cast/1 UUID" do
Ecto.ShortUUID.cast("0160280f-1d70-1568-b097-7bdca2801de3")
end
bench "cast/1 ShortUUID" do
Ecto.ShortUUID.cast("ggAZXFogjWbVWpbbQ4DxF2")
end
bench "dump/1" do
Ecto.ShortUUID.dump("ggAZXFogjWbVWpbbQ4DxF2")
end
bench "load/1" do
Ecto.ShortUUID.load(<<1, 96, 40, 15, 29, 112, 21, 104, 176, 151, 123, 220, 162, 128, 29, 227>>)
end
end | 20.37931 | 99 | 0.666667 |
93f041e9042bf6bf18a51101d4372d2b64cea673 | 822 | exs | Elixir | test/yourbot_web/controllers/user_session_controller_test.exs | ConnorRigby/yourbot | eea40e63b0f93963ed14b7efab9ecbe898ab11dd | [
"Apache-2.0"
] | 3 | 2021-11-08T15:19:19.000Z | 2021-11-11T03:18:35.000Z | test/yourbot_web/controllers/user_session_controller_test.exs | ConnorRigby/yourbot | eea40e63b0f93963ed14b7efab9ecbe898ab11dd | [
"Apache-2.0"
] | null | null | null | test/yourbot_web/controllers/user_session_controller_test.exs | ConnorRigby/yourbot | eea40e63b0f93963ed14b7efab9ecbe898ab11dd | [
"Apache-2.0"
] | null | null | null | defmodule YourBotWeb.UserSessionControllerTest do
use YourBotWeb.ConnCase, async: true
import YourBot.AccountsFixtures
setup do
%{user: user_fixture()}
end
describe "DELETE /users/log_out" do
test "logs the user out", %{conn: conn, user: user} do
conn = conn |> log_in_user(user) |> delete(Routes.user_session_path(conn, :delete))
assert redirected_to(conn) == "/"
refute get_session(conn, :user_token)
assert get_flash(conn, :info) =~ "Logged out successfully"
end
test "succeeds even if the user is not logged in", %{conn: conn} do
conn = delete(conn, Routes.user_session_path(conn, :delete))
assert redirected_to(conn) == "/"
refute get_session(conn, :user_token)
assert get_flash(conn, :info) =~ "Logged out successfully"
end
end
end
| 31.615385 | 89 | 0.681265 |
93f0b28e0a9879b1996a921a8ca1946fab6cb5cb | 778 | ex | Elixir | test/support/channel_case.ex | futurice/tribevibe-server | d51fa08528e6391ee50c98adf8696ca35af2c53d | [
"MIT"
] | null | null | null | test/support/channel_case.ex | futurice/tribevibe-server | d51fa08528e6391ee50c98adf8696ca35af2c53d | [
"MIT"
] | null | null | null | test/support/channel_case.ex | futurice/tribevibe-server | d51fa08528e6391ee50c98adf8696ca35af2c53d | [
"MIT"
] | null | null | null | defmodule TribevibeWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common datastructures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
# The default endpoint for testing
@endpoint TribevibeWeb.Endpoint
end
end
setup _tags do
:ok
end
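  # A minimal usage sketch; the test module and test names are illustrative
  # assumptions:
  #
  #     defmodule TribevibeWeb.MyChannelTest do
  #       use TribevibeWeb.ChannelCase
  #
  #       test "joins a topic" do
  #         # socket/2 and subscribe_and_join/3 are provided by Phoenix.ChannelTest
  #       end
  #     end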
end
| 22.882353 | 58 | 0.727506 |
93f0d301e617fe8ac921c2328c5aca45d2396a11 | 394 | ex | Elixir | lib/media_sample/guardian_serializer.ex | kenta-aktsk/media_sample | a0368236bd81c18ac136a54aa7383332a5e3cf36 | [
"MIT"
] | 99 | 2016-04-19T11:11:57.000Z | 2021-12-12T14:38:02.000Z | lib/media_sample/guardian_serializer.ex | ogamw/media_sample | a0368236bd81c18ac136a54aa7383332a5e3cf36 | [
"MIT"
] | 48 | 2016-04-06T02:28:46.000Z | 2016-05-31T06:56:56.000Z | lib/media_sample/guardian_serializer.ex | ogamw/media_sample | a0368236bd81c18ac136a54aa7383332a5e3cf36 | [
"MIT"
] | 32 | 2016-04-19T11:12:00.000Z | 2021-03-25T18:19:52.000Z | defmodule MediaSample.GuardianSerializer do
@behaviour Guardian.Serializer
alias MediaSample.{Repo, User}
def for_token(user = %User{}), do: {:ok, "User:#{user.id}"}
def for_token(_), do: {:error, "Unknown resource type"}
def from_token("User:" <> id), do: {:ok, User |> User.valid |> Repo.get(String.to_integer(id))}
def from_token(_), do: {:error, "Unknown resource type"}
end
| 32.833333 | 97 | 0.680203 |
93f0ff491d24f7a25b991f1f705342ca79f43510 | 2,043 | ex | Elixir | clients/data_labeling/lib/google_api/data_labeling/v1beta1/model/google_cloud_datalabeling_v1alpha1_label_image_oriented_bounding_box_operation_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/data_labeling/lib/google_api/data_labeling/v1beta1/model/google_cloud_datalabeling_v1alpha1_label_image_oriented_bounding_box_operation_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/data_labeling/lib/google_api/data_labeling/v1beta1/model/google_cloud_datalabeling_v1alpha1_label_image_oriented_bounding_box_operation_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1alpha1LabelImageOrientedBoundingBoxOperationMetadata do
@moduledoc """
Details of a LabelImageOrientedBoundingBox operation metadata.
## Attributes
* `basicConfig` (*type:* `GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1alpha1HumanAnnotationConfig.t`, *default:* `nil`) - Basic human annotation config.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:basicConfig =>
GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1alpha1HumanAnnotationConfig.t()
| nil
}
field(:basicConfig,
as: GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1alpha1HumanAnnotationConfig
)
end
defimpl Poison.Decoder,
for:
GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1alpha1LabelImageOrientedBoundingBoxOperationMetadata do
def decode(value, options) do
GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1alpha1LabelImageOrientedBoundingBoxOperationMetadata.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for:
GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1alpha1LabelImageOrientedBoundingBoxOperationMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.224138 | 174 | 0.780225 |
93f141416bd6aacafc9475f435dae960182abc4a | 1,023 | ex | Elixir | RAEM/raem/test/support/conn_case.ex | pedromcorreia/Rumo-ao-ensino-superior | be0b9bf417604bdf8a349fde8a8a1c0aaf4c4cdb | [
"MIT"
] | null | null | null | RAEM/raem/test/support/conn_case.ex | pedromcorreia/Rumo-ao-ensino-superior | be0b9bf417604bdf8a349fde8a8a1c0aaf4c4cdb | [
"MIT"
] | null | null | null | RAEM/raem/test/support/conn_case.ex | pedromcorreia/Rumo-ao-ensino-superior | be0b9bf417604bdf8a349fde8a8a1c0aaf4c4cdb | [
"MIT"
] | 2 | 2018-02-24T19:56:21.000Z | 2018-02-26T00:16:41.000Z | defmodule RaemWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common datastructures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
import RaemWeb.Router.Helpers
# The default endpoint for testing
@endpoint RaemWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Raem.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Raem.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 26.230769 | 66 | 0.715543 |
93f145417d9d40c29474b33d268890212a62d565 | 328 | exs | Elixir | test/controllers/page_controller_test.exs | kupferwerk/ci_visuals | 95b6d56b074d6dbc0aad33764fb1dae38f1e3822 | [
"MIT"
] | null | null | null | test/controllers/page_controller_test.exs | kupferwerk/ci_visuals | 95b6d56b074d6dbc0aad33764fb1dae38f1e3822 | [
"MIT"
] | null | null | null | test/controllers/page_controller_test.exs | kupferwerk/ci_visuals | 95b6d56b074d6dbc0aad33764fb1dae38f1e3822 | [
"MIT"
] | null | null | null | defmodule CiVisuals.PageControllerTest do
use CiVisuals.ConnCase
test "Socket" do
color = Colors.HSL.white
rgb_color = Conversions.hsl_to_rgb color
s = Socket.TCP.connect! "localhost", 8888
    Enum.each 1..60, fn _x ->
Socket.Stream.send! s, <<rgb_color.r, rgb_color.g, rgb_color.b>>
end
end
end
| 21.866667 | 70 | 0.685976 |
93f1458fd672fb09ab23cbdef9222d87e288fafb | 107 | ex | Elixir | lib/dark_dev.ex | dark-elixir/dark_dev | 496681639161cff1f6e17c87bf924ab38846ea05 | [
"Apache-2.0"
] | null | null | null | lib/dark_dev.ex | dark-elixir/dark_dev | 496681639161cff1f6e17c87bf924ab38846ea05 | [
"Apache-2.0"
] | null | null | null | lib/dark_dev.ex | dark-elixir/dark_dev | 496681639161cff1f6e17c87bf924ab38846ea05 | [
"Apache-2.0"
] | null | null | null | defmodule DarkDev do
@moduledoc """
Documentation for `DarkDev`.
"""
@moduledoc since: "1.0.0"
end
| 15.285714 | 30 | 0.654206 |
93f15607bebbb9e38f134012637d6802e0d9e02e | 337 | ex | Elixir | lib/event/supervisor.ex | ammbot/event-nanny | 101608149b78a1b70728a3c2ec3996359b0ac760 | [
"Apache-2.0"
] | 4 | 2016-04-23T19:37:25.000Z | 2016-09-04T03:10:57.000Z | lib/event/supervisor.ex | ammbot/event-nanny | 101608149b78a1b70728a3c2ec3996359b0ac760 | [
"Apache-2.0"
] | null | null | null | lib/event/supervisor.ex | ammbot/event-nanny | 101608149b78a1b70728a3c2ec3996359b0ac760 | [
"Apache-2.0"
] | null | null | null | defmodule EventNanny.Event.Supervisor do
use Supervisor
def start_link(args) do
Supervisor.start_link __MODULE__, args, [name: __MODULE__]
end
def init(args) do
children = [worker(EventNanny.Event.Server, [args], restart: :transient)]
opts = [strategy: :simple_one_for_one]
supervise(children, opts)
end
end
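# Usage sketch added for illustration (`listener_args` is a placeholder, not part
# of the original project). With the :simple_one_for_one strategy above no worker
# is started up front: each EventNanny.Event.Server is spawned on demand, and the
# list passed to Supervisor.start_child/2 is appended to the [args] captured in
# init/1.
#
#     {:ok, _sup} = EventNanny.Event.Supervisor.start_link(listener_args)
#     {:ok, _worker} = Supervisor.start_child(EventNanny.Event.Supervisor, [])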
| 22.466667 | 77 | 0.724036 |
93f17ad888590c78b6bec0a9b2025ef5143aaf4f | 1,322 | exs | Elixir | mix.exs | kianmeng/shippex | 0e4e562bdf1070c3fd39c9fa21bddaf51c9ea718 | [
"MIT"
] | null | null | null | mix.exs | kianmeng/shippex | 0e4e562bdf1070c3fd39c9fa21bddaf51c9ea718 | [
"MIT"
] | null | null | null | mix.exs | kianmeng/shippex | 0e4e562bdf1070c3fd39c9fa21bddaf51c9ea718 | [
"MIT"
] | null | null | null | defmodule Shippex.Mixfile do
use Mix.Project
def project do
[
app: :shippex,
version: "0.9.0",
elixir: "~> 1.9",
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
description: description(),
package: package(),
deps: deps()
]
end
def application do
[extra_applications: [:eex, :logger]]
end
defp description do
"""
Shippex is an abstraction of commonly used features in shipping with various
carriers. It provides a (hopefully) pleasant API to work with carrier-
provided web interfaces for fetching rates and printing shipping labels.
"""
end
defp package do
[
name: :shippex,
files: [
"lib/shippex.ex",
"lib/shippex",
"priv/iso-3166-2.json",
"mix.exs",
"README.md",
"LICENSE"
],
maintainers: ["Nick Kezhaya"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/whitepaperclip/shippex"}
]
end
defp deps do
[
{:ex_doc, ">= 0.0.0", only: :doc},
{:httpoison, ">= 0.0.0"},
{:sweet_xml, ">= 0.0.0"},
{:html_entities, ">= 0.0.0"},
{:jason, "~> 1.2", optional: true},
{:decimal, "~> 1.3"},
{:csv, "~> 2.4", optional: true, only: [:dev]}
]
end
end
| 22.793103 | 80 | 0.54236 |
93f196c0500cc05db8f8dc153f0dd0e5c37f7c9f | 1,960 | ex | Elixir | apps/tai/lib/tai/orders/services/execute_order_callback.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 276 | 2018-01-16T06:36:06.000Z | 2021-03-20T21:48:01.000Z | apps/tai/lib/tai/orders/services/execute_order_callback.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 73 | 2018-10-05T18:45:06.000Z | 2021-02-08T05:46:33.000Z | apps/tai/lib/tai/orders/services/execute_order_callback.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 43 | 2018-06-09T09:54:51.000Z | 2021-03-07T07:35:17.000Z | defmodule Tai.Orders.Services.ExecuteOrderCallback do
require Logger
alias Tai.Orders.{
Order,
OrderCallbackStore,
Transitions
}
@type order :: Order.t()
@type transition :: Transitions.transition()
@spec call(order | nil, order, transition | nil) :: :ok | {:error, :noproc}
def call(previous, current, transition) do
callback_result =
current
|> find_callback
|> execute_callback(previous, current, transition)
|> case do
{:error, :noproc} = error -> error
_ -> :ok
end
broadcast_order_updated(current.client_id, transition)
callback_result
end
defp find_callback(order) do
with {:ok, order_callback} <- OrderCallbackStore.find(order.client_id) do
order_callback.callback
else
{:error, :not_found} ->
fn _, current, _ ->
Logger.warn("order callback not found for client_id: #{current.client_id}")
end
end
end
defp execute_callback(callback, previous, current, transition) when is_function(callback) do
callback.(previous, current, transition)
end
defp execute_callback({dest, data}, previous, current, transition)
when is_atom(dest) or is_pid(dest) do
msg = {:order_updated, previous, current, transition, data}
send_msg(dest, msg)
end
defp execute_callback(dest, previous, current, transition) when is_atom(dest) or is_pid(dest) do
msg = {:order_updated, previous, current, transition}
send_msg(dest, msg)
end
@topic_prefix "order_updated"
defp broadcast_order_updated(client_id, transition) do
topics = ["#{@topic_prefix}:*", "#{@topic_prefix}:#{client_id}"]
msg = {:order_updated, client_id, transition}
topics
|> Enum.each(fn topic ->
Phoenix.PubSub.broadcast(Tai.PubSub, topic, msg)
end)
end
defp send_msg(dest, msg) do
try do
send(dest, msg)
rescue
_e in ArgumentError -> {:error, :noproc}
end
end
end
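# Illustration added for context (not part of the original file; `client_id` is a
# placeholder). The topic format matches @topic_prefix above, so a process can
# follow a single order's transitions by subscribing before the callback fires:
#
#     Phoenix.PubSub.subscribe(Tai.PubSub, "order_updated:#{client_id}")
#     receive do
#       {:order_updated, ^client_id, transition} -> transition
#     end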
| 26.486486 | 98 | 0.666327 |
93f196daaf57ec7fdd15d11e6e7725351fcd307f | 92 | ex | Elixir | lib/hey_cake/mailer.ex | rozerosie/heycake | d080531705c0402fa53696d02307d6c08d25a60a | [
"MIT"
] | 1 | 2020-06-03T07:45:17.000Z | 2020-06-03T07:45:17.000Z | lib/hey_cake/mailer.ex | rozerosie/heycake | d080531705c0402fa53696d02307d6c08d25a60a | [
"MIT"
] | 6 | 2021-06-17T23:13:19.000Z | 2021-08-31T21:15:21.000Z | lib/hey_cake/mailer.ex | rozerosie/heycake | d080531705c0402fa53696d02307d6c08d25a60a | [
"MIT"
] | 2 | 2020-06-03T22:08:06.000Z | 2022-03-11T22:13:36.000Z | defmodule HeyCake.Mailer do
@moduledoc false
use Bamboo.Mailer, otp_app: :hey_cake
end
| 15.333333 | 39 | 0.771739 |
93f1a868a9b2fc2f2fc60de1984e30d0c7da9662 | 137 | exs | Elixir | test/adify/tool/installation_strategy/workflow/op_test.exs | ericsullivan/adify | 25b842498ccfbf612e42d8b46530aad1ffb1af5a | [
"MIT"
] | 5 | 2019-07-14T22:24:32.000Z | 2020-11-25T20:36:11.000Z | test/adify/tool/installation_strategy/workflow/op_test.exs | ericsullivan/adify | 25b842498ccfbf612e42d8b46530aad1ffb1af5a | [
"MIT"
] | 43 | 2018-01-18T15:16:30.000Z | 2021-01-23T22:12:17.000Z | test/adify/tool/installation_strategy/workflow/op_test.exs | ericsullivan/adify | 25b842498ccfbf612e42d8b46530aad1ffb1af5a | [
"MIT"
] | 2 | 2019-05-28T17:50:27.000Z | 2020-03-23T21:00:02.000Z | defmodule Adify.Tool.InstallationStrategy.Workflow.OpTest do
use ExUnit.Case
doctest Adify.Tool.InstallationStrategy.Workflow.Op
end
| 27.4 | 60 | 0.846715 |
93f1d0a29ebc00240e4346596f74f4854d224c45 | 2,404 | ex | Elixir | lib/vapor.ex | cheerfulstoic/vapor | 326a9e8fe0bba706b9726e136b4a51726f9d99f7 | [
"Apache-2.0"
] | null | null | null | lib/vapor.ex | cheerfulstoic/vapor | 326a9e8fe0bba706b9726e136b4a51726f9d99f7 | [
"Apache-2.0"
] | null | null | null | lib/vapor.ex | cheerfulstoic/vapor | 326a9e8fe0bba706b9726e136b4a51726f9d99f7 | [
"Apache-2.0"
] | null | null | null | defmodule Vapor do
@moduledoc """
Vapor provides mechanisms for handling runtime configuration in your system.
"""
alias Vapor.{
Store,
Watch
}
@type key :: String.t() | list()
@type type :: :string | :int | :float | :bool
@type value :: String.t() | integer | float | boolean
@doc """
Fetches a value from the config under the key provided.
## Example
VaporExample.Config.get(:key)
"""
@callback get(key :: key) :: term() | nil
@doc """
Set the value under the key in the store.
## Example
VaporExample.Config.set(:key, "value")
"""
@callback set(key :: key, value :: value) :: {:ok, value}
@doc """
Optional callback. Called when the configuration server starts. Passes the map
of the reified values.
"""
@callback init([{key, value}]) :: :ok
defmacro __using__(_opts) do
quote do
@behaviour Vapor
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :supervisor,
restart: :permanent,
shutdown: 500
}
end
def set(key, value) do
GenServer.call(__MODULE__, {:set, key, value})
end
def get(key) do
case :ets.lookup(__MODULE__, key) do
[] ->
nil
[{^key, value}] ->
value
end
end
def init(_values) do
:ok
end
def handle_change(_values) do
:ok
end
defoverridable [init: 1, handle_change: 1]
end
end
@doc """
Starts a configuration store and any watches.
"""
def start_link(module, config, opts) do
if opts[:name] do
name = Keyword.fetch!(opts, :name)
Supervisor.start_link(__MODULE__, {module, config}, name: :"#{name}_sup")
else
raise Vapor.ConfigurationError, "must supply a `:name` argument"
end
end
@doc """
Stops the configuration store and any watches.
"""
def stop(name) do
Supervisor.stop(:"#{name}_sup")
end
@doc false
def init({module, config}) do
table_opts = [
:set,
:public,
:named_table,
      read_concurrency: true
]
^module = :ets.new(module, table_opts)
children = [
{Watch.Supervisor, [name: Watch.Supervisor.sup_name(module)]},
{Store, {module, config}}
]
Supervisor.init(children, strategy: :one_for_one)
end
end
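# Minimal usage sketch (not from the original source; the module name and the
# `config` value are assumptions — `config` must be whatever plan Vapor.Store
# expects, and `set/2` assumes the Store registers itself under the module name).
# A config module opts in with `use Vapor`, and the store plus its watches are
# started through start_link/3, which requires a :name option:
#
#     defmodule MyApp.Config do
#       use Vapor
#     end
#
#     {:ok, _pid} = Vapor.start_link(MyApp.Config, config, name: MyApp.Config)
#     MyApp.Config.set(:port, "4000")
#     MyApp.Config.get(:port)
#     #=> "4000"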
| 20.904348 | 80 | 0.578203 |
93f1fb4c8fe5539ebbd346e3c0ee4acbb7a594cc | 956 | ex | Elixir | apps/api/web/controllers/pids_controller.ex | elicopter/core | 7731dc7558dea39bd1c473ab9e512c9db9e1b2c9 | [
"MIT"
] | 39 | 2016-11-01T07:21:51.000Z | 2021-02-05T20:19:02.000Z | apps/api/web/controllers/pids_controller.ex | elicopter/core | 7731dc7558dea39bd1c473ab9e512c9db9e1b2c9 | [
"MIT"
] | null | null | null | apps/api/web/controllers/pids_controller.ex | elicopter/core | 7731dc7558dea39bd1c473ab9e512c9db9e1b2c9 | [
"MIT"
] | null | null | null | defmodule Api.PIDsController do
use Api.Web, :controller
@pids [Brain.PitchRatePIDController, Brain.RollRatePIDController, Brain.YawRatePIDController, Brain.PitchAnglePIDController, Brain.RollAnglePIDController]
def index(conn, _params) do
pids = @pids |> Enum.reduce([], fn (pid_controller, acc) ->
{:ok, snapshot} = Brain.PIDController.snapshot(pid_controller);
[snapshot | acc]
end)
render conn, "index.json", %{pids: pids}
end
def show(conn, %{"name" => pid_controller_name}) do
{:ok, snapshot} = Brain.PIDController.snapshot(Module.concat("Brain", pid_controller_name));
render conn, "show.json", %{pid: snapshot}
end
def create(conn, %{"name" => pid_controller_name, "parameter" => parameter, "value" => value} = params) do
:ok = GenServer.cast(Module.concat("Brain", pid_controller_name), {:tune, Map.put(%{}, parameter |> String.to_atom, value)});
render conn, "create.json", %{}
end
end
| 39.833333 | 156 | 0.692469 |
93f25df949a53c789fadefcdf44e72683d3558de | 1,661 | exs | Elixir | mix.exs | exrny/paginator | 40a2cb7f3b6c2a9ba818e78d4d1c98c8efadc616 | [
"MIT"
] | null | null | null | mix.exs | exrny/paginator | 40a2cb7f3b6c2a9ba818e78d4d1c98c8efadc616 | [
"MIT"
] | null | null | null | mix.exs | exrny/paginator | 40a2cb7f3b6c2a9ba818e78d4d1c98c8efadc616 | [
"MIT"
] | null | null | null | defmodule Paginator.Mixfile do
use Mix.Project
@version "1.0.4"
def project do
[
app: :paginator,
version: @version,
elixir: "~> 1.5",
elixirc_options: [warnings_as_errors: System.get_env("CI") == "true"],
elixirc_paths: elixirc_paths(Mix.env()),
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
consolidate_protocols: Mix.env() != :test,
deps: deps(),
# Hex
description: description(),
package: package(),
# Docs
name: "Paginator",
source_url: "https://github.com/duffelhq/paginator",
homepage_url: "https://github.com/duffelhq/paginator",
docs: [
source_ref: "v#{@version}",
main: "Paginator",
canonical: "http://hexdocs.pm/paginator",
source_url: "https://github.com/duffelhq/paginator"
]
]
end
def application do
[extra_applications: [:logger]]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp deps do
[
{:calendar, "~> 1.0.0", only: :test},
{:ecto, "~> 3.0"},
{:ecto_sql, "~> 3.0"},
{:ex_doc, "~> 0.18", only: :dev, runtime: false},
{:ex_machina, "~> 2.1", only: :test},
{:inch_ex, "~> 2.0", only: [:dev, :test]},
{:postgrex, "~> 0.13", optional: true},
{:plug_crypto, "~> 1.2.0"}
]
end
defp description do
"""
Cursor based pagination for Elixir Ecto.
"""
end
defp package do
[
maintainers: ["Steve Domin"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/duffelhq/paginator"}
]
end
end
| 24.072464 | 76 | 0.554485 |
93f2657d9c3e20d40b0667335baa68d7f38bb184 | 13,731 | ex | Elixir | lib/table_rex/renderer/text.ex | namjae/table_rex | 70b2cb8a4d630648055f0ab1b8a9668a42a34087 | [
"MIT"
] | null | null | null | lib/table_rex/renderer/text.ex | namjae/table_rex | 70b2cb8a4d630648055f0ab1b8a9668a42a34087 | [
"MIT"
] | null | null | null | lib/table_rex/renderer/text.ex | namjae/table_rex | 70b2cb8a4d630648055f0ab1b8a9668a42a34087 | [
"MIT"
] | null | null | null | defmodule TableRex.Renderer.Text do
@moduledoc """
Renderer module which handles outputting ASCII-style tables for display.
"""
alias TableRex.Cell
alias TableRex.Table
alias TableRex.Renderer.Text.Meta
@behaviour TableRex.Renderer
# horizontal_styles: [:all, :header, :frame:, :off]
# vertical_styles: [:all, :frame, :off]
# Which horizontal/vertical styles render a specific separator.
@render_horizontal_frame_styles [:all, :frame, :header]
@render_vertical_frame_styles [:all, :frame]
@render_column_separators_styles [:all]
@render_row_separators_styles [:all]
@doc """
Provides a level of sane defaults for the Text rendering module.
"""
def default_options do
%{
horizontal_style: :header,
vertical_style: :all,
horizontal_symbol: "-",
vertical_symbol: "|",
intersection_symbol: "+",
top_frame_symbol: "-",
title_separator_symbol: "-",
header_separator_symbol: "-",
bottom_frame_symbol: "-"
}
end
@doc """
Implementation of the TableRex.Renderer behaviour.
Available styling options.
`horizontal_styles` controls horizontal separators and can be one of:
* `:all`: display separators between and around every row.
* `:header`: display outer and header horizontal separators only.
* `:frame`: display outer horizontal separators only.
* `:off`: display no horizontal separators.
`vertical_styles` controls vertical separators and can be one of:
* `:all`: display between and around every column.
* `:frame`: display outer vertical separators only.
* `:off`: display no vertical separators.
"""
def render(table = %Table{}, opts) do
{col_widths, row_heights} = max_dimensions(table)
# Calculations that would otherwise be carried out multiple times are done once and their
# results are stored in the %Meta{} struct which is then passed through the pipeline.
render_horizontal_frame? = opts[:horizontal_style] in @render_horizontal_frame_styles
render_vertical_frame? = opts[:vertical_style] in @render_vertical_frame_styles
render_column_separators? = opts[:vertical_style] in @render_column_separators_styles
render_row_separators? = opts[:horizontal_style] in @render_row_separators_styles
table_width = table_width(col_widths, vertical_frame?: render_vertical_frame?)
intersections = intersections(table_width, col_widths, vertical_style: opts[:vertical_style])
meta = %Meta{
col_widths: col_widths,
row_heights: row_heights,
table_width: table_width,
intersections: intersections,
render_horizontal_frame?: render_horizontal_frame?,
render_vertical_frame?: render_vertical_frame?,
render_column_separators?: render_column_separators?,
render_row_separators?: render_row_separators?
}
rendered =
{table, meta, opts, []}
|> render_top_frame
|> render_title
|> render_title_separator
|> render_header
|> render_header_separator
|> render_rows
|> render_bottom_frame
|> render_to_string
{:ok, rendered}
end
defp render_top_frame({table, %Meta{render_horizontal_frame?: false} = meta, opts, rendered}) do
{table, meta, opts, rendered}
end
defp render_top_frame({%Table{title: title} = table, meta, opts, rendered})
when is_binary(title) do
intersections = if meta.render_vertical_frame?, do: [0, meta.table_width - 1], else: []
line =
render_line(
meta.table_width,
intersections,
opts[:top_frame_symbol],
opts[:intersection_symbol]
)
{table, meta, opts, [line | rendered]}
end
defp render_top_frame({table, meta, opts, rendered}) do
line =
render_line(
meta.table_width,
meta.intersections,
opts[:top_frame_symbol],
opts[:intersection_symbol]
)
{table, meta, opts, [line | rendered]}
end
defp render_title({%Table{title: nil} = table, meta, opts, rendered}) do
{table, meta, opts, rendered}
end
defp render_title({%Table{title: title} = table, meta, opts, rendered}) do
inner_width = Meta.inner_width(meta)
line = do_render_cell(title, inner_width)
line =
if meta.render_vertical_frame? do
line |> frame_with(opts[:vertical_symbol])
else
line
end
{table, meta, opts, [line | rendered]}
end
defp render_title_separator({%Table{title: nil} = table, meta, opts, rendered}) do
{table, meta, opts, rendered}
end
defp render_title_separator(
{table, meta, %{horizontal_style: horizontal_style} = opts, rendered}
)
when horizontal_style in [:all, :header] do
line =
render_line(
meta.table_width,
meta.intersections,
opts[:title_separator_symbol],
opts[:intersection_symbol]
)
{table, meta, opts, [line | rendered]}
end
defp render_title_separator({table, %Meta{render_vertical_frame?: true} = meta, opts, rendered}) do
line = render_line(meta.table_width, [0, meta.table_width - 1], " ", opts[:vertical_symbol])
{table, meta, opts, [line | rendered]}
end
defp render_title_separator(
{table, %Meta{render_vertical_frame?: false} = meta, opts, rendered}
) do
{table, meta, opts, ["" | rendered]}
end
defp render_header({%Table{header_row: []} = table, meta, opts, rendered}) do
{table, meta, opts, rendered}
end
defp render_header({%Table{header_row: header_row} = table, meta, opts, rendered}) do
separator = if meta.render_column_separators?, do: opts[:vertical_symbol], else: " "
line = render_cell_row(table, meta, header_row, separator)
line =
if meta.render_vertical_frame? do
line |> frame_with(opts[:vertical_symbol])
else
line
end
{table, meta, opts, [line | rendered]}
end
defp render_header_separator({%Table{header_row: []} = table, meta, opts, rendered}) do
{table, meta, opts, rendered}
end
defp render_header_separator(
{table, meta, %{horizontal_style: horizontal_style} = opts, rendered}
)
when horizontal_style in [:all, :header] do
line =
render_line(
meta.table_width,
meta.intersections,
opts[:header_separator_symbol],
opts[:intersection_symbol]
)
{table, meta, opts, [line | rendered]}
end
defp render_header_separator(
{table, %Meta{render_vertical_frame?: true} = meta, opts, rendered}
) do
line = render_line(meta.table_width, [0, meta.table_width - 1], " ", opts[:vertical_symbol])
{table, meta, opts, [line | rendered]}
end
defp render_header_separator(
{table, %Meta{render_vertical_frame?: false} = meta, opts, rendered}
) do
{table, meta, opts, ["" | rendered]}
end
defp render_rows({%Table{rows: rows} = table, meta, opts, rendered}) do
separator = if meta.render_column_separators?, do: opts[:vertical_symbol], else: " "
lines = Enum.map(rows, &render_cell_row(table, meta, &1, separator))
lines =
if meta.render_vertical_frame? do
Enum.map(lines, &frame_with(&1, opts[:vertical_symbol]))
else
lines
end
lines =
if meta.render_row_separators? do
row_separator =
render_line(
meta.table_width,
meta.intersections,
opts[:horizontal_symbol],
opts[:intersection_symbol]
)
Enum.intersperse(lines, row_separator)
else
lines
end
rendered = lines ++ rendered
{table, meta, opts, rendered}
end
defp render_bottom_frame({table, %Meta{render_horizontal_frame?: false} = meta, opts, rendered}) do
{table, meta, opts, rendered}
end
defp render_bottom_frame({table, meta, opts, rendered}) do
line =
render_line(
meta.table_width,
meta.intersections,
opts[:bottom_frame_symbol],
opts[:intersection_symbol]
)
{table, meta, opts, [line | rendered]}
end
defp render_line(table_width, intersections, separator_symbol, intersection_symbol) do
for n <- 0..(table_width - 1) do
if n in intersections, do: intersection_symbol, else: separator_symbol
end
|> Enum.join()
end
defp render_cell_row(%Table{} = table, %Meta{} = meta, row, separator) do
row
|> Enum.with_index()
|> Enum.map(&render_cell(table, meta, &1))
|> Enum.intersperse(separator)
|> Enum.join()
end
defp render_cell(%Table{} = table, %Meta{} = meta, {%Cell{} = cell, col_index}) do
col_width = Meta.col_width(meta, col_index)
col_padding = Table.get_column_meta(table, col_index, :padding)
cell_align = Map.get(cell, :align) || Table.get_column_meta(table, col_index, :align)
cell_color = Map.get(cell, :color) || Table.get_column_meta(table, col_index, :color)
do_render_cell(cell.rendered_value, col_width, col_padding, align: cell_align)
|> format_with_color(cell.rendered_value, cell_color)
end
defp do_render_cell(value, inner_width) do
do_render_cell(value, inner_width, 0, align: :center)
end
defp do_render_cell(value, inner_width, _padding, align: :center) do
value_len = String.length(strip_ansi_color_codes(value))
post_value = ((inner_width - value_len) / 2) |> round
pre_value = inner_width - (post_value + value_len)
String.duplicate(" ", pre_value) <> value <> String.duplicate(" ", post_value)
end
defp do_render_cell(value, inner_width, padding, align: align) do
value_len = String.length(strip_ansi_color_codes(value))
alt_side_padding = inner_width - value_len - padding
{pre_value, post_value} =
case align do
:left ->
{padding, alt_side_padding}
:right ->
{alt_side_padding, padding}
end
String.duplicate(" ", pre_value) <> value <> String.duplicate(" ", post_value)
end
defp intersections(_table_width, _col_widths, vertical_style: :off), do: []
defp intersections(table_width, _col_widths, vertical_style: :frame) do
[0, table_width - 1]
|> Enum.into(MapSet.new())
end
defp intersections(table_width, col_widths, vertical_style: :all) do
col_widths = ordered_col_widths(col_widths)
inner_intersections =
Enum.reduce(col_widths, [0], fn x, [acc_h | _] = acc ->
[acc_h + x + 1 | acc]
end)
([0, table_width - 1] ++ inner_intersections)
|> Enum.into(MapSet.new())
end
defp max_dimensions(%Table{} = table) do
{col_widths, row_heights} =
[table.header_row | table.rows]
|> Enum.with_index()
|> Enum.reduce({%{}, %{}}, &reduce_row_maximums(table, &1, &2))
num_columns = Map.size(col_widths)
# Infer padding on left and right of title
title_padding =
[0, num_columns - 1]
|> Enum.map(&Table.get_column_meta(table, &1, :padding))
|> Enum.sum()
# Compare table body width with title width
col_separators_widths = num_columns - 1
body_width = (col_widths |> Map.values() |> Enum.sum()) + col_separators_widths
title_width = if(is_nil(table.title), do: 0, else: String.length(table.title)) + title_padding
# Add extra padding equally to all columns if required to match body and title width.
revised_col_widths =
if body_width >= title_width do
col_widths
else
extra_padding = ((title_width - body_width) / num_columns) |> Float.ceil() |> round
Enum.into(col_widths, %{}, fn {k, v} -> {k, v + extra_padding} end)
end
{revised_col_widths, row_heights}
end
defp reduce_row_maximums(%Table{} = table, {row, row_index}, {col_widths, row_heights}) do
row
|> Enum.with_index()
|> Enum.reduce({col_widths, row_heights}, &reduce_cell_maximums(table, &1, &2, row_index))
end
defp reduce_cell_maximums(
%Table{} = table,
{cell, col_index},
{col_widths, row_heights},
row_index
) do
padding = Table.get_column_meta(table, col_index, :padding)
{width, height} = content_dimensions(cell.rendered_value, padding)
col_widths = Map.update(col_widths, col_index, width, &Enum.max([&1, width]))
row_heights = Map.update(row_heights, row_index, height, &Enum.max([&1, height]))
{col_widths, row_heights}
end
defp content_dimensions(value, padding) when is_binary(value) and is_number(padding) do
lines =
value
|> strip_ansi_color_codes()
|> String.split("\n")
height = Enum.count(lines)
width = lines |> Enum.map(&String.length/1) |> Enum.max()
{width + padding * 2, height}
end
defp table_width(%{} = col_widths, vertical_frame?: vertical_frame?) do
width =
col_widths
|> Map.values()
|> Enum.intersperse(1)
|> Enum.sum()
if vertical_frame?, do: width + 2, else: width
end
defp ordered_col_widths(%{} = col_widths) do
col_widths
|> Enum.into([])
|> Enum.sort()
|> Enum.map(&elem(&1, 1))
end
defp frame_with(string, frame) do
frame <> string <> frame
end
defp render_to_string({_, _, _, rendered_lines}) when is_list(rendered_lines) do
rendered_lines
|> Enum.map(&String.trim_trailing/1)
|> Enum.reverse()
|> Enum.join("\n")
|> Kernel.<>("\n")
end
defp format_with_color(text, _, nil), do: text
defp format_with_color(text, value, color) when is_function(color) do
[color.(text, value) | IO.ANSI.reset()]
|> IO.ANSI.format_fragment(true)
end
defp format_with_color(text, _, color) do
[[color | text] | IO.ANSI.reset()]
|> IO.ANSI.format_fragment(true)
end
defp strip_ansi_color_codes(text) do
Regex.replace(~r|\e\[\d+m|u, text, "")
end
end
| 30.925676 | 101 | 0.660695 |
93f26b37a451fd928e5d5e79475c58cadde2655a | 2,028 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/list_impression_metrics_response.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/list_impression_metrics_response.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/list_impression_metrics_response.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AdExchangeBuyer.V2beta1.Model.ListImpressionMetricsResponse do
@moduledoc """
Response message for listing the metrics that are measured in number of impressions.
## Attributes
- impressionMetricsRows (List[ImpressionMetricsRow]): List of rows, each containing a set of impression metrics. Defaults to: `null`.
- nextPageToken (String): A token to retrieve the next page of results. Pass this value in the ListImpressionMetricsRequest.pageToken field in the subsequent call to the accounts.filterSets.impressionMetrics.list method to retrieve the next page of results. Defaults to: `null`.
"""
defstruct [
:"impressionMetricsRows",
:"nextPageToken"
]
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.ListImpressionMetricsResponse do
import GoogleApi.AdExchangeBuyer.V2beta1.Deserializer
def decode(value, options) do
value
|> deserialize(:"impressionMetricsRows", :list, GoogleApi.AdExchangeBuyer.V2beta1.Model.ImpressionMetricsRow, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.ListImpressionMetricsResponse do
def encode(value, options) do
GoogleApi.AdExchangeBuyer.V2beta1.Deserializer.serialize_non_nil(value, options)
end
end
| 40.56 | 280 | 0.783531 |
93f27052b05cae129531830fc2e86d0fedce1991 | 782 | exs | Elixir | test/examples/skip_example_test.exs | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | 807 | 2015-03-25T14:00:19.000Z | 2022-03-24T08:08:15.000Z | test/examples/skip_example_test.exs | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | 254 | 2015-03-27T10:12:25.000Z | 2021-07-12T01:40:15.000Z | test/examples/skip_example_test.exs | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | 85 | 2015-04-02T10:25:19.000Z | 2021-01-30T21:30:43.000Z | defmodule SkipExampleTest do
use ExUnit.Case, async: true
defmodule SomeSpec do
use ESpec
it do: "Example"
it "skipped", [skip: true], do: "skipped"
xit(do: "skipped")
xit("skipped", do: "skipped")
xit("skipped", [some: :option], do: "skipped")
xexample(do: "skipped")
xexample("skipped", do: "skipped")
xexample("skipped", [some: :option], do: "skipped")
xspecify(do: "skipped")
xspecify("skipped", do: "skipped")
xspecify("skipped", [some: :option], do: "skipped")
end
test "runs only 1" do
results = ESpec.SuiteRunner.run_examples(SomeSpec.examples(), true)
assert(length(Enum.filter(results, &(&1.status == :success))) == 1)
assert(length(Enum.filter(results, &(&1.status == :pending))) == 10)
end
end
| 26.965517 | 72 | 0.626598 |
93f275210ed35fb7b20ec549f33fcacb3ccc3904 | 12,047 | exs | Elixir | apps/reaper/test/integration/reaper/reaper_full_test.exs | PillarTechnology/smartcitiesdata | 9420a26820e38267513cd1bfa82c7f5583222bb1 | [
"Apache-2.0"
] | null | null | null | apps/reaper/test/integration/reaper/reaper_full_test.exs | PillarTechnology/smartcitiesdata | 9420a26820e38267513cd1bfa82c7f5583222bb1 | [
"Apache-2.0"
] | null | null | null | apps/reaper/test/integration/reaper/reaper_full_test.exs | PillarTechnology/smartcitiesdata | 9420a26820e38267513cd1bfa82c7f5583222bb1 | [
"Apache-2.0"
] | null | null | null | defmodule Reaper.FullTest do
use ExUnit.Case
use Divo
use Tesla
use Placebo
require Logger
alias SmartCity.TestDataGenerator, as: TDG
import SmartCity.TestHelper
import SmartCity.Event, only: [dataset_update: 0]
@endpoints Application.get_env(:reaper, :elsa_brokers)
@brod_endpoints Enum.map(@endpoints, fn {host, port} -> {to_charlist(host), port} end)
@output_topic_prefix Application.get_env(:reaper, :output_topic_prefix)
@instance Reaper.Application.instance()
@redix Reaper.Application.redis_client()
@pre_existing_dataset_id "00000-0000"
@partial_load_dataset_id "11111-1112"
@json_file_name "vehicle_locations.json"
@nested_data_file_name "nested_data.json"
@gtfs_file_name "gtfs-realtime.pb"
@csv_file_name "random_stuff.csv"
@xml_file_name "xml_sample.xml"
setup_all do
Temp.track!()
Application.put_env(:reaper, :download_dir, Temp.mkdir!())
# NOTE: using Bypass in setup all b/c we have no expectations.
# If we add any, we'll need to move this, per https://github.com/pspdfkit-labs/bypass#example
bypass = Bypass.open()
bypass
|> TestUtils.bypass_file(@gtfs_file_name)
|> TestUtils.bypass_file(@json_file_name)
|> TestUtils.bypass_file(@nested_data_file_name)
|> TestUtils.bypass_file(@csv_file_name)
|> TestUtils.bypass_file(@xml_file_name)
eventually(fn ->
{type, result} = get("http://localhost:#{bypass.port}/#{@csv_file_name}")
type == :ok and result.status == 200
end)
{:ok, bypass: bypass}
end
setup do
Redix.command(@redix, ["FLUSHALL"])
:ok
end
describe "pre-existing dataset" do
setup %{bypass: bypass} do
pre_existing_dataset =
TDG.create_dataset(%{
id: @pre_existing_dataset_id,
technical: %{
cadence: "once",
sourceUrl: "http://localhost:#{bypass.port}/#{@json_file_name}",
sourceFormat: "json",
schema: [
%{name: "latitude"},
%{name: "vehicle_id"},
%{name: "update_time"},
%{name: "longitude"}
]
}
})
Brook.Event.send(@instance, dataset_update(), :reaper, pre_existing_dataset)
:ok
end
test "configures and ingests a json-source that was added before reaper started" do
expected =
TestUtils.create_data(%{
dataset_id: @pre_existing_dataset_id,
payload: %{
"latitude" => 39.9613,
"vehicle_id" => 41_015,
"update_time" => "2019-02-14T18:53:23.498889+00:00",
"longitude" => -83.0074
}
})
topic = "#{@output_topic_prefix}-#{@pre_existing_dataset_id}"
eventually(fn ->
results = TestUtils.get_data_messages_from_kafka(topic, @endpoints)
last_one = List.last(results)
assert expected == last_one
end)
end
end
describe "partial-existing dataset" do
setup %{bypass: bypass} do
{:ok, pid} = Agent.start_link(fn -> %{has_raised: false, invocations: 0} end)
allow Elsa.produce(any(), any(), any()),
meck_options: [:passthrough],
exec: fn topic, messages, options ->
case Agent.get(pid, fn s -> {s.has_raised, s.invocations} end) do
{false, count} when count >= 2 ->
Agent.update(pid, fn _ -> %{has_raised: true, invocations: count + 1} end)
raise "Bring this thing down!"
{_, count} ->
Agent.update(pid, fn s -> %{s | invocations: count + 1} end)
:meck.passthrough([topic, messages, options])
end
end
Bypass.stub(bypass, "GET", "/partial.csv", fn conn ->
data =
1..10_000
|> Enum.map(fn _ -> random_string(10) end)
|> Enum.join("\n")
Plug.Conn.send_resp(conn, 200, data)
end)
pre_existing_dataset =
TDG.create_dataset(%{
id: @partial_load_dataset_id,
technical: %{
cadence: "once",
sourceUrl: "http://localhost:#{bypass.port}/partial.csv",
sourceFormat: "csv",
sourceType: "ingest",
schema: [%{name: "name", type: "string"}]
}
})
Brook.Event.send(@instance, dataset_update(), :reaper, pre_existing_dataset)
:ok
end
@tag capture_log: true
test "configures and ingests a csv datasource that was partially loaded before reaper restarted", %{bypass: _bypass} do
topic = "#{@output_topic_prefix}-#{@partial_load_dataset_id}"
eventually(
fn ->
result = :brod.resolve_offset(@brod_endpoints, topic, 0)
assert {:ok, 10_000} == result
end,
2_000,
50
)
end
end
describe "No pre-existing datasets" do
test "configures and ingests a gtfs source", %{bypass: bypass} do
dataset_id = "12345-6789"
topic = "#{@output_topic_prefix}-#{dataset_id}"
gtfs_dataset =
TDG.create_dataset(%{
id: dataset_id,
technical: %{
cadence: "once",
sourceUrl: "http://localhost:#{bypass.port}/#{@gtfs_file_name}",
sourceFormat: "gtfs"
}
})
Brook.Event.send(@instance, dataset_update(), :reaper, gtfs_dataset)
eventually(fn ->
results = TestUtils.get_data_messages_from_kafka(topic, @endpoints)
assert [%{payload: %{"id" => "1004"}} | _] = results
end)
end
test "configures and ingests a json source", %{bypass: bypass} do
dataset_id = "23456-7891"
topic = "#{@output_topic_prefix}-#{dataset_id}"
json_dataset =
TDG.create_dataset(%{
id: dataset_id,
technical: %{
cadence: "once",
sourceUrl: "http://localhost:#{bypass.port}/#{@json_file_name}",
sourceFormat: "json"
}
})
Brook.Event.send(@instance, dataset_update(), :reaper, json_dataset)
eventually(fn ->
results = TestUtils.get_data_messages_from_kafka(topic, @endpoints)
assert [%{payload: %{"vehicle_id" => 51_127}} | _] = results
end)
end
test "configures and ingests a csv source", %{bypass: bypass} do
dataset_id = "34567-8912"
topic = "#{@output_topic_prefix}-#{dataset_id}"
csv_dataset =
TDG.create_dataset(%{
id: dataset_id,
technical: %{
cadence: "once",
sourceUrl: "http://localhost:#{bypass.port}/#{@csv_file_name}",
sourceFormat: "csv",
sourceType: "ingest",
schema: [%{name: "id"}, %{name: "name"}, %{name: "pet"}]
}
})
Brook.Event.send(@instance, dataset_update(), :reaper, csv_dataset)
eventually(fn ->
results = TestUtils.get_data_messages_from_kafka(topic, @endpoints)
assert [%{payload: %{"name" => "Austin"}} | _] = results
assert false == File.exists?(dataset_id)
end)
end
test "configures and ingests a hosted dataset", %{bypass: bypass} do
dataset_id = "1-22-333-4444"
hosted_dataset =
TDG.create_dataset(%{
id: dataset_id,
technical: %{
cadence: "once",
sourceUrl: "http://localhost:#{bypass.port}/#{@csv_file_name}",
sourceFormat: "csv",
sourceType: "host"
}
})
Brook.Event.send(@instance, dataset_update(), :reaper, hosted_dataset)
eventually(fn ->
expected = File.read!("test/support/#{@csv_file_name}")
case ExAws.S3.get_object(
Application.get_env(:reaper, :hosted_file_bucket),
"#{hosted_dataset.technical.orgName}/#{hosted_dataset.technical.dataName}.csv"
)
|> ExAws.request() do
{:ok, resp} ->
assert Map.get(resp, :body) == expected
_other ->
Logger.info("File not uploaded yet")
flunk("File should have been uploaded")
end
end)
{:ok, _, messages} = Elsa.fetch(@endpoints, "event-stream", partition: 0)
assert Enum.any?(messages, fn %Elsa.Message{key: key} -> key == "file:ingest:end" end)
end
end
describe "One time Ingest" do
@tag timeout: 120_000
test "cadence of once is only processed once", %{bypass: bypass} do
dataset_id = "only-once"
topic = "#{@output_topic_prefix}-#{dataset_id}"
csv_dataset =
TDG.create_dataset(%{
id: dataset_id,
technical: %{
cadence: "once",
sourceUrl: "http://localhost:#{bypass.port}/#{@csv_file_name}",
sourceFormat: "csv",
sourceType: "ingest",
schema: [%{name: "id"}, %{name: "name"}, %{name: "pet"}]
}
})
Brook.Event.send(@instance, dataset_update(), :reaper, csv_dataset)
eventually(
fn ->
results = TestUtils.get_data_messages_from_kafka(topic, @endpoints)
assert [%{payload: %{"name" => "Austin"}} | _] = results
end,
1_000,
60
)
end
end
describe "Schema Stage" do
test "fills nested nils", %{bypass: bypass} do
dataset_id = "alzenband"
topic = "#{@output_topic_prefix}-#{dataset_id}"
json_dataset =
TDG.create_dataset(%{
id: dataset_id,
technical: %{
cadence: "once",
sourceUrl: "http://localhost:#{bypass.port}/#{@nested_data_file_name}",
sourceFormat: "json",
schema: [
%{name: "id", type: "string"},
%{
name: "grandParent",
type: "map",
subSchema: [
%{
name: "parentMap",
type: "map",
subSchema: [%{name: "fieldA", type: "string"}, %{name: "fieldB", type: "string"}]
}
]
}
]
}
})
Brook.Event.send(@instance, dataset_update(), :reaper, json_dataset)
eventually(fn ->
results = TestUtils.get_data_messages_from_kafka(topic, @endpoints)
assert 3 == length(results)
assert Enum.at(results, 0).payload == %{
"id" => nil,
"grandParent" => %{"parentMap" => %{"fieldA" => nil, "fieldB" => nil}}
}
assert Enum.at(results, 1).payload == %{
"id" => "2",
"grandParent" => %{"parentMap" => %{"fieldA" => "Bob", "fieldB" => "Purple"}}
}
assert Enum.at(results, 2).payload == %{
"id" => "3",
"grandParent" => %{"parentMap" => %{"fieldA" => "Joe", "fieldB" => nil}}
}
end)
end
end
describe "xml dataset" do
setup %{bypass: bypass} do
pre_existing_dataset =
TDG.create_dataset(%{
id: @pre_existing_dataset_id,
technical: %{
cadence: "once",
sourceUrl: "http://localhost:#{bypass.port}/#{@xml_file_name}",
sourceFormat: "xml",
schema: [
%{name: "first_name", selector: "//person/firstName/text()"}
],
topLevelSelector: "top/middle/rows/person"
}
})
Brook.Event.send(@instance, dataset_update(), :reaper, pre_existing_dataset)
:ok
end
test "is processed successfully" do
expected =
TestUtils.create_data(%{
dataset_id: @pre_existing_dataset_id,
payload: %{
"first_name" => "John"
}
})
topic = "#{@output_topic_prefix}-#{@pre_existing_dataset_id}"
eventually(fn ->
results = TestUtils.get_data_messages_from_kafka(topic, @endpoints)
last_one = List.last(results)
assert expected == last_one
end)
end
end
defp random_string(length) do
:crypto.strong_rand_bytes(length)
|> Base.url_encode64()
|> binary_part(0, length)
end
end
| 30.1175 | 123 | 0.557068 |
93f296da8fb84036298b0588b0b33ca00893a6cc | 8,774 | exs | Elixir | test/mojito_test.exs | marcdel/mojito | ce1ba49fdfeaf2098953e5d188b37ce8082ab737 | [
"MIT"
] | null | null | null | test/mojito_test.exs | marcdel/mojito | ce1ba49fdfeaf2098953e5d188b37ce8082ab737 | [
"MIT"
] | null | null | null | test/mojito_test.exs | marcdel/mojito | ce1ba49fdfeaf2098953e5d188b37ce8082ab737 | [
"MIT"
] | null | null | null | defmodule MojitoTest do
use ExSpec, async: true
doctest Mojito
doctest Mojito.Utils
alias Mojito.{Error, Headers}
context "url validation" do
it "fails on url without protocol" do
assert({:error, _} = Mojito.request(:get, "localhost/path"))
assert({:error, _} = Mojito.request(:get, "/localhost/path"))
assert({:error, _} = Mojito.request(:get, "//localhost/path"))
assert({:error, _} = Mojito.request(:get, "localhost//path"))
end
it "fails on url with bad protocol" do
assert({:error, _} = Mojito.request(:get, "garbage://localhost/path"))
assert({:error, _} = Mojito.request(:get, "ftp://localhost/path"))
end
it "fails on url without hostname" do
assert({:error, _} = Mojito.request(:get, "http://"))
end
it "fails on blank url" do
assert({:error, err} = Mojito.request(:get, ""))
assert(is_binary(err.message))
end
it "fails on nil url" do
assert({:error, err} = Mojito.request(:get, nil))
assert(is_binary(err.message))
end
end
context "method validation" do
it "fails on blank method" do
assert({:error, err} = Mojito.request("", "https://cool.com"))
assert(is_binary(err.message))
end
it "fails on nil method" do
assert({:error, err} = Mojito.request(nil, "https://cool.com"))
assert(is_binary(err.message))
end
end
context "local server tests" do
@http_port Application.get_env(:mojito, :test_server_http_port)
@https_port Application.get_env(:mojito, :test_server_https_port)
defp head(path, opts \\ []) do
Mojito.head(
"http://localhost:#{@http_port}#{path}",
[],
opts
)
end
defp get(path, opts \\ []) do
Mojito.get(
"http://localhost:#{@http_port}#{path}",
[],
opts
)
end
defp get_with_user(path, user, opts \\ []) do
Mojito.get(
"http://#{user}@localhost:#{@http_port}#{path}",
[],
opts
)
end
defp get_with_user_and_pass(path, user, pass, opts \\ []) do
Mojito.get(
"http://#{user}:#{pass}@localhost:#{@http_port}#{path}",
[],
opts
)
end
defp post(path, body_obj, opts \\ []) do
body = Jason.encode!(body_obj)
headers = [{"content-type", "application/json"}]
Mojito.post(
"http://localhost:#{@http_port}#{path}",
headers,
body,
opts
)
end
defp put(path, body_obj, opts \\ []) do
body = Jason.encode!(body_obj)
headers = [{"content-type", "application/json"}]
Mojito.put(
"http://localhost:#{@http_port}#{path}",
headers,
body,
opts
)
end
defp patch(path, body_obj, opts \\ []) do
body = Jason.encode!(body_obj)
headers = [{"content-type", "application/json"}]
Mojito.patch(
"http://localhost:#{@http_port}#{path}",
headers,
body,
opts
)
end
defp delete(path, opts \\ []) do
Mojito.delete(
"http://localhost:#{@http_port}#{path}",
[],
opts
)
end
defp options(path, opts \\ []) do
Mojito.options(
"http://localhost:#{@http_port}#{path}",
[],
opts
)
end
defp get_ssl(path, opts \\ []) do
Mojito.get(
"https://localhost:#{@https_port}#{path}",
[],
[transport_opts: [verify: :verify_none]] ++ opts
)
end
it "accepts kwlist input" do
assert(
        {:ok, _response} =
Mojito.request(method: :get, url: "http://localhost:#{@http_port}/")
)
end
it "accepts pool: true" do
assert(
        {:ok, _response} =
Mojito.request(
method: :get,
url: "http://localhost:#{@http_port}/",
opts: [pool: true]
)
)
end
it "accepts pool: false" do
assert(
        {:ok, _response} =
Mojito.request(
method: :get,
url: "http://localhost:#{@http_port}/",
opts: [pool: false]
)
)
end
it "accepts pool: pid" do
child_spec = Mojito.Pool.Single.child_spec()
{:ok, pool_pid} = Supervisor.start_child(Mojito.Supervisor, child_spec)
assert(
        {:ok, _response} =
Mojito.request(
method: :get,
url: "http://localhost:#{@http_port}/",
opts: [pool: pool_pid]
)
)
end
it "can make HTTP requests" do
assert({:ok, response} = get("/"))
assert(200 == response.status_code)
assert("Hello world!" == response.body)
assert("12" == Headers.get(response.headers, "content-length"))
end
it "can make HTTPS requests" do
assert({:ok, response} = get_ssl("/"))
assert(200 == response.status_code)
assert("Hello world!" == response.body)
assert("12" == Headers.get(response.headers, "content-length"))
end
it "handles timeouts" do
assert({:ok, _} = get("/", timeout: 100))
assert({:error, %Error{reason: :timeout}} = get("/wait1", timeout: 100))
end
it "handles timeouts even on long requests" do
port = Application.get_env(:mojito, :test_server_http_port)
{:ok, conn} = Mojito.Conn.connect("http://localhost:#{port}")
mint_conn =
Map.put(conn.conn, :request, %{
ref: nil,
state: :status,
method: :get,
version: nil,
status: nil,
headers_buffer: [],
content_length: nil,
connection: [],
transfer_encoding: [],
          body: nil
})
conn = %{conn | conn: mint_conn}
pid = self()
spawn(fn ->
socket = conn.conn.socket
Process.sleep(30)
send(pid, {:tcp, socket, "HTTP/1.1 200 OK\r\nserver: Cowboy"})
Process.sleep(30)
send(pid, {:tcp, socket, "\r\ndate: Thu, 25 Apr 2019 10:48:25"})
Process.sleep(30)
send(pid, {:tcp, socket, " GMT\r\ncontent-length: 12\r\ncache-"})
Process.sleep(30)
send(pid, {:tcp, socket, "control: max-age=0, private, must-"})
Process.sleep(30)
send(pid, {:tcp, socket, "revalidate\r\n\r\nHello world!"})
end)
assert(
{:error, %{reason: :timeout}} =
Mojito.Request.Single.receive_response(
conn,
%Mojito.Response{},
100
)
)
end
it "handles requests after a timeout" do
assert({:error, %{reason: :timeout}} = get("/wait?d=10", timeout: 1))
Process.sleep(100)
assert({:ok, %{body: "Hello Alice!"}} = get("?name=Alice"))
end
it "handles URL query params" do
assert({:ok, %{body: "Hello Alice!"}} = get("/?name=Alice"))
assert({:ok, %{body: "Hello Alice!"}} = get("?name=Alice"))
end
it "can post data" do
assert({:ok, response} = post("/post", %{name: "Charlie"}))
resp_body = response.body |> Jason.decode!()
assert("Charlie" == resp_body["name"])
end
it "handles user+pass in URL" do
assert({:ok, %{status_code: 500}} = get("/auth"))
assert(
{:ok, %{status_code: 200} = response} = get_with_user("/auth", "hi")
)
assert(%{"user" => "hi", "pass" => nil} = Jason.decode!(response.body))
assert(
{:ok, %{status_code: 200} = response} =
get_with_user_and_pass("/auth", "hi", "mom")
)
assert(%{"user" => "hi", "pass" => "mom"} = Jason.decode!(response.body))
end
it "can make HEAD request" do
assert({:ok, response} = head("/"))
assert(200 == response.status_code)
assert("" == response.body)
assert("12" == Headers.get(response.headers, "content-length"))
end
it "can make PATCH request" do
assert({:ok, response} = patch("/patch", %{name: "Charlie"}))
resp_body = response.body |> Jason.decode!()
assert("Charlie" == resp_body["name"])
end
it "can make PUT request" do
assert({:ok, response} = put("/put", %{name: "Charlie"}))
resp_body = response.body |> Jason.decode!()
assert("Charlie" == resp_body["name"])
end
it "can make DELETE request" do
assert({:ok, response} = delete("/delete"))
assert(200 == response.status_code)
end
it "can make OPTIONS request" do
assert({:ok, response} = options("/"))
assert(
"OPTIONS, GET, HEAD, POST, PATCH, PUT, DELETE" ==
Headers.get(response.headers, "allow")
)
end
end
context "external tests" do
it "can make HTTPS requests using proper cert chain by default" do
assert({:ok, _} = Mojito.request(:get, "https://github.com"))
end
end
end
| 26.91411 | 79 | 0.542398 |
93f2be9298f8b16940921d5b81de700eb5ae5e0d | 1,222 | exs | Elixir | test/test_helper.exs | Fullrate/my_acs | 52b179267eb450b49dcc1c4a368605c62e7f2abe | [
"Unlicense"
] | 5 | 2016-11-24T07:34:29.000Z | 2019-09-06T15:55:35.000Z | test/test_helper.exs | Fullrate/my_acs | 52b179267eb450b49dcc1c4a368605c62e7f2abe | [
"Unlicense"
] | null | null | null | test/test_helper.exs | Fullrate/my_acs | 52b179267eb450b49dcc1c4a368605c62e7f2abe | [
"Unlicense"
] | 4 | 2017-11-29T05:07:10.000Z | 2021-12-06T10:35:03.000Z | ExUnit.start
defmodule RequestSenders do
# sends a POST request, and eats the response and returns it
def sendFile(file, sessioncookie \\ []) do
{ :ok, data } = File.read(file)
sendStr(data, sessioncookie)
end
# sends a POST request, and eats the response and returns it
def sendStr(str, sessioncookie \\ []) do
port=Application.fetch_env!(:acs_ex, :acs_port)
resp = case sessioncookie do
[] -> HTTPoison.post("http://localhost:#{port}/", str, %{"Content-type" => "text/xml"})
[s] -> HTTPoison.post("http://localhost:#{port}/", str, %{"Content-type" => "text/xml"}, [hackney: [cookie: [s]]])
end
case resp do
{:ok,r} -> sessioncookie = case List.keyfind(r.headers,"set-cookie",0) do
{"set-cookie",s} -> [s]
_ -> []
end
{:ok,r,sessioncookie}
{:error,r} -> {:error,r,[]}
end
end
def readFixture!(file) do
{ :ok, data } = File.read(file)
String.trim_trailing(data,"\n")
end
def fixture_path do
Path.expand("fixtures", __DIR__)
end
def fixture_path(file_path) do
Path.join fixture_path(), file_path
end
end
| 30.55 | 120 | 0.56874 |
93f2c70f11c61f6a3f3f0e29b19a989b2885ed5f | 1,960 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v3/model/google_cloud_dialogflow_cx_v3beta1_delete_document_operation_metadata.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dialogflow/lib/google_api/dialogflow/v3/model/google_cloud_dialogflow_cx_v3beta1_delete_document_operation_metadata.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dialogflow/lib/google_api/dialogflow/v3/model/google_cloud_dialogflow_cx_v3beta1_delete_document_operation_metadata.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3beta1DeleteDocumentOperationMetadata do
@moduledoc """
Metadata for DeleteDocument operation.
## Attributes
* `genericMetadata` (*type:* `GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3beta1GenericKnowledgeOperationMetadata.t`, *default:* `nil`) - The generic information of the operation.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:genericMetadata =>
GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3beta1GenericKnowledgeOperationMetadata.t()
| nil
}
field(:genericMetadata,
as:
GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3beta1GenericKnowledgeOperationMetadata
)
end
defimpl Poison.Decoder,
for: GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3beta1DeleteDocumentOperationMetadata do
def decode(value, options) do
GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3beta1DeleteDocumentOperationMetadata.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3beta1DeleteDocumentOperationMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.385965 | 193 | 0.771939 |
93f2ca08ac8557e93ac6e8b4cd30e8b7879b9ae4 | 2,808 | ex | Elixir | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_sync_authorization.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_sync_authorization.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_sync_authorization.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1SyncAuthorization do
@moduledoc """
## Attributes
* `etag` (*type:* `String.t`, *default:* `nil`) - Entity tag (ETag) used for optimistic concurrency control as a way to help prevent simultaneous updates from overwriting each other. For example, when you call [getSyncAuthorization](getSyncAuthorization) an ETag is returned in the response. Pass that ETag when calling the [setSyncAuthorization](setSyncAuthorization) to ensure that you are updating the correct version. If you don't pass the ETag in the call to `setSyncAuthorization`, then the existing authorization is overwritten indiscriminately. **Note**: We strongly recommend that you use the ETag in the read-modify-write cycle to avoid race conditions.
* `identities` (*type:* `list(String.t)`, *default:* `nil`) - Required. Array of service accounts to grant access to control plane resources, each specified using the following format: `serviceAccount:` service-account-name. The service-account-name is formatted like an email address. For example: `my-synchronizer-manager-service_account@my_project_id.iam.gserviceaccount.com` You might specify multiple service accounts, for example, if you have multiple environments and wish to assign a unique service account to each one. The service accounts must have **Apigee Synchronizer Manager** role. See also [Create service accounts](https://cloud.google.com/apigee/docs/hybrid/latest/sa-about#create-the-service-accounts).
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:etag => String.t(),
:identities => list(String.t())
}
field(:etag)
field(:identities, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1SyncAuthorization do
def decode(value, options) do
GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1SyncAuthorization.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1SyncAuthorization do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 56.16 | 725 | 0.768519 |
93f2f82492862b145deed8079af4177baceb8e09 | 3,470 | ex | Elixir | lib/club/brands/aggregates/brand.ex | vheathen/club.wallprint.pro | d58d2409d8879d23ed4d60fe3b9c2e1bd82e924d | [
"MIT"
] | null | null | null | lib/club/brands/aggregates/brand.ex | vheathen/club.wallprint.pro | d58d2409d8879d23ed4d60fe3b9c2e1bd82e924d | [
"MIT"
] | 34 | 2019-11-10T11:31:37.000Z | 2019-11-27T21:26:48.000Z | lib/club/brands/aggregates/brand.ex | vheathen/club.wallprint.pro | d58d2409d8879d23ed4d60fe3b9c2e1bd82e924d | [
"MIT"
] | null | null | null | defmodule Club.Brands.Aggregates.Brand do
@derive Jason.Encoder
defstruct uuid: nil,
name: nil,
url: "",
product_count: 0,
products: [],
deleted?: false
alias Club.Brands.Aggregates.Brand
alias Club.Brands.Commands.{
AddBrand,
RenameBrand,
UpdateBrandUrl,
LinkNewProductWithBrand,
UnlinkProductFromBrand,
DeleteBrand
}
alias Club.Brands.Events.{
BrandAdded,
BrandRenamed,
BrandUrlUpdated,
NewProductWithBrandLinked,
ProductFromBrandUnlinked,
BrandDeleted
}
# AddBrand
def execute(%Brand{uuid: nil}, %AddBrand{} = cmd), do: BrandAdded.new(cmd)
def execute(%Brand{}, %AddBrand{}), do: {:error, :brand_already_exists}
# if Brand doesn't exists we should return error for all but AddBrand commands
def execute(%Brand{uuid: nil}, _), do: {:error, :brand_doesnt_exist}
# DeleteBrand
def execute(%Brand{deleted?: true}, %DeleteBrand{}), do: nil
def execute(%Brand{product_count: count}, %DeleteBrand{}) when count > 0,
do: {:error, :brand_has_linked_products}
def execute(%Brand{}, %DeleteBrand{} = cmd), do: BrandDeleted.new(cmd)
# if Brand deleted we should return error for all but DeleteBrand commands
def execute(%Brand{deleted?: true}, _), do: {:error, :brand_has_been_deleted}
# RenameBrand
def execute(%Brand{name: name}, %RenameBrand{name: name}), do: nil
def execute(%Brand{uuid: uuid}, %RenameBrand{brand_uuid: uuid} = cmd), do: BrandRenamed.new(cmd)
# UpdateBrandUrl
def execute(%Brand{url: url}, %UpdateBrandUrl{url: url}), do: nil
def execute(%Brand{uuid: uuid}, %UpdateBrandUrl{brand_uuid: uuid} = cmd),
do: BrandUrlUpdated.new(cmd)
# LinkNewProductWithBrand
def execute(
%Brand{products: products},
%LinkNewProductWithBrand{product_uuid: product_uuid} = cmd
) do
case Enum.any?(products, &(&1 == product_uuid)) do
true -> nil
_ -> NewProductWithBrandLinked.new(cmd)
end
end
# UnlinkProductFromBrand
def execute(
%Brand{products: products},
%UnlinkProductFromBrand{product_uuid: product_uuid} = cmd
) do
case Enum.any?(products, &(&1 == product_uuid)) do
true -> ProductFromBrandUnlinked.new(cmd)
_ -> nil
end
end
# state mutators
def apply(%Brand{} = brand, %BrandAdded{} = event) do
%Brand{
brand
| uuid: event.brand_uuid,
name: event.name,
url: event.url
}
end
def apply(%Brand{} = brand, %BrandRenamed{name: name}) do
%Brand{
brand
| name: name
}
end
def apply(%Brand{} = brand, %BrandUrlUpdated{url: url}) do
%Brand{
brand
| url: url
}
end
def apply(
%Brand{product_count: product_count, products: products} = brand,
%NewProductWithBrandLinked{
product_uuid: product_uuid
}
) do
%Brand{
brand
| product_count: product_count + 1,
products: [product_uuid | products]
}
end
def apply(
%Brand{product_count: product_count, products: products} = brand,
%ProductFromBrandUnlinked{
product_uuid: product_uuid
}
) do
%Brand{
brand
| product_count: product_count - 1,
products: products -- [product_uuid]
}
end
def apply(
%Brand{} = brand,
%BrandDeleted{}
) do
%Brand{
brand
| deleted?: true
}
end
end
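# Sketch of the command/event cycle (added for illustration; the AddBrand fields
# shown are inferred from how execute/2 and apply/2 use the resulting BrandAdded
# event, not taken from the command module itself, and `uuid` is a placeholder).
# Commanded calls execute/2 to emit an event and apply/2 to fold it into state:
#
#     alias Club.Brands.Aggregates.Brand
#     alias Club.Brands.Commands.AddBrand
#
#     cmd = %AddBrand{brand_uuid: uuid, name: "Acme", url: "https://example.com"}
#     event = Brand.execute(%Brand{}, cmd)     # %BrandAdded{...}
#     state = Brand.apply(%Brand{}, event)
#     # state.uuid == uuid; issuing AddBrand against this state returns
#     # {:error, :brand_already_exists}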
| 24.097222 | 98 | 0.626513 |
93f2f9b08c916748654d71d4ed16bd9b3164128e | 1,499 | ex | Elixir | lib/serum/html_tree_helper.ex | afontaine/Serum | f2c410d086c235332e047afdc6a3c6e6c6f6fca8 | [
"MIT"
] | 1 | 2019-06-21T10:55:33.000Z | 2019-06-21T10:55:33.000Z | lib/serum/html_tree_helper.ex | afontaine/Serum | f2c410d086c235332e047afdc6a3c6e6c6f6fca8 | [
"MIT"
] | null | null | null | lib/serum/html_tree_helper.ex | afontaine/Serum | f2c410d086c235332e047afdc6a3c6e6c6f6fca8 | [
"MIT"
] | null | null | null | defmodule Serum.HtmlTreeHelper do
@moduledoc "Provides useful functions for working with HTML trees."
@type tree :: binary() | tag() | [binary() | tag()]
@type tag :: {binary(), [attribute()], [tree()]}
@type attribute :: {binary(), binary()}
@type tree_fun :: (tree() -> tree())
@type acc_tree_fun :: (tree(), term() -> {tree(), term()})
@doc """
Performs a post-order traversal of the given HTML tree.
"""
@spec traverse(tree(), tree_fun()) :: tree()
def traverse(tree, fun)
def traverse({tag_name, attrs, children}, fun) do
new_children = traverse(children, fun)
fun.({tag_name, attrs, new_children})
end
def traverse(tags, fun) when is_list(tags) do
Enum.map(tags, &traverse(&1, fun))
end
def traverse(x, _fun), do: x
@doc """
Performs a post-order traversal of the given HTML tree with an accumulator.
"""
@spec traverse(tree(), term(), acc_tree_fun()) :: {tree(), term()}
def traverse(tree, acc, fun)
def traverse({tag_name, attrs, children}, acc, fun) do
{new_children, new_acc} = traverse(children, acc, fun)
fun.({tag_name, attrs, new_children}, new_acc)
end
def traverse(tags, acc, fun) when is_list(tags) do
{new_tags, new_acc} =
Enum.reduce(tags, {[], acc}, fn tag, {list, acc} ->
{new_tag, new_acc2} = traverse(tag, acc, fun)
{[new_tag | list], new_acc2}
end)
{new_tags |> Enum.reverse() |> List.flatten(), new_acc}
end
def traverse(x, acc, _fun), do: {x, acc}
end
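# Usage sketch (added; the sample tree is an assumption, not from the original
# file). traverse/2 rewrites nodes bottom-up, while traverse/3 threads an
# accumulator through the same walk:
#
#     alias Serum.HtmlTreeHelper, as: Html
#
#     tree = {"div", [], [{"p", [], ["hello"]}, {"p", [], ["world"]}]}
#
#     # rename every <p> to <span>
#     Html.traverse(tree, fn
#       {"p", attrs, children} -> {"span", attrs, children}
#       other -> other
#     end)
#
#     # count the element nodes while leaving the tree unchanged
#     Html.traverse(tree, 0, fn tag, count -> {tag, count + 1} end)
#     #=> {tree, 3}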
| 28.283019 | 77 | 0.624416 |