hexsha (stringlengths 40..40) | size (int64 2..991k) | ext (stringclasses 2 values) | lang (stringclasses 1 value) | max_stars_repo_path (stringlengths 4..208) | max_stars_repo_name (stringlengths 6..106) | max_stars_repo_head_hexsha (stringlengths 40..40) | max_stars_repo_licenses (sequence) | max_stars_count (int64 1..33.5k ⌀) | max_stars_repo_stars_event_min_datetime (stringlengths 24..24 ⌀) | max_stars_repo_stars_event_max_datetime (stringlengths 24..24 ⌀) | max_issues_repo_path (stringlengths 4..208) | max_issues_repo_name (stringlengths 6..106) | max_issues_repo_head_hexsha (stringlengths 40..40) | max_issues_repo_licenses (sequence) | max_issues_count (int64 1..16.3k ⌀) | max_issues_repo_issues_event_min_datetime (stringlengths 24..24 ⌀) | max_issues_repo_issues_event_max_datetime (stringlengths 24..24 ⌀) | max_forks_repo_path (stringlengths 4..208) | max_forks_repo_name (stringlengths 6..106) | max_forks_repo_head_hexsha (stringlengths 40..40) | max_forks_repo_licenses (sequence) | max_forks_count (int64 1..6.91k ⌀) | max_forks_repo_forks_event_min_datetime (stringlengths 24..24 ⌀) | max_forks_repo_forks_event_max_datetime (stringlengths 24..24 ⌀) | content (stringlengths 2..991k) | avg_line_length (float64 1..36k) | max_line_length (int64 1..977k) | alphanum_fraction (float64 0..1)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7352137cd069094c2f822fcdac059d1370469eae | 2,863 | ex | Elixir | lib/kandesk_web.ex | seb3s/kandesk | 420474733ca0be258d74c36add44c3c36b691a58 | ["MIT"] | 158 | 2020-09-01T13:21:59.000Z | 2022-03-31T12:41:03.000Z | lib/kandesk_web.ex | seb3s/kandesk | 420474733ca0be258d74c36add44c3c36b691a58 | ["MIT"] | 8 | 2020-09-01T21:09:05.000Z | 2022-01-20T21:46:18.000Z | lib/kandesk_web.ex | seb3s/kandesk | 420474733ca0be258d74c36add44c3c36b691a58 | ["MIT"] | 18 | 2020-09-01T13:23:45.000Z | 2022-02-15T09:47:39.000Z |
defmodule KandeskWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use KandeskWeb, :controller
use KandeskWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: KandeskWeb
import Plug.Conn
import KandeskWeb.Gettext
alias KandeskWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/kandesk_web/templates",
namespace: KandeskWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 1, get_flash: 2, view_module: 1]
# Include shared imports and aliases for views
unquote(view_helpers())
end
end
def subtemplate_view do
quote do
use Phoenix.View,
root: "lib/kandesk_web/templates",
namespace: KandeskWeb,
pattern: "**/*"
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 1, get_flash: 2, view_module: 1]
# Include shared imports and aliases for views
unquote(view_helpers())
end
end
def live_view do
quote do
use Phoenix.LiveView,
layout: {KandeskWeb.LayoutView, "live.html"}
unquote(view_helpers())
end
end
def live_component do
quote do
use Phoenix.LiveComponent
unquote(view_helpers())
end
end
def mailer_view do
quote do
use Phoenix.View,
root: "lib/kandesk_web/templates",
namespace: KandeskWeb
use Phoenix.HTML
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
import Phoenix.LiveView.Router
end
end
def channel do
quote do
use Phoenix.Channel
import KandeskWeb.Gettext
end
end
defp view_helpers do
quote do
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
# Import LiveView helpers (live_render, live_component, live_patch, etc)
import Phoenix.LiveView.Helpers
# Import basic rendering functionality (render, render_layout, etc)
import Phoenix.View
import KandeskWeb.ErrorHelpers
import KandeskWeb.Gettext
alias KandeskWeb.Router.Helpers, as: Routes
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 22.722222 | 83 | 0.674118 |
73521fe35edf13ecd7cb032529eaafdfa6bd25bd | 2,193 | ex | Elixir | charts/lib/charts/stacked_column_chart/base_chart_impl.ex | Refined-Process/charts_ex | 6d23eff5903496b7a4518a3208486a15a3354ae2 | ["MIT"] | null | null | null | charts/lib/charts/stacked_column_chart/base_chart_impl.ex | Refined-Process/charts_ex | 6d23eff5903496b7a4518a3208486a15a3354ae2 | ["MIT"] | null | null | null | charts/lib/charts/stacked_column_chart/base_chart_impl.ex | Refined-Process/charts_ex | 6d23eff5903496b7a4518a3208486a15a3354ae2 | ["MIT"] | null | null | null |
defimpl Charts.StackedColumnChart, for: Charts.BaseChart do
alias Charts.BaseChart
alias Charts.StackedColumnChart.{MultiColumn, Rectangle}
alias Charts.ColumnChart.Dataset
def columns(%BaseChart{dataset: nil}), do: []
def columns(%BaseChart{dataset: dataset}), do: columns(dataset)
def columns(%Dataset{data: []}), do: []
def columns(%Dataset{data: data, axes: %{magnitude_axis: %{max: max}}}) do
width = 100.0 / Enum.count(data)
margin = width / 4.0
data
|> Enum.with_index()
|> Enum.map(fn {datum, index} ->
offset = index * width
column_height = (Map.values(datum.values) |> Enum.sum()) / max * 100
%MultiColumn{
width: width,
column_height: column_height,
offset: offset,
label: datum.name,
column_width: width / 2.0,
column_offset: offset + margin,
parts: datum.values
}
end)
end
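  # Worked example (illustrative numbers, not taken from the source): with four
  # data points, each %MultiColumn{} gets width = 100.0 / 4 = 25.0 and
  # margin = 25.0 / 4 = 6.25. The datum at index 2 is placed at
  # offset = 2 * 25.0 = 50.0 with column_width = 12.5 and
  # column_offset = 50.0 + 6.25 = 56.25; if max = 200 and its values sum to
  # 150, then column_height = 150 / 200 * 100 = 75.0.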
def rectangles(chart) do
chart
|> columns()
|> rectangles_from_columns()
end
defp rectangles_from_columns([]), do: []
defp rectangles_from_columns(multi_columns) do
multi_columns
|> Enum.flat_map(&build_rectangles_for_column(&1))
end
defp build_rectangles_for_column(column) do
column.parts
|> Enum.reject(fn {_color, height} -> height == 0 end)
|> Enum.reduce([], fn {color, height}, acc ->
percentage = height / Enum.sum(Map.values(column.parts)) * 100
rectangle_height = percentage / 100 * column.column_height
case acc do
[previous | _rectangles] ->
new_rectangle = %Rectangle{
x_offset: column.column_offset,
y_offset: previous.y_offset - rectangle_height,
fill_color: color,
width: column.width,
height: rectangle_height,
label: height
}
[new_rectangle | acc]
[] ->
new_rectangle = %Rectangle{
x_offset: column.column_offset,
y_offset: 100 - rectangle_height,
fill_color: color,
width: column.width,
height: rectangle_height,
label: height
}
[new_rectangle]
end
end)
end
end
| 27.4125 | 76 | 0.605107 |
7352da3083929839645839594646dacd5cf415be | 153 | exs | Elixir | config/config.exs | axelson/nerves_ssh | 4ccaa956bfcec21d024c2c7f0000e083ec224baf | ["Apache-2.0"] | null | null | null | config/config.exs | axelson/nerves_ssh | 4ccaa956bfcec21d024c2c7f0000e083ec224baf | ["Apache-2.0"] | null | null | null | config/config.exs | axelson/nerves_ssh | 4ccaa956bfcec21d024c2c7f0000e083ec224baf | ["Apache-2.0"] | null | null | null |
use Mix.Config
config :nerves_runtime,
target: "host"
config :nerves_runtime, Nerves.Runtime.KV.Mock, %{"nerves_fw_devpath" => "/dev/will_not_work"}
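# Hedged note (assumes the Nerves.Runtime.KV API; not part of this file): with
# this mock configured for the host target, a call such as
# Nerves.Runtime.KV.get("nerves_fw_devpath") would return "/dev/will_not_work"
# instead of reading real firmware metadata.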
| 21.857143 | 94 | 0.745098 |
7352efbaafb143cdf549e84c30259482c4343f52 | 124 | exs | Elixir | test/custom_hooks/simple_search_test.exs | ramansah/rummage_ecto | 0f24fdccfe504e3c5b8337698446c17fefc60766 | ["MIT"] | 1 | 2019-02-11T19:54:24.000Z | 2019-02-11T19:54:24.000Z | test/custom_hooks/simple_search_test.exs | ramansah/rummage_ecto | 0f24fdccfe504e3c5b8337698446c17fefc60766 | ["MIT"] | null | null | null | test/custom_hooks/simple_search_test.exs | ramansah/rummage_ecto | 0f24fdccfe504e3c5b8337698446c17fefc60766 | ["MIT"] | 2 | 2019-11-02T21:36:27.000Z | 2021-03-02T15:58:31.000Z |
defmodule Rummage.Ecto.CustomHook.SimpleSearchTest do
use ExUnit.Case
doctest Rummage.Ecto.CustomHook.SimpleSearch
end
| 20.666667 | 53 | 0.83871 |
7352faf72ffd5552310f6011c1b118243d9d7daa | 21,399 | ex | Elixir | lib/codes/codes_c50.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | ["Apache-2.0"] | null | null | null | lib/codes/codes_c50.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | ["Apache-2.0"] | null | null | null | lib/codes/codes_c50.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | ["Apache-2.0"] | null | null | null |
defmodule IcdCode.ICDCode.Codes_C50 do
alias IcdCode.ICDCode
def _C50011 do
%ICDCode{full_code: "C50011",
category_code: "C50",
short_code: "011",
full_name: "Malignant neoplasm of nipple and areola, right female breast",
short_name: "Malignant neoplasm of nipple and areola, right female breast",
category_name: "Malignant neoplasm of nipple and areola, right female breast"
}
end
def _C50012 do
%ICDCode{full_code: "C50012",
category_code: "C50",
short_code: "012",
full_name: "Malignant neoplasm of nipple and areola, left female breast",
short_name: "Malignant neoplasm of nipple and areola, left female breast",
category_name: "Malignant neoplasm of nipple and areola, left female breast"
}
end
def _C50019 do
%ICDCode{full_code: "C50019",
category_code: "C50",
short_code: "019",
full_name: "Malignant neoplasm of nipple and areola, unspecified female breast",
short_name: "Malignant neoplasm of nipple and areola, unspecified female breast",
category_name: "Malignant neoplasm of nipple and areola, unspecified female breast"
}
end
def _C50021 do
%ICDCode{full_code: "C50021",
category_code: "C50",
short_code: "021",
full_name: "Malignant neoplasm of nipple and areola, right male breast",
short_name: "Malignant neoplasm of nipple and areola, right male breast",
category_name: "Malignant neoplasm of nipple and areola, right male breast"
}
end
def _C50022 do
%ICDCode{full_code: "C50022",
category_code: "C50",
short_code: "022",
full_name: "Malignant neoplasm of nipple and areola, left male breast",
short_name: "Malignant neoplasm of nipple and areola, left male breast",
category_name: "Malignant neoplasm of nipple and areola, left male breast"
}
end
def _C50029 do
%ICDCode{full_code: "C50029",
category_code: "C50",
short_code: "029",
full_name: "Malignant neoplasm of nipple and areola, unspecified male breast",
short_name: "Malignant neoplasm of nipple and areola, unspecified male breast",
category_name: "Malignant neoplasm of nipple and areola, unspecified male breast"
}
end
def _C50111 do
%ICDCode{full_code: "C50111",
category_code: "C50",
short_code: "111",
full_name: "Malignant neoplasm of central portion of right female breast",
short_name: "Malignant neoplasm of central portion of right female breast",
category_name: "Malignant neoplasm of central portion of right female breast"
}
end
def _C50112 do
%ICDCode{full_code: "C50112",
category_code: "C50",
short_code: "112",
full_name: "Malignant neoplasm of central portion of left female breast",
short_name: "Malignant neoplasm of central portion of left female breast",
category_name: "Malignant neoplasm of central portion of left female breast"
}
end
def _C50119 do
%ICDCode{full_code: "C50119",
category_code: "C50",
short_code: "119",
full_name: "Malignant neoplasm of central portion of unspecified female breast",
short_name: "Malignant neoplasm of central portion of unspecified female breast",
category_name: "Malignant neoplasm of central portion of unspecified female breast"
}
end
def _C50121 do
%ICDCode{full_code: "C50121",
category_code: "C50",
short_code: "121",
full_name: "Malignant neoplasm of central portion of right male breast",
short_name: "Malignant neoplasm of central portion of right male breast",
category_name: "Malignant neoplasm of central portion of right male breast"
}
end
def _C50122 do
%ICDCode{full_code: "C50122",
category_code: "C50",
short_code: "122",
full_name: "Malignant neoplasm of central portion of left male breast",
short_name: "Malignant neoplasm of central portion of left male breast",
category_name: "Malignant neoplasm of central portion of left male breast"
}
end
def _C50129 do
%ICDCode{full_code: "C50129",
category_code: "C50",
short_code: "129",
full_name: "Malignant neoplasm of central portion of unspecified male breast",
short_name: "Malignant neoplasm of central portion of unspecified male breast",
category_name: "Malignant neoplasm of central portion of unspecified male breast"
}
end
def _C50211 do
%ICDCode{full_code: "C50211",
category_code: "C50",
short_code: "211",
full_name: "Malignant neoplasm of upper-inner quadrant of right female breast",
short_name: "Malignant neoplasm of upper-inner quadrant of right female breast",
category_name: "Malignant neoplasm of upper-inner quadrant of right female breast"
}
end
def _C50212 do
%ICDCode{full_code: "C50212",
category_code: "C50",
short_code: "212",
full_name: "Malignant neoplasm of upper-inner quadrant of left female breast",
short_name: "Malignant neoplasm of upper-inner quadrant of left female breast",
category_name: "Malignant neoplasm of upper-inner quadrant of left female breast"
}
end
def _C50219 do
%ICDCode{full_code: "C50219",
category_code: "C50",
short_code: "219",
full_name: "Malignant neoplasm of upper-inner quadrant of unspecified female breast",
short_name: "Malignant neoplasm of upper-inner quadrant of unspecified female breast",
category_name: "Malignant neoplasm of upper-inner quadrant of unspecified female breast"
}
end
def _C50221 do
%ICDCode{full_code: "C50221",
category_code: "C50",
short_code: "221",
full_name: "Malignant neoplasm of upper-inner quadrant of right male breast",
short_name: "Malignant neoplasm of upper-inner quadrant of right male breast",
category_name: "Malignant neoplasm of upper-inner quadrant of right male breast"
}
end
def _C50222 do
%ICDCode{full_code: "C50222",
category_code: "C50",
short_code: "222",
full_name: "Malignant neoplasm of upper-inner quadrant of left male breast",
short_name: "Malignant neoplasm of upper-inner quadrant of left male breast",
category_name: "Malignant neoplasm of upper-inner quadrant of left male breast"
}
end
def _C50229 do
%ICDCode{full_code: "C50229",
category_code: "C50",
short_code: "229",
full_name: "Malignant neoplasm of upper-inner quadrant of unspecified male breast",
short_name: "Malignant neoplasm of upper-inner quadrant of unspecified male breast",
category_name: "Malignant neoplasm of upper-inner quadrant of unspecified male breast"
}
end
def _C50311 do
%ICDCode{full_code: "C50311",
category_code: "C50",
short_code: "311",
full_name: "Malignant neoplasm of lower-inner quadrant of right female breast",
short_name: "Malignant neoplasm of lower-inner quadrant of right female breast",
category_name: "Malignant neoplasm of lower-inner quadrant of right female breast"
}
end
def _C50312 do
%ICDCode{full_code: "C50312",
category_code: "C50",
short_code: "312",
full_name: "Malignant neoplasm of lower-inner quadrant of left female breast",
short_name: "Malignant neoplasm of lower-inner quadrant of left female breast",
category_name: "Malignant neoplasm of lower-inner quadrant of left female breast"
}
end
def _C50319 do
%ICDCode{full_code: "C50319",
category_code: "C50",
short_code: "319",
full_name: "Malignant neoplasm of lower-inner quadrant of unspecified female breast",
short_name: "Malignant neoplasm of lower-inner quadrant of unspecified female breast",
category_name: "Malignant neoplasm of lower-inner quadrant of unspecified female breast"
}
end
def _C50321 do
%ICDCode{full_code: "C50321",
category_code: "C50",
short_code: "321",
full_name: "Malignant neoplasm of lower-inner quadrant of right male breast",
short_name: "Malignant neoplasm of lower-inner quadrant of right male breast",
category_name: "Malignant neoplasm of lower-inner quadrant of right male breast"
}
end
def _C50322 do
%ICDCode{full_code: "C50322",
category_code: "C50",
short_code: "322",
full_name: "Malignant neoplasm of lower-inner quadrant of left male breast",
short_name: "Malignant neoplasm of lower-inner quadrant of left male breast",
category_name: "Malignant neoplasm of lower-inner quadrant of left male breast"
}
end
def _C50329 do
%ICDCode{full_code: "C50329",
category_code: "C50",
short_code: "329",
full_name: "Malignant neoplasm of lower-inner quadrant of unspecified male breast",
short_name: "Malignant neoplasm of lower-inner quadrant of unspecified male breast",
category_name: "Malignant neoplasm of lower-inner quadrant of unspecified male breast"
}
end
def _C50411 do
%ICDCode{full_code: "C50411",
category_code: "C50",
short_code: "411",
full_name: "Malignant neoplasm of upper-outer quadrant of right female breast",
short_name: "Malignant neoplasm of upper-outer quadrant of right female breast",
category_name: "Malignant neoplasm of upper-outer quadrant of right female breast"
}
end
def _C50412 do
%ICDCode{full_code: "C50412",
category_code: "C50",
short_code: "412",
full_name: "Malignant neoplasm of upper-outer quadrant of left female breast",
short_name: "Malignant neoplasm of upper-outer quadrant of left female breast",
category_name: "Malignant neoplasm of upper-outer quadrant of left female breast"
}
end
def _C50419 do
%ICDCode{full_code: "C50419",
category_code: "C50",
short_code: "419",
full_name: "Malignant neoplasm of upper-outer quadrant of unspecified female breast",
short_name: "Malignant neoplasm of upper-outer quadrant of unspecified female breast",
category_name: "Malignant neoplasm of upper-outer quadrant of unspecified female breast"
}
end
def _C50421 do
%ICDCode{full_code: "C50421",
category_code: "C50",
short_code: "421",
full_name: "Malignant neoplasm of upper-outer quadrant of right male breast",
short_name: "Malignant neoplasm of upper-outer quadrant of right male breast",
category_name: "Malignant neoplasm of upper-outer quadrant of right male breast"
}
end
def _C50422 do
%ICDCode{full_code: "C50422",
category_code: "C50",
short_code: "422",
full_name: "Malignant neoplasm of upper-outer quadrant of left male breast",
short_name: "Malignant neoplasm of upper-outer quadrant of left male breast",
category_name: "Malignant neoplasm of upper-outer quadrant of left male breast"
}
end
def _C50429 do
%ICDCode{full_code: "C50429",
category_code: "C50",
short_code: "429",
full_name: "Malignant neoplasm of upper-outer quadrant of unspecified male breast",
short_name: "Malignant neoplasm of upper-outer quadrant of unspecified male breast",
category_name: "Malignant neoplasm of upper-outer quadrant of unspecified male breast"
}
end
def _C50511 do
%ICDCode{full_code: "C50511",
category_code: "C50",
short_code: "511",
full_name: "Malignant neoplasm of lower-outer quadrant of right female breast",
short_name: "Malignant neoplasm of lower-outer quadrant of right female breast",
category_name: "Malignant neoplasm of lower-outer quadrant of right female breast"
}
end
def _C50512 do
%ICDCode{full_code: "C50512",
category_code: "C50",
short_code: "512",
full_name: "Malignant neoplasm of lower-outer quadrant of left female breast",
short_name: "Malignant neoplasm of lower-outer quadrant of left female breast",
category_name: "Malignant neoplasm of lower-outer quadrant of left female breast"
}
end
def _C50519 do
%ICDCode{full_code: "C50519",
category_code: "C50",
short_code: "519",
full_name: "Malignant neoplasm of lower-outer quadrant of unspecified female breast",
short_name: "Malignant neoplasm of lower-outer quadrant of unspecified female breast",
category_name: "Malignant neoplasm of lower-outer quadrant of unspecified female breast"
}
end
def _C50521 do
%ICDCode{full_code: "C50521",
category_code: "C50",
short_code: "521",
full_name: "Malignant neoplasm of lower-outer quadrant of right male breast",
short_name: "Malignant neoplasm of lower-outer quadrant of right male breast",
category_name: "Malignant neoplasm of lower-outer quadrant of right male breast"
}
end
def _C50522 do
%ICDCode{full_code: "C50522",
category_code: "C50",
short_code: "522",
full_name: "Malignant neoplasm of lower-outer quadrant of left male breast",
short_name: "Malignant neoplasm of lower-outer quadrant of left male breast",
category_name: "Malignant neoplasm of lower-outer quadrant of left male breast"
}
end
def _C50529 do
%ICDCode{full_code: "C50529",
category_code: "C50",
short_code: "529",
full_name: "Malignant neoplasm of lower-outer quadrant of unspecified male breast",
short_name: "Malignant neoplasm of lower-outer quadrant of unspecified male breast",
category_name: "Malignant neoplasm of lower-outer quadrant of unspecified male breast"
}
end
def _C50611 do
%ICDCode{full_code: "C50611",
category_code: "C50",
short_code: "611",
full_name: "Malignant neoplasm of axillary tail of right female breast",
short_name: "Malignant neoplasm of axillary tail of right female breast",
category_name: "Malignant neoplasm of axillary tail of right female breast"
}
end
def _C50612 do
%ICDCode{full_code: "C50612",
category_code: "C50",
short_code: "612",
full_name: "Malignant neoplasm of axillary tail of left female breast",
short_name: "Malignant neoplasm of axillary tail of left female breast",
category_name: "Malignant neoplasm of axillary tail of left female breast"
}
end
def _C50619 do
%ICDCode{full_code: "C50619",
category_code: "C50",
short_code: "619",
full_name: "Malignant neoplasm of axillary tail of unspecified female breast",
short_name: "Malignant neoplasm of axillary tail of unspecified female breast",
category_name: "Malignant neoplasm of axillary tail of unspecified female breast"
}
end
def _C50621 do
%ICDCode{full_code: "C50621",
category_code: "C50",
short_code: "621",
full_name: "Malignant neoplasm of axillary tail of right male breast",
short_name: "Malignant neoplasm of axillary tail of right male breast",
category_name: "Malignant neoplasm of axillary tail of right male breast"
}
end
def _C50622 do
%ICDCode{full_code: "C50622",
category_code: "C50",
short_code: "622",
full_name: "Malignant neoplasm of axillary tail of left male breast",
short_name: "Malignant neoplasm of axillary tail of left male breast",
category_name: "Malignant neoplasm of axillary tail of left male breast"
}
end
def _C50629 do
%ICDCode{full_code: "C50629",
category_code: "C50",
short_code: "629",
full_name: "Malignant neoplasm of axillary tail of unspecified male breast",
short_name: "Malignant neoplasm of axillary tail of unspecified male breast",
category_name: "Malignant neoplasm of axillary tail of unspecified male breast"
}
end
def _C50811 do
%ICDCode{full_code: "C50811",
category_code: "C50",
short_code: "811",
full_name: "Malignant neoplasm of overlapping sites of right female breast",
short_name: "Malignant neoplasm of overlapping sites of right female breast",
category_name: "Malignant neoplasm of overlapping sites of right female breast"
}
end
def _C50812 do
%ICDCode{full_code: "C50812",
category_code: "C50",
short_code: "812",
full_name: "Malignant neoplasm of overlapping sites of left female breast",
short_name: "Malignant neoplasm of overlapping sites of left female breast",
category_name: "Malignant neoplasm of overlapping sites of left female breast"
}
end
def _C50819 do
%ICDCode{full_code: "C50819",
category_code: "C50",
short_code: "819",
full_name: "Malignant neoplasm of overlapping sites of unspecified female breast",
short_name: "Malignant neoplasm of overlapping sites of unspecified female breast",
category_name: "Malignant neoplasm of overlapping sites of unspecified female breast"
}
end
def _C50821 do
%ICDCode{full_code: "C50821",
category_code: "C50",
short_code: "821",
full_name: "Malignant neoplasm of overlapping sites of right male breast",
short_name: "Malignant neoplasm of overlapping sites of right male breast",
category_name: "Malignant neoplasm of overlapping sites of right male breast"
}
end
def _C50822 do
%ICDCode{full_code: "C50822",
category_code: "C50",
short_code: "822",
full_name: "Malignant neoplasm of overlapping sites of left male breast",
short_name: "Malignant neoplasm of overlapping sites of left male breast",
category_name: "Malignant neoplasm of overlapping sites of left male breast"
}
end
def _C50829 do
%ICDCode{full_code: "C50829",
category_code: "C50",
short_code: "829",
full_name: "Malignant neoplasm of overlapping sites of unspecified male breast",
short_name: "Malignant neoplasm of overlapping sites of unspecified male breast",
category_name: "Malignant neoplasm of overlapping sites of unspecified male breast"
}
end
def _C50911 do
%ICDCode{full_code: "C50911",
category_code: "C50",
short_code: "911",
full_name: "Malignant neoplasm of unspecified site of right female breast",
short_name: "Malignant neoplasm of unspecified site of right female breast",
category_name: "Malignant neoplasm of unspecified site of right female breast"
}
end
def _C50912 do
%ICDCode{full_code: "C50912",
category_code: "C50",
short_code: "912",
full_name: "Malignant neoplasm of unspecified site of left female breast",
short_name: "Malignant neoplasm of unspecified site of left female breast",
category_name: "Malignant neoplasm of unspecified site of left female breast"
}
end
def _C50919 do
%ICDCode{full_code: "C50919",
category_code: "C50",
short_code: "919",
full_name: "Malignant neoplasm of unspecified site of unspecified female breast",
short_name: "Malignant neoplasm of unspecified site of unspecified female breast",
category_name: "Malignant neoplasm of unspecified site of unspecified female breast"
}
end
def _C50921 do
%ICDCode{full_code: "C50921",
category_code: "C50",
short_code: "921",
full_name: "Malignant neoplasm of unspecified site of right male breast",
short_name: "Malignant neoplasm of unspecified site of right male breast",
category_name: "Malignant neoplasm of unspecified site of right male breast"
}
end
def _C50922 do
%ICDCode{full_code: "C50922",
category_code: "C50",
short_code: "922",
full_name: "Malignant neoplasm of unspecified site of left male breast",
short_name: "Malignant neoplasm of unspecified site of left male breast",
category_name: "Malignant neoplasm of unspecified site of left male breast"
}
end
def _C50929 do
%ICDCode{full_code: "C50929",
category_code: "C50",
short_code: "929",
full_name: "Malignant neoplasm of unspecified site of unspecified male breast",
short_name: "Malignant neoplasm of unspecified site of unspecified male breast",
category_name: "Malignant neoplasm of unspecified site of unspecified male breast"
}
end
end
| 43.40568 | 98 | 0.662087 |
73530b17881bc2a9199107d9e2206b4f57c703e9 | 87 | ex | Elixir | lib/phoenix_postgres_react_web/views/page_view.ex | CTMoney/phoenix-postgres-react | b51c298fdcef339324a601dd874a82e1e0cc8e6e | ["MIT"] | null | null | null | lib/phoenix_postgres_react_web/views/page_view.ex | CTMoney/phoenix-postgres-react | b51c298fdcef339324a601dd874a82e1e0cc8e6e | ["MIT"] | 1 | 2021-03-09T11:33:04.000Z | 2021-03-09T11:33:04.000Z | lib/phoenix_postgres_react_web/views/page_view.ex | CTMoney/phoenix_ | b51c298fdcef339324a601dd874a82e1e0cc8e6e | ["MIT"] | null | null | null |
defmodule PhoenixPostgresReactWeb.PageView do
use PhoenixPostgresReactWeb, :view
end
| 21.75 | 45 | 0.862069 |
7353328a56cb0de947d1687e13d35e1290e10262 | 468 | ex | Elixir | apps/plant_monitor_web/lib/plant_monitor_web/plugs/split_token_claims.ex | bartoszgorka/PlantMonitor | 23e18cd76c51bd8eee021ee98668926de885047b | ["MIT"] | 2 | 2019-01-25T21:21:56.000Z | 2021-02-24T08:18:51.000Z | apps/plant_monitor_web/lib/plant_monitor_web/plugs/split_token_claims.ex | bartoszgorka/PlantMonitor | 23e18cd76c51bd8eee021ee98668926de885047b | ["MIT"] | null | null | null | apps/plant_monitor_web/lib/plant_monitor_web/plugs/split_token_claims.ex | bartoszgorka/PlantMonitor | 23e18cd76c51bd8eee021ee98668926de885047b | ["MIT"] | null | null | null |
defmodule PlantMonitorWeb.Plugs.SplitTokenClaims do
@moduledoc false
import Plug.Conn
def init(opts), do: opts
def call(conn, _params \\ %{}) do
conn
|> assign_user_id()
|> assign_scopes()
end
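  # Hedged usage sketch (the pipeline name and the preceding plug are invented,
  # not from this project): as a plain Plug this module would typically sit in
  # a router pipeline after whatever plug populates conn.assigns.claims, e.g.
  #
  #     pipeline :authenticated_api do
  #       plug SomeAuthPlug            # must set conn.assigns.claims
  #       plug PlantMonitorWeb.Plugs.SplitTokenClaims
  #     end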
defp assign_user_id(conn) do
user_id = conn.assigns.claims.user_id
conn
|> assign(:user_id, user_id)
end
defp assign_scopes(conn) do
scopes = conn.assigns.claims.permissions
conn
|> assign(:scopes, scopes)
end
end
| 16.714286 | 51 | 0.668803 |
7353871ccfe1edefc9734c010455b78395144e57 | 18,665 | ex | Elixir | lib/aws/iot_analytics.ex | ahsandar/aws-elixir | 25de8b6c3a1401bde737cfc26b0679b14b058f23 | ["Apache-2.0"] | null | null | null | lib/aws/iot_analytics.ex | ahsandar/aws-elixir | 25de8b6c3a1401bde737cfc26b0679b14b058f23 | ["Apache-2.0"] | null | null | null | lib/aws/iot_analytics.ex | ahsandar/aws-elixir | 25de8b6c3a1401bde737cfc26b0679b14b058f23 | ["Apache-2.0"] | null | null | null |
# WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.IoTAnalytics do
@moduledoc """
AWS IoT Analytics allows you to collect large amounts of device data,
process messages, and store them. You can then query the data and run
sophisticated analytics on it. AWS IoT Analytics enables advanced data
exploration through integration with Jupyter Notebooks and data
visualization through integration with Amazon QuickSight.
Traditional analytics and business intelligence tools are designed to
process structured data. IoT data often comes from devices that record
noisy processes (such as temperature, motion, or sound). As a result the
data from these devices can have significant gaps, corrupted messages, and
false readings that must be cleaned up before analysis can occur. Also, IoT
data is often only meaningful in the context of other data from external
sources.
AWS IoT Analytics automates the steps required to analyze data from IoT
devices. AWS IoT Analytics filters, transforms, and enriches IoT data
before storing it in a time-series data store for analysis. You can set up
the service to collect only the data you need from your devices, apply
mathematical transforms to process the data, and enrich the data with
device-specific metadata such as device type and location before storing
it. Then, you can analyze your data by running queries using the built-in
SQL query engine, or perform more complex analytics and machine learning
inference. AWS IoT Analytics includes pre-built models for common IoT use
cases so you can answer questions like which devices are about to fail or
which customers are at risk of abandoning their wearable devices.
"""
@doc """
Sends messages to a channel.
"""
def batch_put_message(client, input, options \\ []) do
path_ = "/messages/batch"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
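  # Hedged usage sketch (channel name and payload invented; the input shape
  # follows the AWS IoT Analytics BatchPutMessage API, not anything defined in
  # this file):
  #
  #     input = %{
  #       "channelName" => "sensor_channel",
  #       "messages" => [
  #         %{"messageId" => "1", "payload" => Base.encode64(~s({"temp": 21}))}
  #       ]
  #     }
  #     {:ok, body, _response} = AWS.IoTAnalytics.batch_put_message(client, input)
  #
  # where client is an already-configured AWS client carrying the region,
  # endpoint, proto and port fields read by build_host/2 and build_url/3 below.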
@doc """
Cancels the reprocessing of data through the pipeline.
"""
def cancel_pipeline_reprocessing(client, pipeline_name, reprocessing_id, input, options \\ []) do
path_ = "/pipelines/#{URI.encode(pipeline_name)}/reprocessing/#{URI.encode(reprocessing_id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Creates a channel. A channel collects data from an MQTT topic and archives
the raw, unprocessed messages before publishing the data to a pipeline.
"""
def create_channel(client, input, options \\ []) do
path_ = "/channels"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Creates a data set. A data set stores data retrieved from a data store by
applying a "queryAction" (a SQL query) or a "containerAction" (executing a
containerized application). This operation creates the skeleton of a data
set. The data set can be populated manually by calling
"CreateDatasetContent" or automatically according to a "trigger" you
specify.
"""
def create_dataset(client, input, options \\ []) do
path_ = "/datasets"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Creates the content of a data set by applying a "queryAction" (a SQL query)
or a "containerAction" (executing a containerized application).
"""
def create_dataset_content(client, dataset_name, input, options \\ []) do
path_ = "/datasets/#{URI.encode(dataset_name)}/content"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a data store, which is a repository for messages.
"""
def create_datastore(client, input, options \\ []) do
path_ = "/datastores"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Creates a pipeline. A pipeline consumes messages from a channel and allows
you to process the messages before storing them in a data store. You must
specify both a `channel` and a `datastore` activity and, optionally, as
many as 23 additional activities in the `pipelineActivities` array.
"""
def create_pipeline(client, input, options \\ []) do
path_ = "/pipelines"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
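  # Hedged sketch of a minimal input (names invented; the field shapes follow
  # the AWS IoT Analytics CreatePipeline API rather than this file). The
  # required channel and datastore activities could be wired together as:
  #
  #     input = %{
  #       "pipelineName" => "sensor_pipeline",
  #       "pipelineActivities" => [
  #         %{"channel" => %{"name" => "in", "channelName" => "sensor_channel", "next" => "store"}},
  #         %{"datastore" => %{"name" => "store", "datastoreName" => "sensor_datastore"}}
  #       ]
  #     }
  #     AWS.IoTAnalytics.create_pipeline(client, input)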
@doc """
Deletes the specified channel.
"""
def delete_channel(client, channel_name, input, options \\ []) do
path_ = "/channels/#{URI.encode(channel_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes the specified data set.
You do not have to delete the content of the data set before you perform
this operation.
"""
def delete_dataset(client, dataset_name, input, options \\ []) do
path_ = "/datasets/#{URI.encode(dataset_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes the content of the specified data set.
"""
def delete_dataset_content(client, dataset_name, input, options \\ []) do
path_ = "/datasets/#{URI.encode(dataset_name)}/content"
headers = []
{query_, input} =
[
{"versionId", "versionId"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes the specified data store.
"""
def delete_datastore(client, datastore_name, input, options \\ []) do
path_ = "/datastores/#{URI.encode(datastore_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes the specified pipeline.
"""
def delete_pipeline(client, pipeline_name, input, options \\ []) do
path_ = "/pipelines/#{URI.encode(pipeline_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Retrieves information about a channel.
"""
def describe_channel(client, channel_name, include_statistics \\ nil, options \\ []) do
path_ = "/channels/#{URI.encode(channel_name)}"
headers = []
query_ = []
query_ = if !is_nil(include_statistics) do
[{"includeStatistics", include_statistics} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieves information about a data set.
"""
def describe_dataset(client, dataset_name, options \\ []) do
path_ = "/datasets/#{URI.encode(dataset_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieves information about a data store.
"""
def describe_datastore(client, datastore_name, include_statistics \\ nil, options \\ []) do
path_ = "/datastores/#{URI.encode(datastore_name)}"
headers = []
query_ = []
query_ = if !is_nil(include_statistics) do
[{"includeStatistics", include_statistics} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieves the current settings of the AWS IoT Analytics logging options.
"""
def describe_logging_options(client, options \\ []) do
path_ = "/logging"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieves information about a pipeline.
"""
def describe_pipeline(client, pipeline_name, options \\ []) do
path_ = "/pipelines/#{URI.encode(pipeline_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieves the contents of a data set as pre-signed URIs.
"""
def get_dataset_content(client, dataset_name, version_id \\ nil, options \\ []) do
path_ = "/datasets/#{URI.encode(dataset_name)}/content"
headers = []
query_ = []
query_ = if !is_nil(version_id) do
[{"versionId", version_id} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieves a list of channels.
"""
def list_channels(client, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/channels"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
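  # Hedged usage sketch (variable names invented; response keys follow the AWS
  # ListChannels API): the maxResults/nextToken query parameters built above
  # support cursor-style pagination, e.g.
  #
  #     {:ok, %{"channelSummaries" => page_1, "nextToken" => token}, _resp} =
  #       AWS.IoTAnalytics.list_channels(client, 50)
  #
  #     {:ok, %{"channelSummaries" => page_2}, _resp} =
  #       AWS.IoTAnalytics.list_channels(client, 50, token)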
@doc """
Lists information about data set contents that have been created.
"""
def list_dataset_contents(client, dataset_name, max_results \\ nil, next_token \\ nil, scheduled_before \\ nil, scheduled_on_or_after \\ nil, options \\ []) do
path_ = "/datasets/#{URI.encode(dataset_name)}/contents"
headers = []
query_ = []
query_ = if !is_nil(scheduled_on_or_after) do
[{"scheduledOnOrAfter", scheduled_on_or_after} | query_]
else
query_
end
query_ = if !is_nil(scheduled_before) do
[{"scheduledBefore", scheduled_before} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieves information about data sets.
"""
def list_datasets(client, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/datasets"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieves a list of data stores.
"""
def list_datastores(client, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/datastores"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieves a list of pipelines.
"""
def list_pipelines(client, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/pipelines"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Lists the tags (metadata) which you have assigned to the resource.
"""
def list_tags_for_resource(client, resource_arn, options \\ []) do
path_ = "/tags"
headers = []
query_ = []
query_ = if !is_nil(resource_arn) do
[{"resourceArn", resource_arn} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Sets or updates the AWS IoT Analytics logging options.
Note that if you update the value of any `loggingOptions` field, it takes
up to one minute for the change to take effect. Also, if you change the
policy attached to the role you specified in the roleArn field (for
example, to correct an invalid policy) it takes up to 5 minutes for that
change to take effect.
"""
def put_logging_options(client, input, options \\ []) do
path_ = "/logging"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Simulates the results of running a pipeline activity on a message payload.
"""
def run_pipeline_activity(client, input, options \\ []) do
path_ = "/pipelineactivities/run"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Retrieves a sample of messages from the specified channel ingested during
the specified timeframe. Up to 10 messages can be retrieved.
"""
def sample_channel_data(client, channel_name, end_time \\ nil, max_messages \\ nil, start_time \\ nil, options \\ []) do
path_ = "/channels/#{URI.encode(channel_name)}/sample"
headers = []
query_ = []
query_ = if !is_nil(start_time) do
[{"startTime", start_time} | query_]
else
query_
end
query_ = if !is_nil(max_messages) do
[{"maxMessages", max_messages} | query_]
else
query_
end
query_ = if !is_nil(end_time) do
[{"endTime", end_time} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Starts the reprocessing of raw message data through the pipeline.
"""
def start_pipeline_reprocessing(client, pipeline_name, input, options \\ []) do
path_ = "/pipelines/#{URI.encode(pipeline_name)}/reprocessing"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Adds to or modifies the tags of the given resource. Tags are metadata which
can be used to manage a resource.
"""
def tag_resource(client, input, options \\ []) do
path_ = "/tags"
headers = []
{query_, input} =
[
{"resourceArn", "resourceArn"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, 204)
end
@doc """
Removes the given tags (metadata) from the resource.
"""
def untag_resource(client, input, options \\ []) do
path_ = "/tags"
headers = []
{query_, input} =
[
{"resourceArn", "resourceArn"},
{"tagKeys", "tagKeys"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Updates the settings of a channel.
"""
def update_channel(client, channel_name, input, options \\ []) do
path_ = "/channels/#{URI.encode(channel_name)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Updates the settings of a data set.
"""
def update_dataset(client, dataset_name, input, options \\ []) do
path_ = "/datasets/#{URI.encode(dataset_name)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Updates the settings of a data store.
"""
def update_datastore(client, datastore_name, input, options \\ []) do
path_ = "/datastores/#{URI.encode(datastore_name)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Updates the settings of a pipeline. You must specify both a `channel` and a
`datastore` activity and, optionally, as many as 23 additional activities
in the `pipelineActivities` array.
"""
def update_pipeline(client, pipeline_name, input, options \\ []) do
path_ = "/pipelines/#{URI.encode(pipeline_name)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
{:ok, Poison.Parser.t(), Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
client = %{client | service: "iotanalytics"}
host = build_host("iotanalytics", client)
url = host
|> build_url(path, client)
|> add_query(query)
additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
headers = AWS.Request.add_headers(additional_headers, headers)
payload = encode_payload(input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(method, url, payload, headers, options, success_status_code)
end
defp perform_request(method, url, payload, headers, options, nil) do
case HTTPoison.request(method, url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, response}
{:ok, %HTTPoison.Response{status_code: status_code, body: body} = response}
when status_code == 200 or status_code == 202 or status_code == 204 ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp perform_request(method, url, payload, headers, options, success_status_code) do
case HTTPoison.request(method, url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: ^success_status_code, body: ""} = response} ->
{:ok, %{}, response}
{:ok, %HTTPoison.Response{status_code: ^success_status_code, body: body} = response} ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, path, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{path}"
end
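  # Illustration (host and path invented): build_url/3 simply interpolates, so
  # build_url("iotanalytics.us-east-1.amazonaws.com", "/channels",
  # %{proto: "https", port: 443}) yields
  # "https://iotanalytics.us-east-1.amazonaws.com:443/channels".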
defp add_query(url, []) do
url
end
defp add_query(url, query) do
querystring = AWS.Util.encode_query(query)
"#{url}?#{querystring}"
end
defp encode_payload(input) do
if input != nil, do: Poison.Encoder.encode(input, %{}), else: ""
end
end
| 32.125645 | 161 | 0.65513 |
73538db8b6772d820a1a0b01a3b0ea6ae7558bf9 | 2,466 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/callout_status_row.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/callout_status_row.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/callout_status_row.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdExchangeBuyer.V2beta1.Model.CalloutStatusRow do
@moduledoc """
The number of impressions with the specified dimension values where the corresponding bid request or bid response was not successful, as described by the specified callout status.
## Attributes
* `calloutStatusId` (*type:* `integer()`, *default:* `nil`) - The ID of the callout status. See [callout-status-codes](https://developers.google.com/authorized-buyers/rtb/downloads/callout-status-codes).
* `impressionCount` (*type:* `GoogleApi.AdExchangeBuyer.V2beta1.Model.MetricValue.t`, *default:* `nil`) - The number of impressions for which there was a bid request or bid response with the specified callout status.
* `rowDimensions` (*type:* `GoogleApi.AdExchangeBuyer.V2beta1.Model.RowDimensions.t`, *default:* `nil`) - The values of all dimensions associated with metric values in this row.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:calloutStatusId => integer(),
:impressionCount => GoogleApi.AdExchangeBuyer.V2beta1.Model.MetricValue.t(),
:rowDimensions => GoogleApi.AdExchangeBuyer.V2beta1.Model.RowDimensions.t()
}
field(:calloutStatusId)
field(:impressionCount, as: GoogleApi.AdExchangeBuyer.V2beta1.Model.MetricValue)
field(:rowDimensions, as: GoogleApi.AdExchangeBuyer.V2beta1.Model.RowDimensions)
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.CalloutStatusRow do
def decode(value, options) do
GoogleApi.AdExchangeBuyer.V2beta1.Model.CalloutStatusRow.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.CalloutStatusRow do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 46.528302 | 220 | 0.761152 |
7353a2f010854b7750cfcc850c503b225da200b0 | 16,632 | ex | Elixir | lib/new_relic/metric/metric_data.ex | binaryseed/elixir_agent | 25f1242c10516618d9ea3a9b18712e5bc41efad6 | ["Apache-2.0"] | null | null | null | lib/new_relic/metric/metric_data.ex | binaryseed/elixir_agent | 25f1242c10516618d9ea3a9b18712e5bc41efad6 | ["Apache-2.0"] | null | null | null | lib/new_relic/metric/metric_data.ex | binaryseed/elixir_agent | 25f1242c10516618d9ea3a9b18712e5bc41efad6 | ["Apache-2.0"] | null | null | null |
defmodule NewRelic.Metric.MetricData do
  # Helper functions for generating Metrics with the correct timeslice values
@moduledoc false
alias NewRelic.Metric
def transform({:custom, name}, count: count, value: value),
do: %Metric{
name: join(["Custom", name]),
call_count: count,
total_call_time: value,
min_call_time: value,
max_call_time: value
}
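  # Hedged illustration (metric name and numbers invented; assumes join/1,
  # defined elsewhere in this module, joins segments with "/"):
  #
  #     transform({:custom, "MyStat"}, count: 1, value: 0.25)
  #     #=> %Metric{name: "Custom/MyStat", call_count: 1, total_call_time: 0.25,
  #     #     min_call_time: 0.25, max_call_time: 0.25}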
def transform(:http_dispatcher, duration_s: duration_s),
do: %Metric{
name: :HttpDispatcher,
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
}
def transform({:transaction, name},
type: :Web,
duration_s: duration_s,
total_time_s: total_time_s
),
do: [
%Metric{
name: "WebTransaction",
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: join(["WebTransaction", name]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: "WebTransactionTotalTime",
call_count: 1,
total_call_time: total_time_s,
total_exclusive_time: total_time_s,
min_call_time: total_time_s,
max_call_time: total_time_s
},
# Transaction breakdown doesn't handle Elixir's level of concurrency,
# sending just call count improves things
%Metric{
name: join(["WebTransactionTotalTime", name]),
call_count: 1
}
]
def transform({:transaction, name},
type: :Other,
duration_s: duration_s,
total_time_s: total_time_s
),
do: [
%Metric{
name: "OtherTransaction/all",
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: join(["OtherTransaction", name]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: "OtherTransactionTotalTime",
call_count: 1,
total_call_time: total_time_s,
total_exclusive_time: total_time_s,
min_call_time: total_time_s,
max_call_time: total_time_s
},
# Transaction breakdown doesn't handle Elixir's level of concurrency,
# sending just call count improves things
%Metric{
name: join(["OtherTransactionTotalTime", name]),
call_count: 1
}
]
def transform(
{:caller, type, account_id, app_id, transport_type},
duration_s: duration_s
),
do: %Metric{
name: join(["DurationByCaller", type, account_id, app_id, transport_type, "all"]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
}
def transform({:datastore, datastore, table, operation},
type: type,
scope: scope,
duration_s: duration_s
),
do: [
%Metric{
name: join(["Datastore/statement", datastore, table, operation]),
scope: join(["#{type}Transaction", scope]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: join(["Datastore/operation", datastore, operation]),
scope: join(["#{type}Transaction", scope]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: join(["Datastore", datastore, "all#{type}"]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: "Datastore/all#{type}",
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
}
]
def transform({:datastore, datastore, table, operation},
duration_s: duration_s
),
do: [
%Metric{
name: join(["Datastore/statement", datastore, table, operation]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: join(["Datastore/operation", datastore, operation]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: join(["Datastore", datastore, "all"]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: join(["Datastore", "all"]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
}
]
def transform({:datastore, datastore, operation},
type: type,
scope: scope,
duration_s: duration_s
),
do: [
%Metric{
name: join(["Datastore/operation", datastore, operation]),
scope: join(["#{type}Transaction", scope]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: join(["Datastore", datastore, "all#{type}"]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: join(["Datastore", "all#{type}"]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
}
]
def transform({:datastore, datastore, operation},
duration_s: duration_s
),
do: [
%Metric{
name: join(["Datastore/operation", datastore, operation]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: join(["Datastore", datastore, "all"]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: join(["Datastore", "all"]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
}
]
def transform({:external, url, component, method}, duration_s: duration_s) do
host = URI.parse(url).host
method = method |> to_string() |> String.upcase()
[
%Metric{
name: :"External/all",
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: join(["External", host, "all"]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: join(["External", host, component, method]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
}
]
end
def transform({:external, url, component, method},
type: type,
scope: scope,
duration_s: duration_s
) do
host = URI.parse(url).host
method = method |> to_string() |> String.upcase()
%Metric{
name: join(["External", host, component, method]),
scope: join(["#{type}Transaction", scope]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
}
end
def transform({:external, name}, duration_s: duration_s),
do: [
%Metric{
name: :"External/all",
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
},
%Metric{
name: join(["External", name, "all"]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
}
]
def transform({:external, name}, type: type, scope: scope, duration_s: duration_s),
do: %Metric{
name: join(["External", name]),
scope: join(["#{type}Transaction", scope]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
}
def transform(:external, type: type, duration_s: duration_s),
do: %Metric{
name: "External/all#{type}",
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
}
def transform({:function, function_name},
duration_s: duration_s,
exclusive_time_s: exclusive_time_s
),
do: %Metric{
name: join(["Function", function_name]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: exclusive_time_s,
min_call_time: duration_s,
max_call_time: duration_s
}
def transform({:function, function_name},
type: type,
scope: scope,
duration_s: duration_s,
exclusive_time_s: exclusive_time_s
),
do: %Metric{
name: join(["Function", function_name]),
scope: join(["#{type}Transaction", scope]),
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: exclusive_time_s,
min_call_time: duration_s,
max_call_time: duration_s
}
def transform(:error, type: type, error_count: error_count),
do: [
%Metric{
name: "Errors/all#{type}",
call_count: error_count
},
%Metric{
name: :"Errors/all",
call_count: error_count
}
]
def transform(:error, error_count: error_count),
do: %Metric{
name: :"Errors/all",
call_count: error_count
}
def transform(:memory, mb: memory_mb),
do: %Metric{
name: :"Memory/Physical",
call_count: 1,
total_call_time: memory_mb,
min_call_time: memory_mb,
max_call_time: memory_mb
}
def transform(:cpu, utilization: utilization),
do: %Metric{
name: :"CPU/User Time",
call_count: 1,
total_call_time: utilization,
min_call_time: utilization,
max_call_time: utilization
}
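# Apdex is encoded in the standard metric fields: satisfying counts map to
# call_count, tolerating to total_call_time, frustrating to
# total_exclusive_time; min/max carry the configured apdex threshold.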
def transform(:apdex, apdex: :satisfying, threshold: t),
do: %Metric{name: :Apdex, call_count: 1, min_call_time: t, max_call_time: t}
def transform(:apdex, apdex: :tolerating, threshold: t),
do: %Metric{name: :Apdex, total_call_time: 1, min_call_time: t, max_call_time: t}
def transform(:apdex, apdex: :frustrating, threshold: t),
do: %Metric{name: :Apdex, total_exclusive_time: 1, min_call_time: t, max_call_time: t}
def transform({:supportability, :error_event}, error_count: error_count),
do: [
%Metric{
name: :"Supportability/Events/TransactionError/Sent",
call_count: error_count
},
%Metric{
name: :"Supportability/Events/TransactionError/Seen",
call_count: error_count
}
]
def transform({:supportability, :infinite_tracing}, spans_seen: spans_seen),
do: [
%Metric{
name: :"Supportability/InfiniteTracing/Span/Seen",
call_count: spans_seen
}
]
def transform({:supportability, :infinite_tracing}, harvest_size: harvest_size),
do: [
%Metric{
name: :"Supportability/InfiniteTracing/Span/Sent",
call_count: harvest_size
},
%Metric{
name: :"Supportability/Elixir/TelemetrySdk/Harvest/Span",
call_count: 1
},
%Metric{
name: :"Supportability/Harvest",
call_count: 1
}
]
def transform({:supportability, harvester},
events_seen: events_seen,
reservoir_size: reservoir_size
),
do: [
%Metric{
name: join(["Supportability/Elixir/Collector/HarvestSeen", harvester]),
call_count: 1,
total_call_time: events_seen
},
%Metric{
name: join(["Supportability/EventHarvest", harvester, "HarvestLimit"]),
call_count: 1,
total_call_time: reservoir_size
}
]
def transform({:supportability, harvester}, harvest_size: harvest_size),
do: [
%Metric{
name: join(["Supportability/Elixir/Collector/HarvestSize", harvester]),
call_count: 1,
total_call_time: harvest_size
},
%Metric{
name: :"Supportability/Harvest",
call_count: 1
}
]
def transform({:supportability, :agent, metric}, value: value),
do: %Metric{
name: join(["Supportability/ElixirAgent", metric]),
call_count: 1,
total_call_time: value,
min_call_time: value,
max_call_time: value
}
def transform({:supportability, :collector}, status: status),
do: %Metric{
name: join(["Supportability/Agent/Collector/HTTPError", status]),
call_count: 1
}
def transform(:supportability, [:trace_context, :accept, :success]),
do: %Metric{
name: :"Supportability/TraceContext/Accept/Success",
call_count: 1
}
def transform(:supportability, [:trace_context, :accept, :exception]),
do: %Metric{
name: :"Supportability/TraceContext/Accept/Exception",
call_count: 1
}
def transform(:supportability, [:trace_context, :tracestate, :non_new_relic]),
do: %Metric{
name: :"Supportability/TraceContext/TraceState/NoNrEntry",
call_count: 1
}
def transform(:supportability, [:trace_context, :tracestate, :invalid]),
do: %Metric{
name: :"Supportability/TraceContext/TraceState/Parse/Exception",
call_count: 1
}
def transform(:supportability, [:trace_context, :traceparent, :invalid]),
do: %Metric{
name: :"Supportability/TraceContext/TraceParent/Parse/Exception",
call_count: 1
}
def transform(:supportability, [:dt, :accept, :success]),
do: %Metric{
name: :"Supportability/DistributedTrace/AcceptPayload/Success",
call_count: 1
}
def transform(:supportability, [:dt, :accept, :parse_error]),
do: %Metric{
name: :"Supportability/DistributedTrace/AcceptPayload/ParseException",
call_count: 1
}
def transform(:supportability, [:transaction, :missing_attributes]),
do: %Metric{
name: :"Supportability/Transaction/MissingAttributes",
call_count: 1
}
def transform(:queue_time, duration_s: duration_s),
do: %Metric{
name: "WebFrontend/QueueTime",
call_count: 1,
total_call_time: duration_s,
total_exclusive_time: duration_s,
min_call_time: duration_s,
max_call_time: duration_s
}
defp join(segments), do: NewRelic.Util.metric_join(segments)
end
| 29.385159 | 90 | 0.603355 |
7353cb6e0627df31f41c8ae4f54130ff2f8e5c88 | 1,185 | exs | Elixir | exercism/elixir/accumulate/accumulate_test.exs | GimliLongBow/exercises | e06517eacccd37a889c5d68a702de7ffb7f4bf37 | [
"MIT"
] | 2 | 2017-05-19T18:31:38.000Z | 2017-05-19T18:31:41.000Z | exercism/elixir/accumulate/accumulate_test.exs | GimliLongBow/exercises | e06517eacccd37a889c5d68a702de7ffb7f4bf37 | [
"MIT"
] | null | null | null | exercism/elixir/accumulate/accumulate_test.exs | GimliLongBow/exercises | e06517eacccd37a889c5d68a702de7ffb7f4bf37 | [
"MIT"
] | null | null | null | if !System.get_env("EXERCISM_TEST_EXAMPLES") do
Code.load_file("accumulate.exs", __DIR__)
end
ExUnit.start
ExUnit.configure exclude: :pending, trace: true
defmodule AccumulateTest do
use ExUnit.Case
test "accumulate empty list" do
assert Accumulate.accumulate([], fn(n) -> n * n end) == []
end
# @tag :pending
test "accumulate square numbers" do
assert Accumulate.accumulate([1, 2, 3], fn(n) -> n * n end) == [1, 4, 9]
end
# @tag :pending
test "accumulate upcased strings" do
fun = fn(w) -> String.upcase(w) end
assert Accumulate.accumulate(["hello", "world"], fun) == ["HELLO", "WORLD"]
end
# @tag :pending
test "accumulate reversed strings" do
fun = fn(w) -> String.reverse(w) end
words = ~w(the quick brown fox etc)
expected = ["eht", "kciuq", "nworb", "xof", "cte"]
assert Accumulate.accumulate(words, fun) == expected
end
# @tag :pending
test "accumulate recursively" do
chars = ~w(a b c)
nums = ~w(1 2 3)
fun = fn(c) -> for num <- nums, do: c <> num end
expected = [["a1", "a2", "a3"], ["b1", "b2", "b3"], ["c1", "c2", "c3"]]
assert Accumulate.accumulate(chars, fun) == expected
end
end
| 27.55814 | 79 | 0.619409 |
7353ee1fc387e78d06f6345cebb210da82ec59ac | 1,767 | exs | Elixir | apps/nerves_hub_www/config/release.exs | chrisdambrosio/nerves_hub_web | 91b6b27bb54ef61864a8f77ac91a8a24451b382d | [
"Apache-2.0"
] | 111 | 2018-07-25T01:07:51.000Z | 2022-01-25T17:03:01.000Z | apps/nerves_hub_www/config/release.exs | chrisdambrosio/nerves_hub_web | 91b6b27bb54ef61864a8f77ac91a8a24451b382d | [
"Apache-2.0"
] | 361 | 2018-07-22T12:53:00.000Z | 2022-03-31T18:50:34.000Z | apps/nerves_hub_www/config/release.exs | chrisdambrosio/nerves_hub_web | 91b6b27bb54ef61864a8f77ac91a8a24451b382d | [
"Apache-2.0"
] | 54 | 2018-08-26T02:58:04.000Z | 2022-03-09T10:12:19.000Z | import Config
logger_level = System.get_env("LOG_LEVEL", "warn") |> String.to_atom()
config :logger, level: logger_level
host = System.fetch_env!("HOST")
port = 80
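# SYNC_NODES_OPTIONAL is expected to be a space-separated list of node names;
# the resulting atoms feed the :kernel sync_nodes_optional setting below.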
sync_nodes_optional =
case System.fetch_env("SYNC_NODES_OPTIONAL") do
{:ok, sync_nodes_optional} ->
sync_nodes_optional
|> String.trim()
|> String.split(" ")
|> Enum.map(&String.to_atom/1)
:error ->
[]
end
config :kernel,
sync_nodes_optional: sync_nodes_optional,
sync_nodes_timeout: 5000,
inet_dist_listen_min: 9100,
inet_dist_listen_max: 9155
if rollbar_access_token = System.get_env("ROLLBAR_ACCESS_TOKEN") do
config :rollbax, access_token: rollbar_access_token
else
config :rollbax, enabled: false
end
config :nerves_hub_web_core, NervesHubWebCore.Firmwares.Upload.S3,
bucket: System.fetch_env!("S3_BUCKET_NAME")
config :nerves_hub_web_core, NervesHubWebCore.Workers.FirmwaresTransferS3Ingress,
bucket: System.fetch_env!("S3_LOG_BUCKET_NAME")
config :ex_aws, region: System.fetch_env!("AWS_REGION")
config :nerves_hub_www, NervesHubWWWWeb.Endpoint,
secret_key_base: System.fetch_env!("SECRET_KEY_BASE"),
live_view: [signing_salt: System.fetch_env!("LIVE_VIEW_SIGNING_SALT")]
config :nerves_hub_web_core, NervesHubWebCore.Mailer,
adapter: Bamboo.SMTPAdapter,
server: System.fetch_env!("SES_SERVER"),
port: System.fetch_env!("SES_PORT"),
username: System.fetch_env!("SMTP_USERNAME"),
password: System.fetch_env!("SMTP_PASSWORD")
config :nerves_hub_web_core,
host: host,
port: port,
from_email: System.get_env("FROM_EMAIL", "[email protected]"),
allow_signups?: System.get_env("ALLOW_SIGNUPS", "true") |> String.to_atom()
config :nerves_hub_www, NervesHubWWWWeb.Endpoint, url: [host: host, port: port]
| 29.45 | 81 | 0.753254 |
7353faebcd60e47ca2607e577bca987a0d85f8b1 | 968 | ex | Elixir | lib/util.ex | jonasrichard/iris | eb4547ced7f7ff9305a4edfa1c32e8d45fa2aa00 | [
"Apache-2.0"
] | 1 | 2017-03-31T09:26:21.000Z | 2017-03-31T09:26:21.000Z | lib/util.ex | jonasrichard/iris | eb4547ced7f7ff9305a4edfa1c32e8d45fa2aa00 | [
"Apache-2.0"
] | 1 | 2017-05-03T06:30:09.000Z | 2017-05-03T06:30:09.000Z | lib/util.ex | jonasrichard/iris | eb4547ced7f7ff9305a4edfa1c32e8d45fa2aa00 | [
"Apache-2.0"
] | null | null | null | defmodule Iris.Util do
def now_to_utc() do
now_to_utc(:os.timestamp())
end
def now_to_utc(now) do
{{year, month, day}, {hour, minute, second}} = :calendar.now_to_datetime(now)
[
pad(year),
"-",
pad(month),
"-",
pad(day),
"T",
pad(hour),
":",
pad(minute),
":",
pad(second)
]
|> Enum.join()
end
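# Maps a UUID onto one of 50 partitions by reading its first 8 hex digits as
# an integer.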
def uuid_to_partition(uuid) do
uuid |> String.slice(0, 8) |> String.to_integer(16) |> rem(50)
end
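# struct_to_json/1 and json_to_struct/1 round-trip a struct through JSON by
# keeping its module name under the "_struct_" key.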
def struct_to_json(struct) do
struct
|> Map.from_struct()
|> Map.put(:_struct_, struct.__struct__)
|> Jason.encode!()
end
def json_to_struct(json) do
map = Jason.decode!(json, keys: :atoms)
type = map._struct_ |> String.to_atom()
Map.put(map, :__struct__, type) |> Map.delete(:_struct_)
end
defp pad(i) when i > 9 do
Integer.to_string(i)
end
defp pad(i) do
Integer.to_string(i) |> String.pad_leading(2, "0")
end
end
| 19.36 | 81 | 0.573347 |
73541d67297a4f35890949acc6387201a34f9c6a | 1,764 | ex | Elixir | lib/logger/lib/logger/watcher.ex | bsmr-erlang/elixir | 0e72d4839cda97edce75ca0c537555ce4ead7a6a | [
"Apache-2.0"
] | null | null | null | lib/logger/lib/logger/watcher.ex | bsmr-erlang/elixir | 0e72d4839cda97edce75ca0c537555ce4ead7a6a | [
"Apache-2.0"
] | null | null | null | lib/logger/lib/logger/watcher.ex | bsmr-erlang/elixir | 0e72d4839cda97edce75ca0c537555ce4ead7a6a | [
"Apache-2.0"
] | null | null | null | defmodule Logger.Watcher do
@moduledoc false
require Logger
use GenServer
@doc """
Starts a watcher server.
This is useful when there is a need to start a handler
outside of the handler supervision tree.
"""
def start_link(triplet) do
GenServer.start_link(__MODULE__, triplet)
end
## Callbacks
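# init/1 removes any previously installed copy of the handler, then installs
# it with :gen_event.add_sup_handler/3 so handler exits are reported back to
# this process as :gen_event_EXIT messages.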
@doc false
def init({mod, handler, args}) do
case :gen_event.delete_handler(mod, handler, :ok) do
{:error, :module_not_found} ->
res = :gen_event.add_sup_handler(mod, handler, args)
do_init(res, mod, handler)
_ ->
init({mod, handler, args})
end
end
defp do_init(res, mod, handler) do
case res do
:ok ->
{:ok, {mod, handler}}
{:error, :ignore} ->
# Can't return :ignore as a transient child under a one_for_one.
# Instead return ok and then immediately exit normally - using a fake
# message.
send(self(), {:gen_event_EXIT, handler, :normal})
{:ok, {mod, handler}}
{:error, reason} ->
{:stop, reason}
end
end
@doc false
def handle_info({:gen_event_EXIT, handler, reason}, {_, handler} = state)
when reason in [:normal, :shutdown] do
{:stop, reason, state}
end
def handle_info({:gen_event_EXIT, handler, reason}, {mod, handler} = state) do
message = [
":gen_event handler ",
inspect(handler),
" installed at ",
inspect(mod),
?\n,
"** (exit) ",
format_exit(reason)
]
_ = Logger.error(message)
{:stop, reason, state}
end
def handle_info(_msg, state) do
{:noreply, state}
end
defp format_exit({:EXIT, reason}), do: Exception.format_exit(reason)
defp format_exit(reason), do: Exception.format_exit(reason)
end
| 23.210526 | 80 | 0.615646 |
735445661704ea789758d33ddfecb2ef0487e562 | 5,428 | ex | Elixir | clients/tpu/lib/google_api/tpu/v1/model/node.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/tpu/lib/google_api/tpu/v1/model/node.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/tpu/lib/google_api/tpu/v1/model/node.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.TPU.V1.Model.Node do
@moduledoc """
A TPU instance.
## Attributes
* `acceleratorType` (*type:* `String.t`, *default:* `nil`) - The type of hardware accelerators associated with this node.
Required.
* `cidrBlock` (*type:* `String.t`, *default:* `nil`) - The CIDR block that the TPU node will use when selecting an IP address.
This CIDR block must be a /29 block; the Compute Engine networks API
forbids a smaller block, and using a larger block would be wasteful (a
node can only consume one IP address). Errors will occur if the CIDR block
has already been used for a currently existing TPU node, the CIDR block
conflicts with any subnetworks in the user's provided network, or the
provided network is peered with another network that is using that CIDR
block.
Required.
* `createTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The time when the node was created.
* `description` (*type:* `String.t`, *default:* `nil`) - The user-supplied description of the TPU. Maximum of 512 characters.
* `health` (*type:* `String.t`, *default:* `nil`) - The health status of the TPU node.
* `healthDescription` (*type:* `String.t`, *default:* `nil`) - Output only. If this field is populated, it contains a description of why the TPU Node
is unhealthy.
* `ipAddress` (*type:* `String.t`, *default:* `nil`) - Output only. DEPRECATED! Use network_endpoints instead.
The network address for the TPU Node as visible to Compute Engine
instances.
* `labels` (*type:* `map()`, *default:* `nil`) - Resource labels to represent user-provided metadata.
* `name` (*type:* `String.t`, *default:* `nil`) - Output only. The immutable name of the TPU
* `network` (*type:* `String.t`, *default:* `nil`) - The name of a network they wish to peer the TPU node to. It must be a
preexisting Compute Engine network inside of the project on which this API
has been activated. If none is provided, "default" will be used.
* `networkEndpoints` (*type:* `list(GoogleApi.TPU.V1.Model.NetworkEndpoint.t)`, *default:* `nil`) - Output only. The network endpoints where TPU workers can be accessed and
sent work. It is recommended that Tensorflow clients of the node reach out
to the 0th entry in this map first.
* `port` (*type:* `String.t`, *default:* `nil`) - Output only. DEPRECATED! Use network_endpoints instead.
The network port for the TPU Node as visible to Compute Engine instances.
* `schedulingConfig` (*type:* `GoogleApi.TPU.V1.Model.SchedulingConfig.t`, *default:* `nil`) -
* `serviceAccount` (*type:* `String.t`, *default:* `nil`) - Output only. The service account used to run the tensor flow services within the node.
To share resources, including Google Cloud Storage data, with the
Tensorflow job running in the Node, this account must have permissions to
that data.
* `state` (*type:* `String.t`, *default:* `nil`) - Output only. The current state for the TPU Node.
* `tensorflowVersion` (*type:* `String.t`, *default:* `nil`) - The version of Tensorflow running in the Node.
Required.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:acceleratorType => String.t(),
:cidrBlock => String.t(),
:createTime => DateTime.t(),
:description => String.t(),
:health => String.t(),
:healthDescription => String.t(),
:ipAddress => String.t(),
:labels => map(),
:name => String.t(),
:network => String.t(),
:networkEndpoints => list(GoogleApi.TPU.V1.Model.NetworkEndpoint.t()),
:port => String.t(),
:schedulingConfig => GoogleApi.TPU.V1.Model.SchedulingConfig.t(),
:serviceAccount => String.t(),
:state => String.t(),
:tensorflowVersion => String.t()
}
field(:acceleratorType)
field(:cidrBlock)
field(:createTime, as: DateTime)
field(:description)
field(:health)
field(:healthDescription)
field(:ipAddress)
field(:labels, type: :map)
field(:name)
field(:network)
field(:networkEndpoints, as: GoogleApi.TPU.V1.Model.NetworkEndpoint, type: :list)
field(:port)
field(:schedulingConfig, as: GoogleApi.TPU.V1.Model.SchedulingConfig)
field(:serviceAccount)
field(:state)
field(:tensorflowVersion)
end
defimpl Poison.Decoder, for: GoogleApi.TPU.V1.Model.Node do
def decode(value, options) do
GoogleApi.TPU.V1.Model.Node.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.TPU.V1.Model.Node do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 48.035398 | 176 | 0.677966 |
73548a8a189527d067f079077380611f2ffbe1e9 | 116 | exs | Elixir | test/speak_ex_test.exs | fossabot/speak_ex | 71f7c482ce58d607c5cdc7e5a09bc36abed5ea3d | [
"MIT"
] | 47 | 2015-10-02T19:39:21.000Z | 2022-03-13T17:39:19.000Z | test/speak_ex_test.exs | nicolasva/speak_ex | db9567f44f16920fad80059b8534de7539bb92ab | [
"MIT"
] | 2 | 2017-03-21T01:52:17.000Z | 2017-10-18T08:01:44.000Z | test/speak_ex_test.exs | nicolasva/speak_ex | db9567f44f16920fad80059b8534de7539bb92ab | [
"MIT"
] | 5 | 2017-10-17T11:41:44.000Z | 2021-01-20T19:29:26.000Z | defmodule SpeakExTest do
use ExUnit.Case
doctest SpeakEx
test "the truth" do
assert 1 + 1 == 2
end
end
| 12.888889 | 24 | 0.672414 |
7354978d21b1f11581e0c72ffbd92658293f1939 | 1,005 | exs | Elixir | mix.exs | liberza/saxy | e15e48fca70c948568b28b9178f8d2bb5ef01e1f | [
"MIT"
] | null | null | null | mix.exs | liberza/saxy | e15e48fca70c948568b28b9178f8d2bb5ef01e1f | [
"MIT"
] | null | null | null | mix.exs | liberza/saxy | e15e48fca70c948568b28b9178f8d2bb5ef01e1f | [
"MIT"
] | null | null | null | defmodule Saxy.MixProject do
use Mix.Project
@version "0.9.1"
def project() do
[
app: :saxy,
version: @version,
elixir: "~> 1.3",
consolidate_protocols: Mix.env() != :test,
description: description(),
deps: deps(),
package: package(),
name: "Saxy",
docs: docs()
]
end
def application(), do: []
defp description() do
"Saxy is an XML parser and encoder in Elixir that focuses on speed and standard compliance."
end
defp package() do
[
maintainers: ["Cẩm Huỳnh"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/qcam/saxy"}
]
end
defp deps() do
[
{:ex_doc, "~> 0.16", only: :dev, runtime: false},
{:stream_data, "~> 0.4.2", only: :test}
]
end
defp docs() do
[
main: "Saxy",
extras: [
"guides/getting-started-with-sax.md"
],
source_ref: "v#{@version}",
source_url: "https://github.com/qcam/saxy"
]
end
end
| 19.326923 | 96 | 0.541294 |
73549fb3a89eb541a1fcc28dd67259f2868f6309 | 2,164 | exs | Elixir | config/prod.exs | blvdgroup/crater | 78d03de2eac73d90148df6c5d2d03e99b9b5ccb7 | [
"Apache-2.0"
] | 1 | 2018-03-13T08:15:50.000Z | 2018-03-13T08:15:50.000Z | config/prod.exs | blvdgroup/crater | 78d03de2eac73d90148df6c5d2d03e99b9b5ccb7 | [
"Apache-2.0"
] | 1 | 2018-03-17T15:45:26.000Z | 2018-03-17T15:45:26.000Z | config/prod.exs | blvdgroup/crater | 78d03de2eac73d90148df6c5d2d03e99b9b5ccb7 | [
"Apache-2.0"
] | 1 | 2017-08-30T16:13:09.000Z | 2017-08-30T16:13:09.000Z | use Mix.Config
# For production, we often load configuration from external
# sources, such as your system environment. For this reason,
# you won't find the :http configuration below, but set inside
# CraterWeb.Endpoint.init/2 when load_from_system_env is
# true. Any dynamic configuration should be done there.
#
# Don't forget to configure the url host to something meaningful,
# Phoenix uses this information when generating URLs.
#
# Finally, we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the mix phx.digest task
# which you typically run after static files are built.
config :crater, CraterWeb.Endpoint,
load_from_system_env: true,
url: [host: "example.com", port: 80],
cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :crater, CraterWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [:inet6,
# port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :crater, CraterWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :crater, CraterWeb.Endpoint, server: true
#
# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
| 33.292308 | 67 | 0.720425 |
7354daf77d06a8731b5ea8eaf29cc9292f0c9f69 | 287 | ex | Elixir | lib/phoenix_sample_web/views/layout_view.ex | gotoeveryone/phoenix_sample | 8e53f1d1a0c9bf37e474755c60d06f3cb578ae7f | [
"MIT"
] | null | null | null | lib/phoenix_sample_web/views/layout_view.ex | gotoeveryone/phoenix_sample | 8e53f1d1a0c9bf37e474755c60d06f3cb578ae7f | [
"MIT"
] | null | null | null | lib/phoenix_sample_web/views/layout_view.ex | gotoeveryone/phoenix_sample | 8e53f1d1a0c9bf37e474755c60d06f3cb578ae7f | [
"MIT"
] | null | null | null | defmodule PhoenixSampleWeb.LayoutView do
use PhoenixSampleWeb, :view
# Phoenix LiveDashboard is available only in development by default,
# so we instruct Elixir to not warn if the dashboard route is missing.
@compile {:no_warn_undefined, {Routes, :live_dashboard_path, 2}}
end
| 35.875 | 72 | 0.783972 |
7354fe688e7c70a34f15978554bf2bbb7d38893b | 28,194 | ex | Elixir | lib/phoenix_live_view/test/client_proxy.ex | davydog187/phoenix_live_view | c15d2811a7e91f71f448f4d9d1b9a520e9d0bf91 | [
"MIT"
] | null | null | null | lib/phoenix_live_view/test/client_proxy.ex | davydog187/phoenix_live_view | c15d2811a7e91f71f448f4d9d1b9a520e9d0bf91 | [
"MIT"
] | null | null | null | lib/phoenix_live_view/test/client_proxy.ex | davydog187/phoenix_live_view | c15d2811a7e91f71f448f4d9d1b9a520e9d0bf91 | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveViewTest.ClientProxy do
@moduledoc false
use GenServer
defstruct session_token: nil,
static_token: nil,
module: nil,
endpoint: nil,
pid: nil,
proxy: nil,
topic: nil,
ref: nil,
rendered: nil,
children: [],
child_statics: %{},
id: nil,
connect_params: %{},
connect_info: %{}
alias Phoenix.LiveViewTest.{ClientProxy, DOM, Element, View}
@doc """
Encoding used by the Channel serializer.
"""
def encode!(msg), do: msg
@doc """
Starts a client proxy.
## Options
* `:caller` - the required `{pid, ref}` pair identifying the caller.
* `:proxy` - the required root `%Phoenix.LiveViewTest.ClientProxy{}`.
* `:html` - the required string of HTML for the document.
* `:session` - the required session map.
* `:url` - the required URL the view is mounted at.
* `:test_supervisor` - the required supervisor under which channel processes are started.
"""
def start_link(opts) do
GenServer.start_link(__MODULE__, opts)
end
def init(opts) do
# Since we are always running in the test client,
# we will disable our own logging and let the client
# do the job.
Logger.disable(self())
{_caller_pid, _caller_ref} = caller = Keyword.fetch!(opts, :caller)
root_html = Keyword.fetch!(opts, :html)
root_view = Keyword.fetch!(opts, :proxy)
session = Keyword.fetch!(opts, :session)
url = Keyword.fetch!(opts, :url)
test_supervisor = Keyword.fetch!(opts, :test_supervisor)
state = %{
join_ref: 0,
ref: 0,
caller: caller,
views: %{},
ids: %{},
pids: %{},
replies: %{},
root_view: nil,
html: root_html,
session: session,
test_supervisor: test_supervisor,
url: url
}
try do
{root_view, rendered} = mount_view(state, root_view, url)
new_state =
state
|> Map.put(:root_view, root_view)
|> put_view(root_view, rendered)
|> detect_added_or_removed_children(root_view, root_html)
send_caller(new_state, {:ok, build_view(root_view), DOM.to_html(new_state.html)})
{:ok, new_state}
catch
:throw, {:stop, {:shutdown, reason}, _state} ->
send_caller(state, {:error, reason})
:ignore
:throw, {:stop, reason, _} ->
Process.unlink(elem(caller, 0))
{:stop, reason}
end
end
defp build_view(%ClientProxy{} = proxy) do
%{id: id, ref: ref, topic: topic, module: module, endpoint: endpoint, pid: pid} = proxy
%View{id: id, pid: pid, proxy: {ref, topic, self()}, module: module, endpoint: endpoint}
end
defp mount_view(state, view, url) do
ref = make_ref()
case start_supervised_channel(state, view, ref, url) do
{:ok, pid} ->
mon_ref = Process.monitor(pid)
receive do
{^ref, {:ok, %{rendered: rendered}}} ->
Process.demonitor(mon_ref, [:flush])
{%{view | pid: pid}, rendered}
{^ref, {:error, %{live_redirect: opts}}} ->
throw(stop_redirect(state, view.topic, {:live_redirect, opts}))
{^ref, {:error, %{redirect: opts}}} ->
throw(stop_redirect(state, view.topic, {:redirect, opts}))
{^ref, {:error, reason}} ->
throw({:stop, reason, state})
{:DOWN, ^mon_ref, _, _, reason} ->
throw({:stop, reason, state})
end
{:error, reason} ->
throw({:stop, reason, state})
end
end
defp start_supervised_channel(state, view, ref, url) do
socket = %Phoenix.Socket{
transport_pid: self(),
serializer: __MODULE__,
channel: view.module,
endpoint: view.endpoint,
private: %{connect_info: Map.put_new(view.connect_info, :session, state.session)},
topic: view.topic,
join_ref: state.join_ref
}
params = %{
"session" => view.session_token,
"static" => view.static_token,
"url" => url,
"params" => view.connect_params,
"caller" => state.caller,
"joins" => 0
}
spec = %{
id: make_ref(),
start: {Phoenix.LiveView.Channel, :start_link, [{params, {self(), ref}, socket}]},
restart: :temporary
}
Supervisor.start_child(state.test_supervisor, spec)
end
def handle_info({:sync_children, topic, from}, state) do
view = fetch_view_by_topic!(state, topic)
children =
Enum.flat_map(view.children, fn {id, _session} ->
case fetch_view_by_id(state, id) do
{:ok, child} -> [build_view(child)]
:error -> []
end
end)
GenServer.reply(from, {:ok, children})
{:noreply, state}
end
def handle_info({:sync_render, operation, topic_or_element, from}, state) do
view = fetch_view_by_topic!(state, proxy_topic(topic_or_element))
result = state |> root(view) |> select_node(topic_or_element)
reply =
case {operation, result} do
{:find_element, {:ok, node}} -> {:ok, node}
{:find_element, {:error, _, message}} -> {:raise, ArgumentError.exception(message)}
{:has_element?, {:error, :none, _}} -> {:ok, false}
{:has_element?, _} -> {:ok, true}
end
GenServer.reply(from, reply)
{:noreply, state}
end
def handle_info(
%Phoenix.Socket.Message{
event: "redirect",
topic: _topic,
payload: %{to: _to} = opts
},
state
) do
stop_redirect(state, state.root_view.topic, {:redirect, opts})
end
def handle_info(
%Phoenix.Socket.Message{
event: "live_patch",
topic: _topic,
payload: %{to: _to} = opts
},
state
) do
send_patch(state, state.root_view.topic, opts)
{:noreply, state}
end
def handle_info(
%Phoenix.Socket.Message{
event: "live_redirect",
topic: _topic,
payload: %{to: _to} = opts
},
state
) do
stop_redirect(state, state.root_view.topic, {:live_redirect, opts})
end
def handle_info(
%Phoenix.Socket.Message{
event: "diff",
topic: topic,
payload: diff
},
state
) do
{:noreply, merge_rendered(state, topic, diff)}
end
def handle_info(%Phoenix.Socket.Reply{} = reply, state) do
%{ref: ref, payload: payload, topic: topic} = reply
case fetch_reply(state, ref) do
{:ok, {from, _pid}} ->
state = drop_reply(state, ref)
case payload do
%{live_redirect: %{to: _to} = opts} ->
stop_redirect(state, topic, {:live_redirect, opts})
%{live_patch: %{to: _to} = opts} ->
send_patch(state, topic, opts)
{:noreply, render_reply(reply, from, state)}
%{redirect: %{to: _to} = opts} ->
stop_redirect(state, topic, {:redirect, opts})
%{} ->
{:noreply, render_reply(reply, from, state)}
end
:error ->
{:noreply, state}
end
end
def handle_info({:DOWN, _ref, :process, pid, reason}, state) do
case fetch_view_by_pid(state, pid) do
{:ok, _view} ->
{:stop, reason, state}
:error ->
{:noreply, state}
end
end
def handle_info({:socket_close, pid, reason}, state) do
{:ok, view} = fetch_view_by_pid(state, pid)
{:noreply, drop_view_by_id(state, view.id, reason)}
end
def handle_call({:live_children, topic}, from, state) do
view = fetch_view_by_topic!(state, topic)
:ok = Phoenix.LiveView.Channel.ping(view.pid)
send(self(), {:sync_children, view.topic, from})
{:noreply, state}
end
def handle_call({:render, operation, topic_or_element}, from, state) do
topic = proxy_topic(topic_or_element)
%{pid: pid} = fetch_view_by_topic!(state, topic)
:ok = Phoenix.LiveView.Channel.ping(pid)
send(self(), {:sync_render, operation, topic_or_element, from})
{:noreply, state}
end
def handle_call({:render_event, topic_or_element, type, value}, from, state) do
result =
case topic_or_element do
{topic, event} ->
view = fetch_view_by_topic!(state, topic)
{view, nil, event, stringify(value, & &1)}
%Element{} = element ->
view = fetch_view_by_topic!(state, proxy_topic(element))
root = root(state, view)
with {:ok, node} <- select_node(root, element),
:ok <- maybe_enabled(type, node, element),
{:ok, event} <- maybe_event(type, node, element),
{:ok, extra} <- maybe_values(type, node, element),
{:ok, cid} <- maybe_cid(root, node) do
{view, cid, event, DOM.deep_merge(extra, stringify_type(type, value))}
end
end
case result do
{view, cid, event, values} ->
payload = %{
"cid" => cid,
"type" => Atom.to_string(type),
"event" => event,
"value" => encode(type, values)
}
{:noreply, push_with_reply(state, from, view, "event", payload)}
{:patch, topic, path} ->
handle_call({:render_patch, topic, path}, from, state)
{:stop, topic, reason} ->
stop_redirect(state, topic, reason)
{:error, _, message} ->
{:reply, {:raise, ArgumentError.exception(message)}, state}
end
end
def handle_call({:render_patch, topic, path}, from, state) do
view = fetch_view_by_topic!(state, topic)
ref = to_string(state.ref + 1)
send(view.pid, %Phoenix.Socket.Message{
join_ref: state.join_ref,
topic: view.topic,
event: "link",
payload: %{"url" => path},
ref: ref
})
send_patch(state, state.root_view.topic, %{to: path})
{:noreply, put_reply(%{state | ref: state.ref + 1}, ref, from, view.pid)}
end
defp drop_view_by_id(state, id, reason) do
{:ok, view} = fetch_view_by_id(state, id)
push(state, view, "phx_leave", %{})
state =
Enum.reduce(view.children, state, fn {child_id, _child_session}, acc ->
drop_view_by_id(acc, child_id, reason)
end)
flush_replies(
%{
state
| ids: Map.delete(state.ids, view.id),
views: Map.delete(state.views, view.topic),
pids: Map.delete(state.pids, view.pid)
},
view.pid,
reason
)
end
defp flush_replies(state, pid, reason) do
Enum.reduce(state.replies, state, fn
{ref, {from, ^pid}}, acc ->
GenServer.reply(from, {:error, reason})
drop_reply(acc, ref)
{_ref, {_from, _pid}}, acc ->
acc
end)
end
defp fetch_reply(state, ref) do
Map.fetch(state.replies, ref)
end
defp put_reply(state, ref, from, pid) do
%{state | replies: Map.put(state.replies, ref, {from, pid})}
end
defp drop_reply(state, ref) do
%{state | replies: Map.delete(state.replies, ref)}
end
defp put_child(state, %ClientProxy{} = parent, id, session) do
update_in(state, [:views, parent.topic], fn %ClientProxy{} = parent ->
%ClientProxy{parent | children: [{id, session} | parent.children]}
end)
end
defp drop_child(state, %ClientProxy{} = parent, id, reason) do
state
|> update_in([:views, parent.topic], fn %ClientProxy{} = parent ->
new_children = Enum.reject(parent.children, fn {cid, _session} -> id == cid end)
%ClientProxy{parent | children: new_children}
end)
|> drop_view_by_id(id, reason)
end
defp verify_session(%ClientProxy{} = view) do
Phoenix.LiveView.Static.verify_session(view.endpoint, view.session_token, view.static_token)
end
defp put_view(state, %ClientProxy{pid: pid} = view, rendered) do
{:ok, %{view: module}} = verify_session(view)
new_view = %ClientProxy{view | module: module, proxy: self(), pid: pid, rendered: rendered}
Process.monitor(pid)
patch_view(
%{
state
| views: Map.put(state.views, new_view.topic, new_view),
pids: Map.put(state.pids, pid, new_view.topic),
ids: Map.put(state.ids, new_view.id, new_view.topic)
},
view,
DOM.render_diff(rendered)
)
end
defp patch_view(state, view, child_html) do
case DOM.patch_id(view.id, state.html, child_html) do
{new_html, [_ | _] = deleted_cids} ->
push(%{state | html: new_html}, view, "cids_destroyed", %{"cids" => deleted_cids})
{new_html, [] = _deleted_cids} ->
%{state | html: new_html}
end
end
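# Unlinking the caller before stopping lets the test process outlive the
# proxy shutdown and receive the redirect tuple instead of an exit.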
defp stop_redirect(%{caller: {pid, _}} = state, topic, {_kind, opts} = reason)
when is_binary(topic) do
send_caller(state, {:redirect, topic, opts})
Process.unlink(pid)
{:stop, {:shutdown, reason}, state}
end
defp fetch_view_by_topic!(state, topic), do: Map.fetch!(state.views, topic)
defp fetch_view_by_topic(state, topic), do: Map.fetch(state.views, topic)
defp fetch_view_by_pid(state, pid) when is_pid(pid) do
with {:ok, topic} <- Map.fetch(state.pids, pid) do
fetch_view_by_topic(state, topic)
end
end
defp fetch_view_by_id(state, id) do
with {:ok, topic} <- Map.fetch(state.ids, id) do
fetch_view_by_topic(state, topic)
end
end
defp render_reply(reply, from, state) do
%{payload: diff, topic: topic} = reply
new_state = merge_rendered(state, topic, diff)
case fetch_view_by_topic(new_state, topic) do
{:ok, view} ->
GenServer.reply(from, {:ok, new_state.html |> DOM.inner_html!(view.id) |> DOM.to_html()})
new_state
:error ->
new_state
end
end
defp merge_rendered(state, topic, %{diff: diff}), do: merge_rendered(state, topic, diff)
defp merge_rendered(%{html: html_before} = state, topic, %{} = diff) do
case diff do
%{title: new_title} -> send_caller(state, {:title, new_title})
%{} -> :noop
end
case fetch_view_by_topic(state, topic) do
{:ok, view} ->
rendered = DOM.deep_merge(view.rendered, diff)
new_view = %ClientProxy{view | rendered: rendered}
%{state | views: Map.update!(state.views, topic, fn _ -> new_view end)}
|> patch_view(new_view, DOM.render_diff(rendered))
|> detect_added_or_removed_children(new_view, html_before)
:error ->
state
end
end
defp detect_added_or_removed_children(state, view, html_before) do
new_state = recursive_detect_added_or_removed_children(state, view, html_before)
{:ok, new_view} = fetch_view_by_topic(new_state, view.topic)
ids_after =
new_state.html
|> DOM.all("[data-phx-view]")
|> DOM.all_attributes("id")
|> MapSet.new()
Enum.reduce(new_view.children, new_state, fn {id, _session}, acc ->
if id in ids_after do
acc
else
drop_child(acc, new_view, id, {:shutdown, :left})
end
end)
end
defp recursive_detect_added_or_removed_children(state, view, html_before) do
state.html
|> DOM.inner_html!(view.id)
|> DOM.find_live_views()
|> Enum.reduce(state, fn {id, session, static}, acc ->
case fetch_view_by_id(acc, id) do
{:ok, view} ->
patch_view(acc, view, DOM.inner_html!(html_before, view.id))
:error ->
static = static || Map.get(state.root_view.child_statics, id)
child_view = build_child(view, id: id, session_token: session, static_token: static)
{child_view, rendered} = mount_view(acc, child_view, state.url)
acc
|> put_view(child_view, rendered)
|> put_child(view, id, child_view.session_token)
|> recursive_detect_added_or_removed_children(child_view, acc.html)
end
end)
end
defp send_caller(%{caller: {pid, ref}}, msg) when is_pid(pid) do
send(pid, {ref, msg})
end
defp send_patch(state, topic, %{to: _to} = opts) do
send_caller(state, {:patch, topic, opts})
end
defp push(state, view, event, payload) do
ref = to_string(state.ref + 1)
send(view.pid, %Phoenix.Socket.Message{
join_ref: state.join_ref,
topic: view.topic,
event: event,
payload: payload,
ref: ref
})
%{state | ref: state.ref + 1}
end
defp push_with_reply(state, from, view, event, payload) do
ref = to_string(state.ref + 1)
state
|> push(view, event, payload)
|> put_reply(ref, from, view.pid)
end
def build(attrs) do
attrs_with_defaults =
attrs
|> Keyword.merge(topic: Phoenix.LiveView.Utils.random_id())
|> Keyword.put_new_lazy(:ref, fn -> make_ref() end)
struct(__MODULE__, attrs_with_defaults)
end
def build_child(%ClientProxy{ref: ref, proxy: proxy} = parent, attrs) do
attrs
|> Keyword.merge(
ref: ref,
proxy: proxy,
endpoint: parent.endpoint
)
|> build()
end
## Element helpers
defp proxy_topic(topic) when is_binary(topic), do: topic
defp proxy_topic(%{proxy: {_ref, topic, _pid}}), do: topic
defp root(state, view), do: DOM.by_id!(state.html, view.id)
defp select_node(root, %Element{selector: selector, text_filter: nil}) do
root
|> DOM.child_nodes()
|> DOM.maybe_one(selector)
end
defp select_node(root, %Element{selector: selector, text_filter: text_filter}) do
nodes =
root
|> DOM.child_nodes()
|> DOM.all(selector)
filtered_nodes = Enum.filter(nodes, &(DOM.to_text(&1) =~ text_filter))
case {nodes, filtered_nodes} do
{_, [filtered_node]} ->
{:ok, filtered_node}
{[], _} ->
{:error, :none,
"selector #{inspect(selector)} did not return any element within: \n\n" <>
DOM.inspect_html(root)}
{[node], []} ->
{:error, :none,
"selector #{inspect(selector)} did not match text filter #{inspect(text_filter)}, " <>
"got: \n\n#{DOM.inspect_html(node)}"}
{_, []} ->
{:error, :none,
"selector #{inspect(selector)} returned #{length(nodes)} elements " <>
"but none matched the text filter #{inspect(text_filter)}: \n\n" <>
DOM.inspect_html(nodes)}
{_, _} ->
{:error, :many,
"selector #{inspect(selector)} returned #{length(nodes)} elements " <>
"and #{length(filtered_nodes)} of them matched the text filter #{inspect(text_filter)}: \n\n " <>
DOM.inspect_html(filtered_nodes)}
end
end
defp select_node(root, _topic) do
{:ok, root}
end
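# phx-target may be a DOM id selector ("#id") or a literal component CID;
# both resolve to the integer cid sent with the event (nil targets the view
# itself).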
defp maybe_cid(_tree, nil) do
{:ok, nil}
end
defp maybe_cid(tree, node) do
case DOM.all_attributes(node, "phx-target") do
[] ->
{:ok, nil}
["#" <> _ = target] ->
with {:ok, target} <- DOM.maybe_one(tree, target, "phx-target") do
if cid = DOM.component_id(target) do
{:ok, String.to_integer(cid)}
else
{:ok, nil}
end
end
[maybe_integer] ->
case Integer.parse(maybe_integer) do
{cid, ""} ->
{:ok, cid}
_ ->
{:error, :invalid,
"expected phx-target to be either an ID or a CID, got: #{inspect(maybe_integer)}"}
end
end
end
defp maybe_event(:hook, node, %Element{event: event} = element) do
true = is_binary(event)
if DOM.attribute(node, "phx-hook") do
if DOM.attribute(node, "id") do
{:ok, event}
else
{:error, :invalid,
"element selected by #{inspect(element.selector)} for phx-hook does not have an ID"}
end
else
{:error, :invalid,
"element selected by #{inspect(element.selector)} does not have phx-hook attribute"}
end
end
# TODO: Remove this once deprecated paths have been removed
defp maybe_event(_, _, %{event: event}) when is_binary(event) do
{:ok, event}
end
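# Clicking an anchor fires its phx-click binding when present; otherwise the
# href is followed: data-phx-link="patch" yields a live patch,
# data-phx-link="redirect" a live redirect, and a plain link stops the proxy
# with a regular redirect.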
defp maybe_event(:click, {"a", _, _} = node, element) do
cond do
event = DOM.attribute(node, "phx-click") ->
{:ok, event}
to = DOM.attribute(node, "href") ->
case DOM.attribute(node, "data-phx-link") do
"patch" ->
{:patch, proxy_topic(element), to}
"redirect" ->
kind = DOM.attribute(node, "data-phx-link-state") || "push"
{:stop, proxy_topic(element), {:live_redirect, %{to: to, kind: String.to_atom(kind)}}}
nil ->
{:stop, proxy_topic(element), {:redirect, %{to: to}}}
end
true ->
{:error, :invalid,
"clicked link selected by #{inspect(element.selector)} does not have phx-click or href attributes"}
end
end
defp maybe_event(type, node, element) when type in [:keyup, :keydown] do
cond do
event = DOM.attribute(node, "phx-#{type}") ->
{:ok, event}
event = DOM.attribute(node, "phx-window-#{type}") ->
{:ok, event}
true ->
{:error, :invalid,
"element selected by #{inspect(element.selector)} does not have " <>
"phx-#{type} or phx-window-#{type} attributes"}
end
end
defp maybe_event(type, node, element) do
if event = DOM.attribute(node, "phx-#{type}") do
{:ok, event}
else
{:error, :invalid,
"element selected by #{inspect(element.selector)} does not have phx-#{type} attribute"}
end
end
defp maybe_enabled(_type, {tag, _, _}, %{form_data: form_data})
when tag != "form" and form_data != nil do
{:error, :invalid,
"a form element was given but the selected node is not a form, got #{inspect(tag)}}"}
end
defp maybe_enabled(type, node, element) do
if DOM.attribute(node, "disabled") do
{:error, :invalid,
"cannot #{type} element #{inspect(element.selector)} because it is disabled"}
else
:ok
end
end
defp maybe_values(:hook, _node, _element), do: {:ok, %{}}
defp maybe_values(type, {tag, _, _} = node, element) when type in [:change, :submit] do
if tag == "form" do
defaults =
node
|> DOM.all("input, select, textarea")
|> Enum.reverse()
|> Enum.reduce(%{}, &form_defaults/2)
case fill_in_map(Enum.to_list(element.form_data || %{}), "", node, []) do
{:ok, value} -> {:ok, DOM.deep_merge(defaults, value)}
{:error, _, _} = error -> error
end
else
{:error, :invalid, "phx-#{type} is only allowed in forms, got #{inspect(tag)}"}
end
end
defp maybe_values(_type, node, _element) do
{:ok, DOM.all_values(node)}
end
defp form_defaults(node, acc) do
cond do
DOM.attribute(node, "disabled") -> acc
name = DOM.attribute(node, "name") -> form_defaults(node, name, acc)
true -> acc
end
end
defp form_defaults({"select", _, _} = node, name, acc) do
options = DOM.all(node, "option")
all_selected =
if DOM.attribute(node, "multiple") do
Enum.filter(options, &DOM.attribute(&1, "selected"))
else
List.wrap(Enum.find(options, &DOM.attribute(&1, "selected")) || List.first(options))
end
all_selected
|> Enum.reverse()
|> Enum.reduce(acc, fn selected, acc ->
Plug.Conn.Query.decode_pair({name, DOM.attribute(selected, "value")}, acc)
end)
end
defp form_defaults({"textarea", _, [value]}, name, acc) do
Plug.Conn.Query.decode_pair({name, value}, acc)
end
defp form_defaults({"input", _, _} = node, name, acc) do
type = DOM.attribute(node, "type") || "text"
value = DOM.attribute(node, "value") || ""
cond do
type in ["radio", "checkbox"] ->
if DOM.attribute(node, "checked") do
Plug.Conn.Query.decode_pair({name, value}, acc)
else
acc
end
type in ["image", "submit"] ->
acc
true ->
Plug.Conn.Query.decode_pair({name, value}, acc)
end
end
defp fill_in_map([{key, value} | rest], prefix, node, acc) do
key = to_string(key)
case fill_in_type(value, fill_in_name(prefix, key), node) do
{:ok, value} -> fill_in_map(rest, prefix, node, [{key, value} | acc])
{:error, _, _} = error -> error
end
end
defp fill_in_map([], _prefix, _node, acc) do
{:ok, Map.new(acc)}
end
defp fill_in_type([{_, _} | _] = value, key, node), do: fill_in_map(value, key, node, [])
defp fill_in_type(%_{} = value, key, node), do: fill_in_value(value, key, node)
defp fill_in_type(%{} = value, key, node), do: fill_in_map(Map.to_list(value), key, node, [])
defp fill_in_type(value, key, node), do: fill_in_value(value, key, node)
@limited ["select", "multiple select", "checkbox", "radio", "hidden"]
@forbidden ["submit", "image"]
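# Values for the "limited" input types must match options already present in
# the DOM, while submit and image inputs are rejected because they are never
# submitted as values.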
defp fill_in_value(non_string_value, name, node) do
value = stringify(non_string_value, &to_string/1)
name = if is_list(value), do: name <> "[]", else: name
{types, dom_values} =
node
|> DOM.all("[name=#{inspect(name)}]:not([disabled])")
|> collect_values([], [])
limited? = Enum.all?(types, &(&1 in @limited))
cond do
calendar_value = calendar_value(types, non_string_value, name, node) ->
{:ok, calendar_value}
types == [] ->
{:error, :invalid,
"could not find non-disabled input, select or textarea with name #{inspect(name)} within:\n\n" <>
DOM.inspect_html(DOM.all(node, "[name]"))}
forbidden_type = Enum.find(types, &(&1 in @forbidden)) ->
{:error, :invalid,
"cannot provide value to #{inspect(name)} because #{forbidden_type} inputs are never submitted"}
forbidden_value = limited? && value |> List.wrap() |> Enum.find(&(&1 not in dom_values)) ->
{:error, :invalid,
"value for #{hd(types)} #{inspect(name)} must be one of #{inspect(dom_values)}, " <>
"got: #{inspect(forbidden_value)}"}
true ->
{:ok, value}
end
end
@calendar_fields ~w(year month day hour minute second)a
defp calendar_value([], %{calendar: _} = calendar_type, name, node) do
@calendar_fields
|> Enum.flat_map(fn field ->
string_field = Atom.to_string(field)
with value when not is_nil(value) <- Map.get(calendar_type, field),
{:ok, string_value} <- fill_in_value(value, name <> "[" <> string_field <> "]", node) do
[{string_field, string_value}]
else
_ -> []
end
end)
|> case do
[] -> nil
pairs -> Map.new(pairs)
end
end
defp calendar_value(_, _, _, _) do
nil
end
defp collect_values([{"textarea", _, _} | nodes], types, values) do
collect_values(nodes, ["textarea" | types], values)
end
defp collect_values([{"input", _, _} = node | nodes], types, values) do
type = DOM.attribute(node, "type") || "text"
if type in ["radio", "checkbox", "hidden"] do
value = DOM.attribute(node, "value") || ""
collect_values(nodes, [type | types], [value | values])
else
collect_values(nodes, [type | types], values)
end
end
defp collect_values([{"select", _, _} = node | nodes], types, values) do
options = node |> DOM.all("option") |> Enum.map(&(DOM.attribute(&1, "value") || ""))
if DOM.attribute(node, "multiple") do
collect_values(nodes, ["multiple select" | types], Enum.reverse(options, values))
else
collect_values(nodes, ["select" | types], Enum.reverse(options, values))
end
end
defp collect_values([_ | nodes], types, values) do
collect_values(nodes, types, values)
end
defp collect_values([], types, values) do
{types, Enum.reverse(values)}
end
defp fill_in_name("", name), do: name
defp fill_in_name(prefix, name), do: prefix <> "[" <> name <> "]"
defp encode(:form, value), do: Plug.Conn.Query.encode(value)
defp encode(_, value), do: value
defp stringify_type(:hook, value), do: stringify(value, & &1)
defp stringify_type(_, value), do: stringify(value, &to_string/1)
defp stringify(%{__struct__: _} = struct, fun),
do: stringify_value(struct, fun)
defp stringify(%{} = params, fun),
do: Enum.into(params, %{}, &stringify_kv(&1, fun))
defp stringify([{_, _} | _] = params, fun),
do: Enum.into(params, %{}, &stringify_kv(&1, fun))
defp stringify(params, fun) when is_list(params),
do: Enum.map(params, &stringify(&1, fun))
defp stringify(other, fun),
do: stringify_value(other, fun)
defp stringify_value(other, fun), do: fun.(other)
defp stringify_kv({k, v}, fun), do: {to_string(k), stringify(v, fun)}
end
| 29.277259 | 108 | 0.594098 |
735502335f6734a911f2a44f948d1b7cb437c705 | 630 | exs | Elixir | test/retry_backoff_test.exs | Badiapp/bolt_sips | ea5e1ae3295700a9f9b0324d26d953845da17050 | [
"Apache-2.0"
] | null | null | null | test/retry_backoff_test.exs | Badiapp/bolt_sips | ea5e1ae3295700a9f9b0324d26d953845da17050 | [
"Apache-2.0"
] | null | null | null | test/retry_backoff_test.exs | Badiapp/bolt_sips | ea5e1ae3295700a9f9b0324d26d953845da17050 | [
"Apache-2.0"
] | null | null | null | defmodule Rety.Backoff.Test do
use ExUnit.Case, async: true
import Stream
use Retry
setup_all do
{:ok, [conn: Bolt.Sips.conn()]}
end
test "retry retries execution for specified attempts using an invalid Cypher command",
context do
conn = context[:conn]
{elapsed, _} =
:timer.tc(fn ->
{:error, [code: "Neo.ClientError.Statement.SyntaxError", message: message]} =
retry with: lin_backoff(500, 1) |> take(5) do
Bolt.Sips.query(conn, "INVALID CYPHER")
end
assert message =~ "INVALID CYPHER"
end)
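# lin_backoff(500, 1) |> take(5) produces five constant 500 ms delays, so the
# elapsed time (reported by :timer.tc in microseconds) must be at least 2500 ms.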
assert elapsed / 1000 >= 2500
end
end
| 23.333333 | 88 | 0.619048 |
7355099d9f71e8eb65b6057a9859d4728458a36b | 1,507 | ex | Elixir | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/boolean_constraint.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/boolean_constraint.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/boolean_constraint.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.CloudResourceManager.V1.Model.BooleanConstraint do
@moduledoc """
A `Constraint` that is either enforced or not.
For example a constraint `constraints/compute.disableSerialPortAccess`.
If it is enforced on a VM instance, serial port connections will not be
opened to that instance.
## Attributes
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{}
end
defimpl Poison.Decoder, for: GoogleApi.CloudResourceManager.V1.Model.BooleanConstraint do
def decode(value, options) do
GoogleApi.CloudResourceManager.V1.Model.BooleanConstraint.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudResourceManager.V1.Model.BooleanConstraint do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.76087 | 89 | 0.771068 |
735552d5ccd28ad660098929592a8593852fc896 | 407 | ex | Elixir | lib/bike_brigade/stats/campaign_stats.ex | bikebrigade/dispatch | eb622fe4f6dab7c917d678d3d7a322a01f97da44 | [
"Apache-2.0"
] | 28 | 2021-10-11T01:53:53.000Z | 2022-03-24T17:45:55.000Z | lib/bike_brigade/stats/campaign_stats.ex | bikebrigade/dispatch | eb622fe4f6dab7c917d678d3d7a322a01f97da44 | [
"Apache-2.0"
] | 20 | 2021-10-21T08:12:31.000Z | 2022-03-31T13:35:53.000Z | lib/bike_brigade/stats/campaign_stats.ex | bikebrigade/dispatch | eb622fe4f6dab7c917d678d3d7a322a01f97da44 | [
"Apache-2.0"
] | null | null | null | defmodule BikeBrigade.Stats.CampaignStats do
use BikeBrigade.Schema
alias BikeBrigade.Delivery.Campaign
@primary_key false
schema "campaign_stats" do
field :task_count, :integer, default: 0
field :assigned_rider_count, :integer, default: 0
field :signed_up_rider_count, :integer, default: 0
field :total_distance, :integer, default: 0
belongs_to :campaign, Campaign
end
end
| 25.4375 | 54 | 0.7543 |
7355545af6755d5d0e56c8a8852df2bde853a829 | 197 | exs | Elixir | test/controllers/page_controller_test.exs | shawn-mcginty/phoenix-webpack-skeleton | 311559c860da97157d643d06cd2601aee81c6a3a | [
"MIT"
] | null | null | null | test/controllers/page_controller_test.exs | shawn-mcginty/phoenix-webpack-skeleton | 311559c860da97157d643d06cd2601aee81c6a3a | [
"MIT"
] | null | null | null | test/controllers/page_controller_test.exs | shawn-mcginty/phoenix-webpack-skeleton | 311559c860da97157d643d06cd2601aee81c6a3a | [
"MIT"
] | null | null | null | defmodule TimeVoice.PageControllerTest do
use TimeVoice.ConnCase
test "GET /", %{conn: conn} do
conn = get conn, "/"
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 21.888889 | 60 | 0.680203 |
73556d4a70ae7b5c5cfb28940cc68a6aa83dc986 | 3,862 | ex | Elixir | apps/edr_validations_consumer/lib/edr_validations_consumer/kafka/consumer.ex | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 8 | 2019-06-14T11:34:49.000Z | 2021-08-05T19:14:24.000Z | apps/edr_validations_consumer/lib/edr_validations_consumer/kafka/consumer.ex | edenlabllc/ehealth.api.public | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 1 | 2019-07-08T15:20:22.000Z | 2019-07-08T15:20:22.000Z | apps/edr_validations_consumer/lib/edr_validations_consumer/kafka/consumer.ex | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 6 | 2018-05-11T13:59:32.000Z | 2022-01-19T20:15:22.000Z | defmodule EdrValidationsConsumer.Kafka.Consumer do
@moduledoc false
alias Core.CapitationContractRequests
alias Core.Contracts.ContractSuspender
alias Core.LegalEntities.EdrData
alias Core.LegalEntities.LegalEntity
alias Core.PRMRepo
alias Core.ReimbursementContractRequests
alias Jobs.ContractRequestTerminationJob
import Ecto.Changeset
import Ecto.Query
require Logger
@read_prm_repo Application.get_env(:core, :repos)[:read_prm_repo]
@rpc_edr_worker Application.get_env(:core, :rpc_edr_worker)
def handle_message(%{offset: offset, value: message}) do
value = :erlang.binary_to_term(message)
Logger.debug(fn -> "message: " <> inspect(value) end)
Logger.info(fn -> "offset: #{offset}" end)
:ok = consume(value)
end
def consume(%{"id" => id}) do
case get_edr_data(id) do
{:ok, edr_data} ->
do_consume(edr_data)
:ok
_ ->
:ok
end
end
def consume(value) do
Logger.warn("Invalid message #{inspect(value)}")
:ok
end
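# Refreshes the record from the EDR API; when a previously active record
# (state == 1) changes state, all NHS-verified legal entities bound to it are
# suspended along with their contracts and contract requests, inside a single
# PRM transaction.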
defp do_consume(%EdrData{state: previous_state} = edr_data) do
id = edr_data.id
with {:ok, response} <- get_legal_entity_from_edr(edr_data.edr_id) do
data = %{
"name" => response["names"]["name"],
"short_name" => response["names"]["short"],
"public_name" => response["names"]["display"],
"legal_form" => response["olf_code"],
"kveds" => response["activity_kinds"],
"registration_address" => response["address"],
"state" => response["state"],
"updated_by" => Confex.fetch_env!(:core, :system_user)
}
changeset = EdrData.changeset(edr_data, data)
PRMRepo.transaction(fn ->
if previous_state == 1 && get_change(changeset, :state) do
legal_entity_ids =
LegalEntity
|> select([le], %{id: le.id})
|> where([le], le.edr_data_id == ^id and le.nhs_verified)
|> PRMRepo.all()
|> Enum.map(& &1.id)
LegalEntity
|> where([le], le.id in ^legal_entity_ids)
|> PRMRepo.update_all(
set: [
status: LegalEntity.status(:suspended),
status_reason: "AUTO_SUSPEND",
nhs_verified: false,
nhs_unverified_at: DateTime.utc_now()
]
)
suspend_contracts(legal_entity_ids)
end
PRMRepo.update(changeset)
end)
end
end
defp suspend_contracts(ids) do
Enum.each(ids, fn id ->
ContractSuspender.suspend_by_contractor_legal_entity_id(id)
terminate_contract_requests(id)
end)
end
def terminate_contract_requests(legal_entity_id) do
system_user = Confex.fetch_env!(:core, :system_user)
contract_requests =
Enum.concat([
CapitationContractRequests.get_contract_requests_to_deactivate(legal_entity_id),
ReimbursementContractRequests.get_contract_requests_to_deactivate(legal_entity_id)
])
Enum.reduce_while(contract_requests, :ok, fn contract_request, acc ->
case ContractRequestTerminationJob.create(contract_request.entity, system_user) do
{:ok, %{}} ->
{:cont, acc}
err ->
Logger.warn(inspect(err))
PRMRepo.rollback(:contract_request_termination)
{:halt, err}
end
end)
end
defp get_edr_data(id) do
case @read_prm_repo.get(EdrData, id) do
nil ->
Logger.error("Can't get edr data by id #{id}")
%EdrData{} = edr_data ->
{:ok, edr_data}
end
end
defp get_legal_entity_from_edr(id) do
case @rpc_edr_worker.run("edr_api", EdrApi.Rpc, :get_legal_entity_detailed_info, [id]) do
{:ok, response} ->
{:ok, response}
{:error, reason} ->
Logger.error("Can't get edr data from edr. Reason: #{inspect(reason)}")
end
end
end
| 28.607407 | 93 | 0.630502 |
73557b0590d2269be1baca9679488c5f1863e7c0 | 189 | exs | Elixir | test/leetcode_elixir_solution_test.exs | kenspirit/leetcode_elixir_solutions | c146f8303f4e5830cf52bdf48b1c327ee46bc2a8 | [
"MIT"
] | null | null | null | test/leetcode_elixir_solution_test.exs | kenspirit/leetcode_elixir_solutions | c146f8303f4e5830cf52bdf48b1c327ee46bc2a8 | [
"MIT"
] | null | null | null | test/leetcode_elixir_solution_test.exs | kenspirit/leetcode_elixir_solutions | c146f8303f4e5830cf52bdf48b1c327ee46bc2a8 | [
"MIT"
] | null | null | null | defmodule LeetcodeElixirSolutionTest do
use ExUnit.Case
doctest LeetcodeElixirSolution
test "greets the leetcode" do
assert LeetcodeElixirSolution.hello() == :leetcode
end
end
| 21 | 54 | 0.78836 |
735594c95300828f91f937d17493b7c6b0f5007f | 380 | ex | Elixir | web/views/error_view.ex | danielberkompas/phoenix_ecto_encryption_sample | 1fd0f337eb214cb6a5b7348a70ad27e50968d851 | [
"MIT"
] | 4 | 2015-08-11T04:01:14.000Z | 2019-09-17T04:47:02.000Z | web/views/error_view.ex | danielberkompas/phoenix_ecto_encryption_sample | 1fd0f337eb214cb6a5b7348a70ad27e50968d851 | [
"MIT"
] | null | null | null | web/views/error_view.ex | danielberkompas/phoenix_ecto_encryption_sample | 1fd0f337eb214cb6a5b7348a70ad27e50968d851 | [
"MIT"
] | null | null | null | defmodule Encryption.ErrorView do
use Encryption.Web, :view
def render("404.html", _assigns) do
"Page not found"
end
def render("500.html", _assigns) do
"Server internal error"
end
# In case no render clause matches or no
# template is found, let's render it as 500
def template_not_found(_template, assigns) do
render "500.html", assigns
end
end
| 21.111111 | 47 | 0.702632 |
7355a218da0a6b59e2c45ed2b67654e12caeb9f7 | 1,228 | ex | Elixir | apps/astarte_trigger_engine/lib/astarte_trigger_engine_web/router.ex | Spidey20202022/astarte | 3950855c592b34363af0cf7f8a921762ce64e512 | [
"Apache-2.0"
] | 1 | 2020-02-04T13:15:22.000Z | 2020-02-04T13:15:22.000Z | apps/astarte_trigger_engine/lib/astarte_trigger_engine_web/router.ex | Spidey20202022/astarte | 3950855c592b34363af0cf7f8a921762ce64e512 | [
"Apache-2.0"
] | 1 | 2020-01-20T09:52:48.000Z | 2020-01-20T09:52:48.000Z | apps/astarte_trigger_engine/lib/astarte_trigger_engine_web/router.ex | Spidey20202022/astarte | 3950855c592b34363af0cf7f8a921762ce64e512 | [
"Apache-2.0"
] | 1 | 2020-02-04T13:15:50.000Z | 2020-02-04T13:15:50.000Z | #
# This file is part of Astarte.
#
# Copyright 2020 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule Astarte.TriggerEngineWeb.Router do
@moduledoc false
use Plug.Router
alias Astarte.TriggerEngine.Health
plug Astarte.TriggerEngineWeb.Metrics.PrometheusExporter
plug :match
plug :dispatch
get "/health" do
try do
case Health.get_health() do
{:ok, %{status: :ready}} ->
send_resp(conn, 200, "")
{:ok, %{status: :degraded}} ->
send_resp(conn, 200, "")
_ ->
send_resp(conn, 503, "")
end
rescue
      _e ->
        send_resp(conn, 500, "")
end
end
match _ do
send_resp(conn, 404, "Not found")
end
end
| 23.169811 | 74 | 0.667752 |
7355c7fcaa043069a26cc9fef7bb3e94af0c5845 | 559 | ex | Elixir | apps/banking_api_challenge_web/lib/banking_api_challenge_web/views/error_view.ex | jhonndabi/banking-api-challenge | 1e13c675b02c8e62a76e82b0a0dd6a44306a211e | [
"Apache-2.0"
] | null | null | null | apps/banking_api_challenge_web/lib/banking_api_challenge_web/views/error_view.ex | jhonndabi/banking-api-challenge | 1e13c675b02c8e62a76e82b0a0dd6a44306a211e | [
"Apache-2.0"
] | null | null | null | apps/banking_api_challenge_web/lib/banking_api_challenge_web/views/error_view.ex | jhonndabi/banking-api-challenge | 1e13c675b02c8e62a76e82b0a0dd6a44306a211e | [
"Apache-2.0"
] | 1 | 2021-04-20T19:05:48.000Z | 2021-04-20T19:05:48.000Z | defmodule BankingApiChallengeWeb.ErrorView do
use BankingApiChallengeWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.json", _assigns) do
# %{errors: %{detail: "Internal Server Error"}}
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.json" becomes
# "Not Found".
def template_not_found(template, _assigns) do
%{errors: %{detail: Phoenix.Controller.status_message_from_template(template)}}
end
end
| 32.882353 | 83 | 0.731664 |
7355cb1ac6a4d6673d2e58339610fdd5069806e1 | 814 | ex | Elixir | lib/oli/delivery/attempts/core/resource_access.ex | DevShashi1993/oli-torus | e6e0b66f0973f9790a5785731b22db6fb1c50a73 | [
"MIT"
] | 45 | 2020-04-17T15:40:27.000Z | 2022-03-25T00:13:30.000Z | lib/oli/delivery/attempts/core/resource_access.ex | DevShashi1993/oli-torus | e6e0b66f0973f9790a5785731b22db6fb1c50a73 | [
"MIT"
] | 944 | 2020-02-13T02:37:01.000Z | 2022-03-31T17:50:07.000Z | lib/oli/delivery/attempts/core/resource_access.ex | DevShashi1993/oli-torus | e6e0b66f0973f9790a5785731b22db6fb1c50a73 | [
"MIT"
] | 23 | 2020-07-28T03:36:13.000Z | 2022-03-17T14:29:02.000Z | defmodule Oli.Delivery.Attempts.Core.ResourceAccess do
use Ecto.Schema
import Ecto.Changeset
schema "resource_accesses" do
field(:access_count, :integer)
field(:score, :float)
field(:out_of, :float)
belongs_to(:user, Oli.Accounts.User)
belongs_to(:section, Oli.Delivery.Sections.Section)
belongs_to(:resource, Oli.Resources.Resource)
has_many(:resource_attempts, Oli.Delivery.Attempts.Core.ResourceAttempt)
timestamps(type: :utc_datetime)
end
@doc false
def changeset(resource_access, attrs) do
resource_access
|> cast(attrs, [:access_count, :score, :out_of, :user_id, :section_id, :resource_id])
|> validate_required([:access_count, :user_id, :section_id, :resource_id])
|> unique_constraint(:entry, name: :resource_accesses_unique_index)
end
end
| 31.307692 | 89 | 0.734644 |
7355e1ca320b80ce1549d243166f341f746c073a | 383 | ex | Elixir | discuss/web/controllers/plugs/require_auth.ex | enelesmai/elixir-bootcamp | 0cc3ee791063eba48ed9eedb64a9b2d46a0e7cf9 | [
"MIT"
] | null | null | null | discuss/web/controllers/plugs/require_auth.ex | enelesmai/elixir-bootcamp | 0cc3ee791063eba48ed9eedb64a9b2d46a0e7cf9 | [
"MIT"
] | null | null | null | discuss/web/controllers/plugs/require_auth.ex | enelesmai/elixir-bootcamp | 0cc3ee791063eba48ed9eedb64a9b2d46a0e7cf9 | [
"MIT"
] | null | null | null | defmodule Discuss.Plugs.RequireAuth do
import Plug.Conn
import Phoenix.Controller
alias Discuss.Router.Helpers
  def init(params), do: params
def call(conn, _params) do
if conn.assigns[:user] do
conn
else
conn
|> put_flash(:error, "You must be logged in.")
|> redirect(to: Helpers.topic_path(conn, :index))
|> halt()
end
end
end | 19.15 | 55 | 0.642298 |
7355f58968a13f6cb17e8dbfdaa3df09865b481a | 945 | exs | Elixir | apps/admin_api/test/admin_api/v1/controllers/fallback_controller_test.exs | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | 2 | 2019-07-13T05:49:03.000Z | 2021-08-19T23:58:23.000Z | apps/admin_api/test/admin_api/v1/controllers/fallback_controller_test.exs | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | null | null | null | apps/admin_api/test/admin_api/v1/controllers/fallback_controller_test.exs | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | 3 | 2018-05-08T17:15:42.000Z | 2021-11-10T04:08:33.000Z | defmodule AdminAPI.V1.FallbackControllerTest do
use AdminAPI.ConnCase, async: true
describe "/not_found" do
test "returns correct error response for client-authtenticated requests" do
expected = %{
"version" => "1",
"success" => false,
"data" => %{
"object" => "error",
"code" => "client:endpoint_not_found",
"description" => "Endpoint not found",
"messages" => nil
}
}
assert client_request("/not_found") == expected
end
test "returns correct error response for user-authenticated requests" do
expected = %{
"version" => "1",
"success" => false,
"data" => %{
"object" => "error",
"code" => "client:endpoint_not_found",
"description" => "Endpoint not found",
"messages" => nil
}
}
assert user_request("/not_found") == expected
end
end
end
| 26.25 | 79 | 0.54709 |
7356056415a218a43cea3e8cce13806a497f44bb | 166 | ex | Elixir | test/support/mocked_server/external_schema/server_subuser.ex | kintu-games/elidactyl | 2d95694ef4a85c72e962379d8d12fc08bd8352ac | [
"MIT"
] | 6 | 2020-04-28T21:38:40.000Z | 2022-02-13T01:04:10.000Z | test/support/mocked_server/external_schema/server_subuser.ex | kintu-games/elidactyl | 2d95694ef4a85c72e962379d8d12fc08bd8352ac | [
"MIT"
] | 1 | 2021-03-16T10:39:32.000Z | 2021-03-16T10:39:32.000Z | test/support/mocked_server/external_schema/server_subuser.ex | Kintull/elidactyl | 9a051ed511ed92fa7578038784baa73288f1312b | [
"MIT"
] | null | null | null | defmodule Elidactyl.MockedServer.ExternalSchema.ServerSubuser do
@moduledoc false
@derive Jason.Encoder
defstruct object: "server_subuser", attributes: %{}
end
| 27.666667 | 64 | 0.801205 |
73560a08e0db4d826e061a217644f755dc143000 | 2,009 | ex | Elixir | apps/astarte_appengine_api/lib/astarte_appengine_api_web/endpoint.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 191 | 2018-03-30T13:23:08.000Z | 2022-03-02T12:05:32.000Z | apps/astarte_appengine_api/lib/astarte_appengine_api_web/endpoint.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 402 | 2018-03-30T13:37:00.000Z | 2022-03-31T16:47:10.000Z | apps/astarte_appengine_api/lib/astarte_appengine_api_web/endpoint.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 24 | 2018-03-30T13:29:48.000Z | 2022-02-28T11:10:26.000Z | #
# This file is part of Astarte.
#
# Copyright 2017 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule Astarte.AppEngine.APIWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :astarte_appengine_api
socket "/v1/socket", Astarte.AppEngine.APIWeb.UserSocket, websocket: true
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phoenix.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :astarte_appengine_api,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug CORSPlug
plug Astarte.AppEngine.APIWeb.HealthPlug
plug Astarte.AppEngine.APIWeb.MetricsPlug
plug PlugLoggerWithMeta
plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug Plug.Session,
store: :cookie,
key: "_astarte_appengine_api_key",
signing_salt: "V7l/jiVr"
plug Astarte.AppEngine.APIWeb.Router
end
| 31.390625 | 75 | 0.740169 |
735611682fcb9e355e28e659af36310e72834298 | 7,435 | exs | Elixir | lib/mix/test/mix/dep_test.exs | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/dep_test.exs | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/dep_test.exs | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | 1 | 2020-12-07T08:04:16.000Z | 2020-12-07T08:04:16.000Z | Code.require_file "../test_helper.exs", __DIR__
defmodule Mix.DepTest do
use MixTest.Case
defmodule DepsApp do
def project do
[ deps: [
{:ok, "0.1.0", path: "deps/ok"},
{:invalidvsn, "0.2.0", path: "deps/invalidvsn"},
{:invalidapp, "0.1.0", path: "deps/invalidapp"},
{:noappfile, "0.1.0", path: "deps/noappfile"},
{:uncloned, git: "https://github.com/elixir-lang/uncloned.git"},
{:optional, git: "https://github.com/elixir-lang/optional.git", optional: true}
] ]
end
end
defmodule MixVersionApp do
def project do
[ deps: [ {:ok, "~> 0.1", path: "deps/ok"} ] ]
end
end
defmodule NoSCMApp do
def project do
[ deps: [ { :ok, "~> 0.1", not_really: :ok } ] ]
end
end
defmodule InvalidDepsReq do
def project do
[ deps: [ {:ok, "+- 0.1.0", path: "deps/ok"} ] ]
end
end
test "extracts all dependencies from the given project" do
Mix.Project.push DepsApp
in_fixture "deps_status", fn ->
deps = Mix.Dep.loaded([])
assert length(deps) == 6
assert Enum.find deps, &match?(%Mix.Dep{app: :ok, status: {:ok, _}}, &1)
assert Enum.find deps, &match?(%Mix.Dep{app: :invalidvsn, status: {:invalidvsn, :ok}}, &1)
assert Enum.find deps, &match?(%Mix.Dep{app: :invalidapp, status: {:invalidapp, _}}, &1)
assert Enum.find deps, &match?(%Mix.Dep{app: :noappfile, status: {:noappfile, _}}, &1)
assert Enum.find deps, &match?(%Mix.Dep{app: :uncloned, status: {:unavailable, _}}, &1)
assert Enum.find deps, &match?(%Mix.Dep{app: :optional, status: {:unavailable, _}}, &1)
end
end
test "use requirements for dependencies" do
Mix.Project.push MixVersionApp
in_fixture "deps_status", fn ->
deps = Mix.Dep.loaded([])
assert Enum.find deps, &match?(%Mix.Dep{app: :ok, status: {:ok, _}}, &1)
end
end
test "raises when no SCM is specified" do
Mix.Project.push NoSCMApp
in_fixture "deps_status", fn ->
send self, {:mix_shell_input, :yes?, false}
msg = "Could not find a SCM for dependency :ok from Mix.DepTest.NoSCMApp"
assert_raise Mix.Error, msg, fn -> Mix.Dep.loaded([]) end
end
end
test "does not set the manager before the dependency was loaded" do
# It is important to not eagerly set the manager because the dependency
# needs to be loaded (i.e. available in the filesystem) in order to get
# the proper manager.
Mix.Project.push DepsApp
{_, true, _} =
Mix.Dep.Converger.converge(false, [], nil, fn dep, acc, lock ->
assert nil?(dep.manager)
{dep, acc or true, lock}
end)
end
test "raises on invalid deps req" do
Mix.Project.push InvalidDepsReq
in_fixture "deps_status", fn ->
assert_raise Mix.Error, ~r"Invalid requirement", fn ->
Mix.Dep.loaded([])
end
end
end
defmodule NestedDepsApp do
def project do
[
app: :raw_sample,
version: "0.1.0",
deps: [
{:deps_repo, "0.1.0", path: "custom/deps_repo"}
]
]
end
end
test "nested deps come first" do
Mix.Project.push NestedDepsApp
in_fixture "deps_status", fn ->
assert Enum.map(Mix.Dep.loaded([]), &(&1.app)) == [:git_repo, :deps_repo]
end
end
test "nested optional deps are never added" do
Mix.Project.push NestedDepsApp
in_fixture "deps_status", fn ->
File.write! "custom/deps_repo/mix.exs", """
defmodule DepsRepo do
use Mix.Project
def project do
[
app: :deps_repo,
version: "0.1.0",
deps: [
{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), optional: true}
]
]
end
end
"""
assert Enum.map(Mix.Dep.loaded([]), &(&1.app)) == [:deps_repo]
end
end
defmodule ConvergedDepsApp do
def project do
[
app: :raw_sample,
version: "0.1.0",
deps: [
{:deps_repo, "0.1.0", path: "custom/deps_repo"},
{:git_repo, "0.1.0", git: MixTest.Case.fixture_path("git_repo")}
]
]
end
end
test "correctly order converged deps" do
Mix.Project.push ConvergedDepsApp
in_fixture "deps_status", fn ->
assert Enum.map(Mix.Dep.loaded([]), &(&1.app)) == [:git_repo, :deps_repo]
end
end
test "correctly order converged deps even with optional dependencies" do
Mix.Project.push ConvergedDepsApp
in_fixture "deps_status", fn ->
File.write! "custom/deps_repo/mix.exs", """
defmodule DepsRepo do
use Mix.Project
def project do
[
app: :deps_repo,
version: "0.1.0",
deps: [
{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), optional: true}
]
]
end
end
"""
assert Enum.map(Mix.Dep.loaded([]), &(&1.app)) == [:git_repo, :deps_repo]
end
end
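  # Stub remote converger: it records that it was invoked and returns the lock
  # unchanged, so the "remote converger" test below can assert that the hook
  # runs during `deps.get`.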
defmodule IdentityRemoteConverger do
@behaviour Mix.RemoteConverger
def remote?(_app), do: true
def converge(_deps, lock) do
Process.put(:remote_converger, true)
lock
end
def deps(_deps, _lock) do
[]
end
end
test "remote converger" do
Mix.Project.push ConvergedDepsApp
Mix.RemoteConverger.register(IdentityRemoteConverger)
in_fixture "deps_status", fn ->
Mix.Tasks.Deps.Get.run([])
message = "* Getting git_repo (#{fixture_path("git_repo")})"
assert_received {:mix_shell, :info, [^message]}
assert Process.get(:remote_converger)
end
after
Mix.RemoteConverger.register(nil)
end
defmodule OnlyDeps do
def project do
[ deps: [ {:foo, github: "elixir-lang/foo"},
{:bar, github: "elixir-lang/bar", only: :other_env} ] ]
end
end
test "only extract deps matching environment" do
Mix.Project.push OnlyDeps
in_fixture "deps_status", fn ->
deps = Mix.Dep.loaded([env: :other_env])
assert length(deps) == 2
deps = Mix.Dep.loaded([])
assert length(deps) == 2
deps = Mix.Dep.loaded([env: :prod])
assert length(deps) == 1
assert Enum.find deps, &match?(%Mix.Dep{app: :foo}, &1)
end
end
defmodule OnlyChildDeps do
def project do
[ app: :raw_sample,
version: "0.1.0",
deps: [ {:only_deps, path: fixture_path("only_deps")} ] ]
end
end
test "only fetch child deps matching prod env" do
Mix.Project.push OnlyChildDeps
in_fixture "deps_status", fn ->
Mix.Tasks.Deps.Get.run([])
message = "* Getting git_repo (#{fixture_path("git_repo")})"
refute_received {:mix_shell, :info, [^message]}
end
end
defmodule OnlyParentDeps do
def project do
[ app: :raw_sample,
version: "0.1.0",
deps: [ {:only, github: "elixir-lang/only", only: :dev} ] ]
end
end
test "only fetch parent deps matching specified env" do
Mix.Project.push OnlyParentDeps
in_fixture "deps_status", fn ->
Mix.Tasks.Deps.Get.run(["--only", "prod"])
refute_received {:mix_shell, :info, ["* Getting" <> _]}
assert_raise Mix.Error, "Can't continue due to errors on dependencies", fn ->
Mix.Tasks.Deps.Check.run([])
end
Mix.env(:prod)
Mix.Tasks.Deps.Check.run([])
end
end
end
| 26.648746 | 100 | 0.588299 |
735664bda4df1645484e5906c5a3c31c38869f2c | 317 | exs | Elixir | priv/repo/migrations/20160124163843_create_comment.exs | gilcierweb/appElixirPhoenix | 4c594b365ddc07e732bd421901620469a49b2395 | [
"MIT"
] | 1 | 2018-04-25T15:03:42.000Z | 2018-04-25T15:03:42.000Z | priv/repo/migrations/20160124163843_create_comment.exs | gilcierweb/appElixirPhoenix | 4c594b365ddc07e732bd421901620469a49b2395 | [
"MIT"
] | null | null | null | priv/repo/migrations/20160124163843_create_comment.exs | gilcierweb/appElixirPhoenix | 4c594b365ddc07e732bd421901620469a49b2395 | [
"MIT"
] | null | null | null | defmodule AppElixirPhoenix.Repo.Migrations.CreateComment do
use Ecto.Migration
def change do
create table(:comments) do
add :name, :string
add :content, :text
add :post_id, references(:posts, on_delete: :nothing)
timestamps
end
create index(:comments, [:post_id])
end
end
| 19.8125 | 59 | 0.678233 |
735682fe7749019dc2b674dea8450fa3916627a6 | 1,330 | ex | Elixir | lib/elsa/group/supervisor.ex | mhanberg/elsa | 53eb831ca4abcc0f2de6a8a9686bd13fc4767ca6 | [
"Apache-2.0"
] | 76 | 2019-05-31T20:35:19.000Z | 2022-02-26T10:15:27.000Z | lib/elsa/group/supervisor.ex | mhanberg/elsa | 53eb831ca4abcc0f2de6a8a9686bd13fc4767ca6 | [
"Apache-2.0"
] | 44 | 2019-06-13T14:43:30.000Z | 2022-03-31T12:16:19.000Z | lib/elsa/group/supervisor.ex | mhanberg/elsa | 53eb831ca4abcc0f2de6a8a9686bd13fc4767ca6 | [
"Apache-2.0"
] | 11 | 2019-06-05T02:23:06.000Z | 2022-02-10T16:14:33.000Z | defmodule Elsa.Group.Supervisor do
@moduledoc """
Orchestrates the creation of dynamic supervisor and worker
processes for per-topic consumer groups, manager processes
for coordinating topic/partition assignment, and a registry
for differentiating named processes between consumer groups.
"""
use Supervisor, restart: :transient
import Elsa.Supervisor, only: [registry: 1]
@type init_opts :: [
connection: Elsa.connection(),
topics: [Elsa.topic()],
group: String.t(),
config: list
]
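  # Usage sketch (illustrative names; this supervisor is normally started as
  # part of the `Elsa.Supervisor` tree rather than directly):
  #
  #     Elsa.Group.Supervisor.start_link(
  #       connection: :my_connection,
  #       group: "my-consumer-group",
  #       topics: ["events"],
  #       config: []
  #     )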
@spec start_link(init_opts) :: GenServer.on_start()
def start_link(init_arg \\ []) do
connection = Keyword.fetch!(init_arg, :connection)
Supervisor.start_link(__MODULE__, init_arg, name: {:via, Elsa.Registry, {registry(connection), __MODULE__}})
end
@impl Supervisor
def init(init_arg) do
connection = Keyword.fetch!(init_arg, :connection)
registry = registry(connection)
children =
[
{DynamicSupervisor, [strategy: :one_for_one, name: {:via, Elsa.Registry, {registry, :worker_supervisor}}]},
{Elsa.Group.Manager, manager_args(init_arg)}
]
|> List.flatten()
Supervisor.init(children, strategy: :one_for_all)
end
defp manager_args(args) do
args
|> Keyword.put(:supervisor_pid, self())
end
end
| 29.555556 | 115 | 0.681955 |
7356ac65a94be9d1ed5ce91bd41c5bce48de9df3 | 78 | exs | Elixir | test/test_helper.exs | tamanugi/realworld-phoenix | 1c0f90234926550a9124863b3946934b8f2e19a4 | [
"MIT"
] | 9 | 2021-08-23T23:21:31.000Z | 2022-03-27T23:19:55.000Z | test/test_helper.exs | tamanugi/realworld-phoenix | 1c0f90234926550a9124863b3946934b8f2e19a4 | [
"MIT"
] | null | null | null | test/test_helper.exs | tamanugi/realworld-phoenix | 1c0f90234926550a9124863b3946934b8f2e19a4 | [
"MIT"
] | 1 | 2021-09-06T07:26:37.000Z | 2021-09-06T07:26:37.000Z | ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(RealworldPhoenix.Repo, :manual)
| 26 | 62 | 0.807692 |
7356f72573450d29c7d2031d75df0dd1980cf68c | 469 | exs | Elixir | test/cadet/auth/providers/cognito_claim_extractor_test.exs | source-academy/backend | 0074e1ab846a091ba3bbfbfb76660498b3e4b4be | [
"Apache-2.0"
] | 3 | 2021-08-15T23:57:32.000Z | 2022-02-02T06:50:55.000Z | test/cadet/auth/providers/cognito_claim_extractor_test.exs | source-academy/backend | 0074e1ab846a091ba3bbfbfb76660498b3e4b4be | [
"Apache-2.0"
] | 76 | 2021-07-05T07:13:52.000Z | 2022-03-28T07:23:11.000Z | test/cadet/auth/providers/cognito_claim_extractor_test.exs | source-academy/backend | 0074e1ab846a091ba3bbfbfb76660498b3e4b4be | [
"Apache-2.0"
] | 1 | 2022-03-26T14:27:51.000Z | 2022-03-26T14:27:51.000Z | defmodule Cadet.Auth.Providers.CognitoClaimExtractorTest do
use ExUnit.Case, async: true
alias Cadet.Auth.Providers.CognitoClaimExtractor, as: Testee
@username "adofjihid"
@role :staff
@claims %{"username" => @username, "cognito:groups" => [Atom.to_string(@role)]}
test "test" do
assert @username == Testee.get_username(@claims, "")
assert @username == Testee.get_name(@claims, "")
assert Testee.get_token_type() == "access_token"
end
end
| 27.588235 | 81 | 0.707889 |
7356fbec1ef01447cc9fcfaaac96373111baa8ea | 5,355 | exs | Elixir | test/transaction_event_test.exs | tpitale/elixir_agent | f356b588fe9d7cc3b9db00d9a251e009a20b3e0e | [
"Apache-2.0"
] | 227 | 2018-09-05T15:33:23.000Z | 2022-02-25T18:12:06.000Z | test/transaction_event_test.exs | tpitale/elixir_agent | f356b588fe9d7cc3b9db00d9a251e009a20b3e0e | [
"Apache-2.0"
] | 211 | 2018-09-05T21:42:41.000Z | 2022-03-25T17:51:56.000Z | test/transaction_event_test.exs | tpitale/elixir_agent | f356b588fe9d7cc3b9db00d9a251e009a20b3e0e | [
"Apache-2.0"
] | 84 | 2018-09-05T04:26:26.000Z | 2022-03-09T14:28:14.000Z | defmodule TransactionEventTest do
use ExUnit.Case
use Plug.Test
alias NewRelic.Harvest
alias NewRelic.Harvest.Collector
alias NewRelic.Transaction.Event
defmodule TestPlugApp do
use Plug.Router
plug(:match)
plug(:dispatch)
get "/" do
Process.sleep(10)
send_resp(conn, 200, "transactionEvent")
end
end
test "post a transaction event" do
agent_run_id = Collector.AgentRun.agent_run_id()
tr_1 = %Event{
web_duration: 0.010,
database_duration: nil,
timestamp: System.system_time(:millisecond) / 1_000,
name: "WebTransaction/AgentTest/Transaction/name",
duration: 0.010,
type: "Transaction",
user_attributes: %{
foo: "bar"
}
}
sampling = %{
reservoir_size: 100,
events_seen: 1
}
transaction_events = Event.format_events([tr_1])
payload = [agent_run_id, sampling, transaction_events]
Collector.Protocol.transaction_event(payload)
end
test "collect and store top priority events" do
Application.put_env(:new_relic_agent, :transaction_event_reservoir_size, 2)
{:ok, harvester} =
DynamicSupervisor.start_child(
Collector.TransactionEvent.HarvesterSupervisor,
Collector.TransactionEvent.Harvester
)
ev1 = %Event{name: "Ev1", duration: 1, user_attributes: %{priority: 3}}
ev2 = %Event{name: "Ev2", duration: 2, user_attributes: %{priority: 2}}
ev3 = %Event{name: "Ev3", duration: 3, user_attributes: %{priority: 1}}
GenServer.cast(harvester, {:report, ev1})
GenServer.cast(harvester, {:report, ev2})
GenServer.cast(harvester, {:report, ev3})
events = GenServer.call(harvester, :gather_harvest)
assert length(events) == 2
assert Enum.find(events, fn [_, tx] -> tx.priority == 3 end)
assert Enum.find(events, fn [_, tx] -> tx.priority == 2 end)
refute Enum.find(events, fn [_, tx] -> tx.priority == 1 end)
# Verify that the Harvester shuts down w/o error
Process.monitor(harvester)
Harvest.HarvestCycle.send_harvest(Collector.TransactionEvent.HarvesterSupervisor, harvester)
assert_receive {:DOWN, _ref, _, ^harvester, :shutdown}, 1000
Application.delete_env(:new_relic_agent, :transaction_event_reservoir_size)
end
test "user attributes can be truncated" do
TestHelper.restart_harvest_cycle(Collector.TransactionEvent.HarvestCycle)
Collector.TransactionEvent.Harvester.report_event(%Event{
name: "Ev1",
duration: 1,
user_attributes: %{long_entry: String.duplicate("1", 5000)}
})
[[_, attrs]] = TestHelper.gather_harvest(Collector.TransactionEvent.Harvester)
assert String.length(attrs.long_entry) == 4095
TestHelper.pause_harvest_cycle(Collector.TransactionEvent.HarvestCycle)
end
test "harvest cycle" do
Application.put_env(:new_relic_agent, :transaction_event_harvest_cycle, 300)
TestHelper.restart_harvest_cycle(Collector.TransactionEvent.HarvestCycle)
first = Harvest.HarvestCycle.current_harvester(Collector.TransactionEvent.HarvestCycle)
Process.monitor(first)
# Wait until harvest swap
assert_receive {:DOWN, _ref, _, ^first, :shutdown}, 1000
second = Harvest.HarvestCycle.current_harvester(Collector.TransactionEvent.HarvestCycle)
Process.monitor(second)
refute first == second
assert Process.alive?(second)
TestHelper.pause_harvest_cycle(Collector.TransactionEvent.HarvestCycle)
Application.delete_env(:new_relic_agent, :transaction_event_harvest_cycle)
# Ensure the last harvester has shut down
assert_receive {:DOWN, _ref, _, ^second, :shutdown}, 1000
end
test "instrument & harvest" do
TestHelper.restart_harvest_cycle(Collector.TransactionEvent.HarvestCycle)
TestHelper.request(TestPlugApp, conn(:get, "/"))
TestHelper.request(TestPlugApp, conn(:get, "/"))
[event | _] = events = TestHelper.gather_harvest(Collector.TransactionEvent.Harvester)
assert length(events) == 2
assert [%{name: "WebTransaction/Plug/GET"}, _] = event
TestHelper.pause_harvest_cycle(Collector.TransactionEvent.HarvestCycle)
end
test "Ignore late reports" do
TestHelper.restart_harvest_cycle(Collector.TransactionEvent.HarvestCycle)
harvester =
Collector.TransactionEvent.HarvestCycle
|> Harvest.HarvestCycle.current_harvester()
assert :ok == GenServer.call(harvester, :send_harvest)
GenServer.cast(harvester, {:report, :late_msg})
assert :completed == GenServer.call(harvester, :send_harvest)
TestHelper.pause_harvest_cycle(Collector.TransactionEvent.HarvestCycle)
end
test "Respect the reservoir_size" do
Application.put_env(:new_relic_agent, :transaction_event_reservoir_size, 3)
TestHelper.restart_harvest_cycle(Collector.TransactionEvent.HarvestCycle)
TestHelper.request(TestPlugApp, conn(:get, "/"))
TestHelper.request(TestPlugApp, conn(:get, "/"))
TestHelper.request(TestPlugApp, conn(:get, "/"))
TestHelper.request(TestPlugApp, conn(:get, "/"))
TestHelper.request(TestPlugApp, conn(:get, "/"))
events = TestHelper.gather_harvest(Collector.TransactionEvent.Harvester)
assert length(events) == 3
Application.delete_env(:new_relic_agent, :transaction_event_reservoir_size)
TestHelper.pause_harvest_cycle(Collector.TransactionEvent.HarvestCycle)
end
end
| 32.652439 | 96 | 0.725303 |
7357333ba71dc4a94b55f1c96de5656a095c0306 | 6,292 | ex | Elixir | lib/game/format/items.ex | jgsmith/ex_venture | 546adaa8fe80d45a72fde6de8d8d6906902c12d4 | [
"MIT"
] | 2 | 2019-05-14T11:36:44.000Z | 2020-07-01T08:54:04.000Z | lib/game/format/items.ex | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | null | null | null | lib/game/format/items.ex | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | 1 | 2021-01-29T14:12:40.000Z | 2021-01-29T14:12:40.000Z | defmodule Game.Format.Items do
@moduledoc """
Format functions for items
"""
use Game.Currency
import Game.Format.Context
alias Data.Item
alias Game.Format
@doc """
  Format an item's name, cyan
iex> Items.item_name(%{name: "Potion"})
"{item}Potion{/item}"
"""
@spec item_name(Item.t()) :: String.t()
def item_name(item) do
context()
|> assign(:name, item.name)
|> Format.template("{item}[name]{/item}")
end
@doc """
Format currency
"""
@spec currency(Save.t() | Room.t()) :: String.t()
def currency(%{currency: currency}) when currency == 0, do: ""
def currency(%{currency: amount}) do
currency(amount)
end
def currency(amount) do
context()
|> assign(:amount, amount)
|> assign(:currency, @currency)
|> Format.template("{item}[amount] [currency]{/item}")
end
@doc """
Display an item
Example:
iex> string = Items.item(%{name: "Short Sword", description: "A simple blade"})
iex> Regex.match?(~r(Short Sword), string)
true
"""
@spec item(Item.t()) :: String.t()
def item(item) do
context()
|> assign(:name, item_name(item))
|> assign(:underline, Format.underline(item.name))
|> assign(:description, item.description)
|> assign(:stats, item_stats(item))
|> Format.template(template("item"))
|> Format.resources()
end
@doc """
  Format an item's stats
iex> Items.item_stats(%{type: "armor", stats: %{slot: :chest}})
"Slot: chest"
iex> Items.item_stats(%{type: "basic"})
""
"""
@spec item_stats(Item.t()) :: String.t()
def item_stats(item)
def item_stats(%{type: "armor", stats: stats}) do
context()
|> assign(:slot, stats.slot)
|> Format.template("Slot: [slot]")
end
def item_stats(_), do: ""
@doc """
Format your inventory
"""
@spec inventory(integer(), map(), map(), [Item.t()]) :: String.t()
def inventory(currency_amount, wearing, wielding, items) do
items =
items
|> Enum.map(&inventory_item/1)
|> Enum.join("\n")
context()
|> assign(:equipment, equipment(wearing, wielding))
|> assign(:items, items)
|> assign(:currency, currency(currency_amount))
|> Format.template(template("inventory"))
end
def inventory_item(%{item: item, quantity: 1}) do
context()
|> assign(:name, item_name(item))
|> Format.template(" - [name]")
end
def inventory_item(%{item: item, quantity: quantity}) do
context()
|> assign(:name, item_name(item))
|> assign(:quantity, quantity)
|> Format.template(" - {item}[name] x[quantity]{/item}")
end
@doc """
Format your equipment
Example:
iex> wearing = %{chest: %{name: "Leather Armor"}}
iex> wielding = %{right: %{name: "Short Sword"}, left: %{name: "Shield"}}
iex> Items.equipment(wearing, wielding)
"You are wearing:\\n - {item}Leather Armor{/item} on your chest\\nYou are wielding:\\n - {item}Shield{/item} in your left hand\\n - {item}Short Sword{/item} in your right hand"
"""
@spec equipment(map(), map()) :: String.t()
def equipment(wearing, wielding) do
wearing =
wearing
|> Map.to_list()
|> Enum.sort_by(&elem(&1, 0))
|> Enum.map(&wearing_item/1)
|> Enum.join("\n")
wielding =
wielding
|> Map.to_list()
|> Enum.sort_by(&elem(&1, 0))
|> Enum.map(&wielding_item/1)
|> Enum.join("\n")
context()
|> assign(:wearing, wearing)
|> assign(:wielding, wielding)
|> Format.template("You are wearing:\n[wearing]\nYou are wielding:\n[wielding]")
end
def wearing_item({part, item}) do
context()
|> assign(:name, item_name(item))
|> assign(:part, part)
|> Format.template(" - [name] on your [part]")
end
def wielding_item({hand, item}) do
context()
|> assign(:name, item_name(item))
|> assign(:hand, hand)
|> Format.template(" - [name] in your [hand] hand")
end
@doc """
Message for users of items
iex> target = %{type: "npc", name: "Bandit"}
iex> user = %{type: "player", name: "Player"}
iex> Items.user_item(%{name: "Potion", user_text: "You used [name] on [target]."}, target: target, user: user)
"You used {item}Potion{/item} on {npc}Bandit{/npc}."
"""
def user_item(item, opts \\ []) do
context()
|> assign(:name, item_name(item))
|> assign(:target, Format.target_name(Keyword.get(opts, :target)))
|> assign(:user, Format.target_name(Keyword.get(opts, :user)))
|> Format.template(item.user_text)
end
@doc """
Message for usees of items
iex> target = %{type: "npc", name: "Bandit"}
iex> user = %{type: "player", name: "Player"}
iex> Items.usee_item(%{name: "Potion", usee_text: "You used [name] on [target]."}, target: target, user: user)
"You used {item}Potion{/item} on {npc}Bandit{/npc}."
"""
def usee_item(item, opts \\ []) do
context()
|> assign(:name, item_name(item))
|> assign(:target, Format.target_name(Keyword.get(opts, :target)))
|> assign(:user, Format.target_name(Keyword.get(opts, :user)))
|> Format.template(item.usee_text)
end
@doc """
An item was dropped message
iex> Items.dropped(%{type: "npc", name: "NPC"}, %{name: "Sword"})
"{npc}NPC{/npc} dropped {item}Sword{/item}."
iex> Items.dropped(%{type: "player", name: "Player"}, %{name: "Sword"})
"{player}Player{/player} dropped {item}Sword{/item}."
iex> Items.dropped(%{type: "player", name: "Player"}, {:currency, 100})
"{player}Player{/player} dropped {item}100 gold{/item}."
"""
@spec dropped(Character.t(), Item.t()) :: String.t()
def dropped(who, {:currency, amount}) do
context()
|> assign(:character, Format.name(who))
|> assign(:currency, currency(amount))
|> Format.template("[character] dropped [currency].")
end
def dropped(who, item) do
context()
|> assign(:character, Format.name(who))
|> assign(:name, item_name(item))
|> Format.template("[character] dropped [name].")
end
def template("item") do
"""
[name]
[underline]
[description]
[stats]
"""
end
def template("inventory") do
"""
[equipment]
You are holding:
[items]
You have [currency].
"""
end
end
| 26.661017 | 185 | 0.594882 |
7357347e3c424e5e88e82a80f8dfc803ac339d95 | 898 | ex | Elixir | clients/cloud_support/lib/google_api/cloud_support/v2beta/metadata.ex | mopp/elixir-google-api | d496227d17600bccbdf8f6be9ad1b7e7219d7ec6 | [
"Apache-2.0"
] | null | null | null | clients/cloud_support/lib/google_api/cloud_support/v2beta/metadata.ex | mopp/elixir-google-api | d496227d17600bccbdf8f6be9ad1b7e7219d7ec6 | [
"Apache-2.0"
] | null | null | null | clients/cloud_support/lib/google_api/cloud_support/v2beta/metadata.ex | mopp/elixir-google-api | d496227d17600bccbdf8f6be9ad1b7e7219d7ec6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudSupport.V2beta do
@moduledoc """
API client metadata for GoogleApi.CloudSupport.V2beta.
"""
@discovery_revision "20220125"
def discovery_revision(), do: @discovery_revision
end
| 33.259259 | 74 | 0.762806 |
73573a0888415de38833cd06d04afeb87d547df4 | 243 | ex | Elixir | lib/oc2.ex | TraceyOnim/TwinklyMaHa | cb9d907d8807e00f1e6e44085fd6f39ae32370b6 | [
"MIT"
] | 1 | 2020-07-16T19:49:53.000Z | 2020-07-16T19:49:53.000Z | lib/oc2.ex | TraceyOnim/TwinklyMaHa | cb9d907d8807e00f1e6e44085fd6f39ae32370b6 | [
"MIT"
] | 68 | 2021-06-16T15:30:11.000Z | 2022-03-07T08:38:19.000Z | lib/oc2.ex | sFractal-Podii/TwinklyMaHa | cfcffc355aad7200bef3c4d8ab9b8e179f02b26c | [
"MIT"
] | 5 | 2020-07-14T05:03:08.000Z | 2021-06-15T18:21:19.000Z | defmodule Oc2 do
@moduledoc """
Oc2 keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end
| 24.3 | 66 | 0.744856 |
73577089c041095fa0dc9939079d80238b5b72ed | 1,177 | ex | Elixir | lib/oauth_xyz_web/channels/user_socket.ex | ritou/elixir-oauth-xyz | 110d4eadb16fa5c106ae0f6fad49c0424bdbf477 | [
"MIT"
] | 2 | 2020-04-22T13:22:25.000Z | 2020-12-01T12:01:30.000Z | lib/oauth_xyz_web/channels/user_socket.ex | ritou/elixir-oauth-xyz | 110d4eadb16fa5c106ae0f6fad49c0424bdbf477 | [
"MIT"
] | 3 | 2019-12-05T01:32:09.000Z | 2019-12-09T01:15:32.000Z | lib/oauth_xyz_web/channels/user_socket.ex | ritou/elixir-oauth-xyz-web | 110d4eadb16fa5c106ae0f6fad49c0424bdbf477 | [
"MIT"
] | null | null | null | defmodule OAuthXYZWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", OAuthXYZWeb.RoomChannel
## Transports
transport(:websocket, Phoenix.Transports.WebSocket)
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# OAuthXYZWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 30.973684 | 83 | 0.703483 |
735777744ade63fa0589e5fc488007cb37b55387 | 1,343 | ex | Elixir | lib/hound/json_driver/session.ex | knewter/hound | 49ca7d71f7fb0d9a9de14afa86ca1a1fe5ae1278 | [
"MIT"
] | 1 | 2015-07-08T04:32:02.000Z | 2015-07-08T04:32:02.000Z | lib/hound/json_driver/session.ex | knewter/hound | 49ca7d71f7fb0d9a9de14afa86ca1a1fe5ae1278 | [
"MIT"
] | null | null | null | lib/hound/json_driver/session.ex | knewter/hound | 49ca7d71f7fb0d9a9de14afa86ca1a1fe5ae1278 | [
"MIT"
] | null | null | null | defmodule Hound.JsonDriver.Session do
@moduledoc "Functions to switch sessions."
import Hound.JsonDriver.Utils
@doc """
  When you need more than one browser session, use this function to switch to another session.
  If the session doesn't exist, a new one will be created for you.
All further commands will then run in the session you switched to.
# Pass any name to the session to refer to it later.
change_session_to("random-session")
The name can be an atom or a string. The default session created is called `:default`.
"""
def change_session_to(session_name) do
:gen_server.call(:hound_sessions, {:change_current_session_for_pid, session_name}, 30000)
end
@doc """
When running multiple browser sessions, calling this function will switch to the default browser session.
change_to_default_session
# is the same as calling
change_session_to(:default)
"""
def change_to_default_session do
change_session_to(:default)
end
@doc """
  Execute commands in a separate browser session.

      in_browser_session "another_user", fn ->
        navigate_to "http://example.com"
        click({:id, "announcement"})
      end
"""
def in_browser_session(session_name, func) do
change_session_to(session_name)
apply(func, [])
change_to_default_session()
end
end | 27.979167 | 107 | 0.72003 |
735796359fcb6371111b8c43c9af15a5322d360b | 1,926 | exs | Elixir | test/composition_errors_test.exs | ityonemo/Exonerate | 42b888c156c9179d222b78609d34c07f0b887eaf | [
"MIT"
] | 14 | 2021-01-14T20:14:30.000Z | 2022-01-28T00:58:07.000Z | test/composition_errors_test.exs | ityonemo/Exonerate | 42b888c156c9179d222b78609d34c07f0b887eaf | [
"MIT"
] | 13 | 2019-09-11T17:48:48.000Z | 2021-11-22T23:02:44.000Z | test/composition_errors_test.exs | ityonemo/Exonerate | 42b888c156c9179d222b78609d34c07f0b887eaf | [
"MIT"
] | 1 | 2021-09-12T13:08:54.000Z | 2021-09-12T13:08:54.000Z | defmodule ExonerateTest.CompositionTest do
use ExUnit.Case, async: true
require Exonerate
Exonerate.function_from_string(:defp, :one_of, """
{
"oneOf": [
{ "type": "number", "multipleOf": 5 },
{ "type": "number", "multipleOf": 3 },
{ "type": "object" }
]
}
""")
describe "oneOf" do
test "reports failures when there are multiple failures" do
assert {:error, list} = one_of("foobarbaz")
assert "/oneOf" = list[:schema_pointer]
assert [[
schema_pointer: "/oneOf/0/type",
error_value: "foobarbaz",
json_pointer: "/"
],[
schema_pointer: "/oneOf/1/type",
error_value: "foobarbaz",
json_pointer: "/"
],[
schema_pointer: "/oneOf/2/type",
error_value: "foobarbaz",
json_pointer: "/"
]] = list[:failures]
assert "no matches" == list[:reason]
end
test "reports multiple failures" do
assert {:error, list} = one_of(15)
assert "/oneOf" = list[:schema_pointer]
assert [[
schema_pointer: "/oneOf/2/type",
error_value: 15,
json_pointer: "/"
]] = list[:failures]
assert ["/oneOf/0", "/oneOf/1"] == list[:matches]
assert "multiple matches" == list[:reason]
end
end
Exonerate.function_from_string(:defp, :any_of, """
{
"anyOf": [
{ "type": "string", "maxLength": 5 },
{ "type": "number", "minimum": 0 }
]
}
""")
describe "anyOf" do
test "reports all failures when there are multiple failures" do
assert {:error, list} = any_of("foobarbaz")
assert "/anyOf" = list[:schema_pointer]
assert [[
schema_pointer: "/anyOf/0/maxLength",
error_value: "foobarbaz",
json_pointer: "/"
],[
schema_pointer: "/anyOf/1/type",
error_value: "foobarbaz",
json_pointer: "/"
]] = list[:failures]
end
end
end
| 25.342105 | 67 | 0.558671 |
7357a4c5fedd50514da998c08661c4e35490e71f | 687 | ex | Elixir | lib/slack/user/profile.ex | jclem/slack_ex | e2376cea87c35d14cc31dc545ff8e99106dee28c | [
"MIT"
] | 21 | 2016-06-28T04:03:55.000Z | 2020-01-19T01:51:11.000Z | lib/slack/user/profile.ex | jclem/slack_ex | e2376cea87c35d14cc31dc545ff8e99106dee28c | [
"MIT"
] | 5 | 2016-06-28T04:34:14.000Z | 2019-12-09T20:09:12.000Z | lib/slack/user/profile.ex | jclem/slack_ex | e2376cea87c35d14cc31dc545ff8e99106dee28c | [
"MIT"
] | 6 | 2016-06-28T04:27:08.000Z | 2020-04-22T11:07:03.000Z | defmodule Slack.User.Profile do
@moduledoc """
Functions for working with user profiles
"""
@base "users.profile"
use Slack.Request
@doc """
Get the profile information of a user.
https://api.slack.com/methods/users.profile.get
## Examples
Slack.User.Profile.get(client, user: "U1234567890")
"""
@spec get(Slack.Client.t, Keyword.t) :: Slack.slack_response
defget :get
@doc """
Set the profile information of a user.
https://api.slack.com/methods/users.profile.set
## Examples
Slack.User.Profile.set(client, user: "U1234567890", name: "Name")
"""
@spec set(Slack.Client.t, Keyword.t) :: Slack.slack_response
defpost :set
end
| 20.205882 | 71 | 0.6754 |
7358122f44a7e52c7beb5addc530afd6e2e0fe0d | 2,944 | ex | Elixir | apps/extended_api/lib/extended_api/worker/find_transactions/tags/helper/helper.ex | iotaledger/chronicle | 73566e5613268e4b0c5951265ae4760cedb4051f | [
"Apache-2.0"
] | 19 | 2019-09-17T18:14:36.000Z | 2021-12-06T07:29:27.000Z | apps/extended_api/lib/extended_api/worker/find_transactions/tags/helper/helper.ex | iotaledger/chronicle | 73566e5613268e4b0c5951265ae4760cedb4051f | [
"Apache-2.0"
] | 5 | 2019-09-30T04:57:14.000Z | 2020-11-10T15:41:03.000Z | apps/extended_api/lib/extended_api/worker/find_transactions/tags/helper/helper.ex | iotaledger/chronicle | 73566e5613268e4b0c5951265ae4760cedb4051f | [
"Apache-2.0"
] | 2 | 2019-09-17T19:03:16.000Z | 2021-03-01T01:04:31.000Z | defmodule ExtendedApi.Worker.FindTransactions.Tags.Helper do
@moduledoc """
This module hold all the required helper functions
Which is gonna be used by FindTransactions.Tags worker
"""
alias ExtendedApi.Worker.FindTransactions.{Tags, Tags.EdgeFn}
alias Core.DataModel.{Keyspace.Tangle, Table.Edge}
import OverDB.Builder.Query
# these types check guards
defguardp is_tag(tag) when is_binary(tag) and byte_size(tag) == 27
@edge_cql "SELECT el FROM tangle.edge WHERE v1 = ? AND lb = 70"
# Start of Helper functions for edge table queries ###########################
@doc """
  This function takes the tags as a list and the worker state, then
  returns a tuple:

  {:ok, state} :
    state is an updated map which includes all the new queries_states
    and the initial hashes/hints.
  {:error, term} :
    an error occurred, either because of an invalid tag structure/type
    or a dead shard stage in the query engine.
"""
  @spec queries(list, map, list, integer) :: {:ok, map} | {:error, term}
def queries(tags, state, queries_states_list \\ [], ref \\ 0)
def queries(tags, state, queries_states_list, ref) do
_queries(tags, state, queries_states_list, ref)
end
  @spec _queries(list, map, list, integer) :: tuple
  defp _queries([tag | rest], state, queries_states_list, ref) when is_tag(tag) do
    {ok?, _, q_s} = edge_query(tag, ref)
    _queries(ok?, rest, state, queries_states_list, ref, q_s)
end
  @spec _queries(list, map, list, integer) :: tuple
  defp _queries([], %{from: from}, queries_states_list, ref) do
# ref indicates the total number of queries.
# queries_states_list is a list which hold all the
# generated queries_states that needed to decode future responses.
state =
Enum.into(
queries_states_list,
%{ref: ref, hints: [], from: from}
)
# return state to worker
{:ok, state}
end
  @spec _queries(list, map, list, integer) :: tuple
  defp _queries(_, _, _, _) do
{:error, :invalid}
end
@spec _queries(atom, list, map, list, integer, map) :: tuple
defp _queries(:ok,rest,state, queries_states_list, ref, q_s) do
# :ok indicates ref => q_s has been received by the shard's stage.
# therefore we should put that in queries_states_list and increase the ref.
# now loop through the rest with updated ref/queries_states_list.
_queries(rest,state, [{ref, q_s} | queries_states_list], ref+1)
end
@spec _queries(term, list, map, list, integer, map) :: tuple
  defp _queries(ok?, _, _, _, _, _) do
{:error, ok?}
end
@spec edge_query(binary, integer, nil | map) :: tuple
  def edge_query(tag, ref, opts \\ nil) do
{Tangle, Edge}
|> select([:el]) |> type(:stream)
|> assign(tag: tag)
|> cql(@edge_cql)
|> values([{:blob, tag}])
|> opts(opts || %{function: {EdgeFn, :create_hint, [tag]}})
|> pk([v1: tag]) |> prepare?(true) |> reference({:edge, ref})
|> Tags.query()
end
end
| 34.635294 | 82 | 0.659647 |
73582b8e0fbe1ae8b71273441c79475c3c1bcb8f | 1,633 | ex | Elixir | server_elixir/lib/server_elixir_web/controllers/authentication.ex | johnosullivan/MiHome | 4e4062fd8801144a26bea8811e76688009913f94 | [
"MIT"
] | 7 | 2018-05-29T01:41:12.000Z | 2021-12-26T04:27:21.000Z | server_elixir/lib/server_elixir_web/controllers/authentication.ex | johnosullivan/MiHome | 4e4062fd8801144a26bea8811e76688009913f94 | [
"MIT"
] | 4 | 2017-12-06T14:48:59.000Z | 2021-05-04T23:57:30.000Z | server_elixir/lib/server_elixir_web/controllers/authentication.ex | johnosullivan/MiHome | 4e4062fd8801144a26bea8811e76688009913f94 | [
"MIT"
] | 5 | 2017-10-23T16:30:57.000Z | 2019-07-27T19:40:11.000Z | defmodule ServerElixirWeb.Controllers.Authentication do
use ServerElixirWeb, :controller
alias ServerElixir.Authentication
def create(conn, %{"email" => email, "password" => password}) do
case Authentication.authenticate_user(email, password) do
{:ok, user} ->
if user.is_active do
if user.two_factor do
{:ok, token, _} =
ServerElixirWeb.Guardian.encode_and_sign(user, %{typ: "totp"}, ttl: {2, :minute})
conn
|> put_status(:ok)
|> put_view(ServerElixirWeb.UserView)
|> render("sign_in_user_totp.json", token: token)
else
{:ok, token, _} = ServerElixirWeb.Guardian.encode_and_sign(user, %{}, ttl: {7, :day})
conn
|> fetch_session
|> put_session(:current_user_id, user.id)
|> put_status(:ok)
|> put_view(ServerElixirWeb.UserView)
|> render("sign_in_user.json", user: user, token: token)
end
else
conn
|> put_status(:unauthorized)
|> put_view(ServerElixirWeb.ErrorView)
|> render("error.json", reason: "account is deactivated: #{user.email}")
end
{:error, message} ->
conn
|> put_status(:unauthorized)
|> put_view(ServerElixirWeb.ErrorView)
|> render("error.json", reason: message)
end
end
def index(conn, _params) do
user = ServerElixirWeb.Guardian.Plug.current_resource(conn)
conn
|> put_status(:ok)
|> put_view(ServerElixirWeb.UserView)
|> render("ping_auth_user.json", user: user)
end
end
| 31.403846 | 97 | 0.590937 |
73584048f7bcf0a34479f71fdfb1ef9c61ae7f95 | 986 | ex | Elixir | web/helpers/pubsub.ex | enixdark/audit_api | 3aa6e9169cd9e80f06d91e4104438398a012a86f | [
"MIT"
] | 1 | 2018-01-20T00:58:33.000Z | 2018-01-20T00:58:33.000Z | web/helpers/pubsub.ex | enixdark/audit_api | 3aa6e9169cd9e80f06d91e4104438398a012a86f | [
"MIT"
] | null | null | null | web/helpers/pubsub.ex | enixdark/audit_api | 3aa6e9169cd9e80f06d91e4104438398a012a86f | [
"MIT"
] | null | null | null | # require IEx;
# defmodule Helpers.PubSub do
# use GenServer
# require Logger
# # defstart start_link(_), do: initial_state(0)
# def start_link(otps \\ []) do
# Logger.info "start pubsub redis...................."
# GenServer.start_link(__MODULE__, [], otps)
# end
# def init(_) do
# Redix.PubSub.start_link()
# end
# def handle_call(command, _from, state) do
# result = case command do
# {:pub, key} -> Redix.command(state, ["GET","#{key}"])
# end
# {:reply, result, state}
# end
# def handle_cast(command, state) do
# case command do
# {:flush, key} ->
# Redix.command!(state, ["PUBLISH",:flush, "#{key}"])
# end
# {:noreply, state}
# end
# @doc """
# get pid of worker when it started
# """
# @spec get(pid, any) :: pid
# def get(pid, key) do
# GenServer.call(pid, {:get, key}, 1_000_000)
# end
# def flush(pid, key) do
# GenServer.cast(pid, {:flush, key})
# end
# end
| 20.978723 | 61 | 0.558824 |
735845cdf5be78bcd648c34a2254af7aa136a097 | 9,687 | ex | Elixir | lib/formex/view.ex | nickolaich/formex | 74214349102b2094fcadec5a13b86c9140e5e37c | [
"MIT"
] | 221 | 2017-01-15T18:14:55.000Z | 2021-08-17T14:15:40.000Z | lib/formex/view.ex | nickolaich/formex | 74214349102b2094fcadec5a13b86c9140e5e37c | [
"MIT"
] | 25 | 2017-04-09T01:01:20.000Z | 2019-04-17T01:36:01.000Z | lib/formex/view.ex | nickolaich/formex | 74214349102b2094fcadec5a13b86c9140e5e37c | [
"MIT"
] | 24 | 2017-04-08T22:15:06.000Z | 2021-11-15T11:55:41.000Z | defmodule Formex.View do
use Phoenix.HTML
alias Formex.Form
alias Formex.Field
alias Formex.FormCollection
alias Formex.FormNested
alias Formex.Button
@moduledoc """
Helper functions for templating.
Example of use:
<%= formex_form_for @form, @action, fn f -> %>
<%= if @form.submitted? do %>
<div class="alert alert-danger">
<p>Oops, something went wrong! Please check the errors below.</p>
</div>
<% end %>
<%= formex_rows f %>
<div class="form-group">
<%= submit "Submit", class: "btn btn-primary" %>
</div>
<% end %>
## Changing a form template
You can change the template globally or in the specific form/field.
* config
```
config :formex,
template: Formex.Template.BootstrapHorizontal
template_options: [ # options used by this template
left_column: "col-xs-2",
right_column: "col-xs-10"
]
```
* `formex_form_for/4`:
```
<%= formex_form_for @form, @action, [
class: "form-horizontal",
template: Formex.Template.BootstrapHorizontal
], fn f -> %>
...
<% end %>
```
* `formex_rows/2`:
```
<%= formex_rows f, template: Formex.Template.BootstrapHorizontal %>
```
* `formex_row/3`:
```
<%= formex_row f, :name, template: Formex.Template.BootstrapHorizontal %>
```
"""
defmacro __using__([]) do
quote do
import Formex.View
import Formex.View.Nested
import Formex.View.Collection
end
end
@doc """
  Works similarly to
  [Phoenix.HTML.Form](https://hexdocs.pm/phoenix_html/Phoenix.HTML.Form.html#form_for/4).

  In the callback function the first argument is `t:Formex.Form.t/0` instead of a
  `t:Phoenix.HTML.Form.t/0`.
  This argument contains the `t:Phoenix.HTML.Form.t/0` under a `:phoenix_form` key.
## Options
  In the `options` argument you pass options for both `Formex.View` and `Phoenix.HTML`.
### Formex options
* `template` - a form template that implements `Formex.Template`, for example:
`Formex.Template.BootstrapHorizontal`
* `template_options` - additional options, supported by the template
### Phoenix options
Options not mentioned before will be passed to a
[Phoenix.HTML.Form](https://hexdocs.pm/phoenix_html/Phoenix.HTML.Form.html#form_for/4)
  function. Options below are already set by Formex and can be overridden.
* `as` - form name, defaults to struct name
* `method` - method, defaults to `:post`
For rest of options, see
[Phoenix.HTML.Form](https://hexdocs.pm/phoenix_html/Phoenix.HTML.Form.html#form_for/4) docs.
"""
@spec formex_form_for(
form :: Form.t(),
action :: String.t(),
options :: Keyword.t(),
fun :: (Formex.t() -> Phoenix.HTML.unsafe())
) :: Phoenix.HTML.safe()
def formex_form_for(form, action, options \\ [], fun) do
phoenix_options =
options
|> Keyword.delete(:template)
|> Keyword.delete(:template_options)
|> Keyword.put_new(:as, form_for_name(form))
|> Keyword.put_new(:method, form.method || :post)
fake_params =
%{}
|> Map.put(to_string(phoenix_options[:as]), form_to_params(form))
fake_conn = %Plug.Conn{params: fake_params, method: "POST"}
Phoenix.HTML.Form.form_for(fake_conn, action, phoenix_options, fn phx_form ->
form
|> Map.put(:phoenix_form, phx_form)
|> Map.put(:template, options[:template])
|> Map.put(:template_options, options[:template_options])
|> fun.()
end)
end
defp form_for_name(%{struct_module: module}) do
module
|> Module.split()
|> List.last()
|> Macro.underscore()
end
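  # Builds a params map from the form's underlying struct so the fake
  # %Plug.Conn{} handed to `Phoenix.HTML.Form.form_for/4` pre-populates every
  # input (including nested forms and collections) with the current values.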
@spec form_to_params(form :: Form.t()) :: Map.t()
defp form_to_params(form) do
form.items
|> Enum.map(fn item ->
case item do
%Field{} ->
form_to_params_field(form, item)
%FormNested{} ->
form_to_params_nested(form, item)
%FormCollection{} ->
form_to_params_collection(form, item)
_ ->
false
end
end)
|> Enum.filter(& &1)
|> Enum.into(%{})
end
@spec form_to_params_field(form :: Form.t(), item :: Field.t()) :: Map.t()
defp form_to_params_field(form, item) do
val =
if item.custom_value do
value = Map.get(form.new_struct, item.struct_name)
item.custom_value.(value)
else
Map.get(form.new_struct, item.struct_name)
end
new_val =
case item.type do
:multiple_select ->
(val || [])
|> Enum.map(fn subval ->
subval
|> case do
substruct when is_map(substruct) ->
substruct.id
_ ->
subval
end
|> to_string
end)
_ ->
val
end
{to_string(item.name), new_val}
end
@spec form_to_params_nested(form :: Form.t(), item :: FormNested.t()) :: Map.t()
defp form_to_params_nested(_form, item) do
sub_params = form_to_params(item.form)
sub_struct = item.form.new_struct
sub_params =
if Map.has_key?(sub_struct, :id) do
sub_params
|> Map.put("id", sub_struct.id |> to_string)
else
sub_params
end
{to_string(item.name), sub_params}
end
@spec form_to_params_collection(form :: Form.t(), item :: FormCollection.t()) :: Map.t()
defp form_to_params_collection(_form, item) do
new_val =
item.forms
|> Enum.with_index()
|> Enum.map(fn {nested_form, key} ->
sub_struct = nested_form.form.new_struct
subparams =
nested_form.form
|> form_to_params()
|> Map.put("id", sub_struct.id |> to_string)
|> Map.put("formex_id", sub_struct.formex_id)
|> Map.put(
to_string(item.delete_field),
sub_struct
|> Map.get(item.delete_field)
|> to_string
)
{key, subparams}
end)
|> Enum.into(%{})
{to_string(item.name), new_val}
end
@doc """
Generates all `formex_row/2`s at once
## Options
* `template` - a form template that implements `Formex.Template`, for example:
`Formex.Template.BootstrapHorizontal`
* `template_options` - additional options, supported by the template
"""
@spec formex_rows(Form.t(), Keyword.t()) :: Phoenix.HTML.safe()
def formex_rows(form, options \\ []) do
Enum.map(form.items, fn item ->
formex_row(form, item.name, options)
end)
end
@doc """
Generates a row using `formex_label/3` and `formex_input/3`
Example of use:
<%= formex_row f, :title %>
<%= formex_row f, :content %>
<%= formex_row f, :category_id %>
## Options
* `template` - a form template that implements `Formex.Template`, for example:
`Formex.Template.BootstrapHorizontal`
* `template_options` - additional options, supported by the template
"""
@spec formex_row(Form.t(), Atom.t(), Keyword.t()) :: Phoenix.HTML.safe()
def formex_row(form, item_name, options \\ []) do
item = get_item(form, item_name)
template = get_template(form, options)
template_options = get_template_options(form, options)
case item do
%Field{} ->
template.generate_row(form, item, template_options)
%Button{} ->
template.generate_row(form, item, template_options)
%FormNested{} ->
Formex.View.Nested.formex_nested(form, item_name, options)
%FormCollection{} ->
Formex.View.Collection.formex_collection(form, item_name, options)
end
end
@doc """
Generates an input, used by `formex_row/3`
Example of use:
<div>
<%= formex_label f, :title %>
<%= formex_input f, :title %>
</div>
<%= formex_input f, :some_hidden_field %>
## Options
* `template` - a form template that implements `Formex.Template`, for example:
`Formex.Template.BootstrapHorizontal`
"""
@spec formex_input(Form.t(), Atom.t(), Keyword.t()) :: Phoenix.HTML.safe()
def formex_input(form, item_name, options \\ []) do
item = get_item(form, item_name)
template = get_template(form, options)
template.generate_input(form, item)
end
@doc """
Generates a label, used by `formex_row/3`
Example of use:
<div>
<%= formex_label f, :title %>
<%= formex_input f, :title %>
</div>
## Options
* `template` - a form template that implements `Formex.Template`, for example:
`Formex.Template.BootstrapHorizontal`
"""
@spec formex_label(Form.t(), Atom.t(), Keyword.t()) :: Phoenix.HTML.safe()
def formex_label(form, item_name, options \\ []) do
item = get_item(form, item_name)
template = get_template(form, options)
    class = options[:class] || ""
template.generate_label(form, item, class)
end
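  # Template resolution order: the per-row :template option wins, then the
  # form's own template, then the :formex application config, and finally
  # the default Formex.Template.BootstrapVertical.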
def get_template(form, row_options) do
row_options[:template] || form.template || Application.get_env(:formex, :template) ||
Formex.Template.BootstrapVertical
end
def get_template_options(form, row_options) do
[]
|> Keyword.merge(Application.get_env(:formex, :template_options) || [])
|> Keyword.merge(form.template_options || [])
|> Keyword.merge(row_options[:template_options] || [])
end
defp get_item(form, item_name) do
item = Enum.find(form.items, &(&1.name == item_name))
if !item do
throw("Key :" <> to_string(item_name) <> " not found in form " <> to_string(form.type))
end
item
end
end
| 27.058659 | 100 | 0.607618 |
73585a6d07aa9995adcb16e5041132fbef11a4c1 | 4,084 | ex | Elixir | lib/challenge_gov/messages.ex | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | [
"CC0-1.0"
] | null | null | null | lib/challenge_gov/messages.ex | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | [
"CC0-1.0"
] | null | null | null | lib/challenge_gov/messages.ex | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | [
"CC0-1.0"
] | null | null | null | defmodule ChallengeGov.Messages do
@moduledoc """
Context for Messages
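  A minimal usage sketch (the `user` and `context` values are assumed to be an
  already-loaded account and message context; the params accepted by `create/3`
  are ultimately defined by `Message.create_changeset/4`):
      {:ok, message} = ChallengeGov.Messages.create(user, context, %{"content" => "Hello"})
      ChallengeGov.Messages.all(preload: [:author, :context], filter: %{"status" => "sent"})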
"""
@behaviour Stein.Filter
import Ecto.Query
alias Ecto.Multi
alias ChallengeGov.Repo
alias ChallengeGov.Messages.Message
alias ChallengeGov.Messages.MessageContextStatus
alias Stein.Filter
def all(opts \\ []) do
Message
|> preload(^opts[:preload])
|> order_by([m], desc: m.updated_at)
|> Filter.filter(opts[:filter], __MODULE__)
|> Repo.all()
end
def all_drafts_for_user(user, opts \\ []) do
case user.role do
"challenge_owner" ->
Message
|> join(:inner, [m], mc in assoc(m, :context), as: :context)
|> join(:inner, [m], a in assoc(m, :author), as: :author)
|> join(:inner, [context: mc], mcs in assoc(mc, :statuses),
on: mcs.user_id == ^user.id,
as: :context_statuses
)
|> preload([:author, :context])
|> order_by([m], desc: m.updated_at)
|> where([m], m.status == "draft")
|> where([author: a], a.id == ^user.id or a.role == "challenge_owner")
|> Repo.all()
_ ->
filter =
opts[:filter]
|> Map.merge(%{
"author_id" => user.id,
"status" => "draft"
})
all(preload: [:author, :context], filter: filter)
end
end
def get_draft(id) do
Message
|> Repo.get_by(id: id, status: "draft")
|> case do
nil ->
{:error, :not_found}
draft_message ->
{:ok, draft_message}
end
end
def new(), do: Message.changeset(%Message{})
def edit(message), do: Message.changeset(message)
def create(user, context, params) do
Multi.new()
|> Multi.run(:find_message, fn repo, _changes ->
message_id = Map.get(params, "id", nil)
case message_id do
nil ->
{:ok, %Message{}}
id ->
{:ok, repo.get(Message, id)}
end
end)
|> Multi.insert_or_update(:message, fn %{find_message: message} ->
Message.create_changeset(message, user, context, params)
end)
|> Multi.run(:cache_last_message, fn _repo, %{message: message} ->
maybe_cache_last_message(message, context)
end)
|> Multi.run(:message_context_statuses, fn _repo, %{message: message} ->
maybe_set_recipients_unread(message)
end)
|> Multi.update(:author_message_context_status, fn _changes ->
MessageContextStatus
|> Repo.get_by(user_id: user.id, message_context_id: context.id)
|> Ecto.Changeset.change(read: true)
end)
|> Repo.transaction()
|> case do
{:ok, %{message: message}} ->
message = Repo.preload(message, [:author])
{:ok, message}
{:error, _, changeset, _} ->
{:error, changeset}
end
end
defp maybe_cache_last_message(message = %{status: "sent"}, context) do
context
|> Repo.preload([:last_message])
|> Ecto.Changeset.change(last_message: message)
|> Repo.update()
end
defp maybe_cache_last_message(%{status: "draft"}, context), do: {:ok, context}
defp maybe_set_recipients_unread(message = %{status: "sent"}) do
result =
MessageContextStatus
|> where([mcs], mcs.message_context_id == ^message.message_context_id)
|> update(set: [read: false])
|> Repo.update_all([])
{:ok, result}
end
defp maybe_set_recipients_unread(%{status: "draft"}), do: {:ok, nil}
@impl Stein.Filter
def filter_on_attribute({"status", value}, query) do
query
|> where([m], m.status == ^value)
end
def filter_on_attribute({"author_id", value}, query) do
query
|> where([m], m.author_id == ^value)
end
def filter_on_attribute({"challenge_id", value}, query) do
if has_named_binding?(query, :context) do
query
# |> join(:inner, [m], mc in assoc(m, :context))
|> where([context: mc], mc.context == "challenge" and mc.context_id == ^value)
else
query
|> join(:inner, [m], mc in assoc(m, :context))
|> where([m, mc], mc.context == "challenge" and mc.context_id == ^value)
end
end
end
| 27.409396 | 84 | 0.594025 |
73589cf7230127143558fb20884f227fddf1b978 | 65 | ex | Elixir | web/views/email_view.ex | timrourke/colorstorm-api | fee60a52701a4f773fcd2c8c5c70a472d0d52f09 | [
"MIT"
] | null | null | null | web/views/email_view.ex | timrourke/colorstorm-api | fee60a52701a4f773fcd2c8c5c70a472d0d52f09 | [
"MIT"
] | null | null | null | web/views/email_view.ex | timrourke/colorstorm-api | fee60a52701a4f773fcd2c8c5c70a472d0d52f09 | [
"MIT"
] | null | null | null | defmodule Colorstorm.EmailView do
use Colorstorm.Web, :view
end | 21.666667 | 33 | 0.815385 |
7358d4fc44cf1bd58c6e125e27d3070b78476888 | 326 | exs | Elixir | test/loom_maps_test.exs | samgaw/loom | 574db0c8ce6170f65754474789382c142a1050c3 | [
"Apache-2.0"
] | null | null | null | test/loom_maps_test.exs | samgaw/loom | 574db0c8ce6170f65754474789382c142a1050c3 | [
"Apache-2.0"
] | null | null | null | test/loom_maps_test.exs | samgaw/loom | 574db0c8ce6170f65754474789382c142a1050c3 | [
"Apache-2.0"
] | null | null | null | defmodule LoomMapsTest do
use ExUnit.Case
import Loom.TypedORMap
alias Loom.PNCounter, as: C
alias Loom.PNCounterMap, as: CMap
test "Basic definition..." do
defmap(C)
m = C.new() |> C.inc(:a, 5) |> C.dec(:a, 3)
c = CMap.new() |> CMap.put(:a, "omg", m)
assert 2 == CMap.get_value(c, "omg")
end
end
| 23.285714 | 47 | 0.610429 |
7358edd3052110d23b04cfb91a3dc5a5eb5d7dc0 | 1,941 | exs | Elixir | test/changelog/schema/news/news_item_comment_test.exs | axelson/changelog.com | bad9f461aabbde0faa938f7b2ae643ed47d1df9b | [
"MIT"
] | 1 | 2021-01-06T18:21:45.000Z | 2021-01-06T18:21:45.000Z | test/changelog/schema/news/news_item_comment_test.exs | codexn/changelog.com | 25ce501ee62eef76731c38d590667e8132096ba8 | [
"MIT"
] | null | null | null | test/changelog/schema/news/news_item_comment_test.exs | codexn/changelog.com | 25ce501ee62eef76731c38d590667e8132096ba8 | [
"MIT"
] | null | null | null | defmodule Changelog.NewsItemCommentTest do
use Changelog.SchemaCase
alias Changelog.NewsItemComment
describe "insert_changeset" do
test "with valid attributes" do
changeset =
NewsItemComment.insert_changeset(%NewsItemComment{}, %{
content: "ohai",
item_id: 1,
author_id: 2
})
assert changeset.valid?
end
test "with invalid attributes" do
changeset = NewsItemComment.insert_changeset(%NewsItemComment{}, %{content: "ohnoes"})
refute changeset.valid?
end
end
describe "mentioned_people/1" do
test "returns an empty list when there aren't any mentions" do
comment = build(:news_item_comment, content: "zomg this is rad")
assert NewsItemComment.mentioned_people(comment) == []
end
test "also works directly with comment content" do
assert NewsItemComment.mentioned_people("zomg this is rad") == []
end
test "returns one person when they are mentioned" do
person = insert(:person, handle: "joeblow")
comment = build(:news_item_comment, content: "zomg @joeblow this is rad")
assert NewsItemComment.mentioned_people(comment) == [person]
end
test "returns many people when they are mentioned" do
p1 = insert(:person, handle: "joeblow")
p2 = insert(:person, handle: "janeblow")
p3 = insert(:person, handle: "aliceblow")
comment =
build(:news_item_comment, content: "zomg @joeblow & @janeblow this is rad @aliceblow!")
assert NewsItemComment.mentioned_people(comment) == [p1, p2, p3]
end
end
describe "nested/1" do
test "nests comments appropriately" do
parent = %{id: 1, parent_id: nil, content: "ohai"}
reply = %{id: 2, parent_id: 1, content: "bai now"}
nested = NewsItemComment.nested([reply, parent])
assert length(nested) == 1
assert length(List.first(nested).children) == 1
end
end
end
| 30.328125 | 95 | 0.66306 |
7359195777bdf30d4e04be2b024adb9cbfe2e113 | 1,148 | exs | Elixir | test/releases/assets_test.exs | nmohoric/tentacat | 3bbb9990aafad0a7232d302526ca00e282e7ba43 | [
"MIT"
] | null | null | null | test/releases/assets_test.exs | nmohoric/tentacat | 3bbb9990aafad0a7232d302526ca00e282e7ba43 | [
"MIT"
] | null | null | null | test/releases/assets_test.exs | nmohoric/tentacat | 3bbb9990aafad0a7232d302526ca00e282e7ba43 | [
"MIT"
] | null | null | null | defmodule Tentacat.Releases.AssetsTest do
use ExUnit.Case, async: false
use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney
import Tentacat.Releases.Assets
doctest Tentacat.Releases.Assets
@client Tentacat.Client.new(%{access_token: "8e663c8614ced27c09b963f806ac46776a29db50"})
setup_all do
HTTPoison.start
end
test "list/4" do
use_cassette "releases/assets#list" do
{_,[%{"name" => name}],_} = list(2317862, "soudqwiggle", "elixir-conspiracy", @client)
assert name == "some.zip"
end
end
test "find/4" do
use_cassette "releases/assets#find" do
{_,%{"name" => name},_} = find(1146038, "soudqwiggle", "elixir-conspiracy", @client)
assert name == "some.zip"
end
end
test "edit/6" do
use_cassette "releases/assets#edit" do
{_,%{"name" => name},_} = edit("foo.zip", 1146038, "soudqwiggle", "elixir-conspiracy", @client, [])
assert name == "foo.zip"
end
end
test "delete/4" do
use_cassette "releases/assets#delete" do
{status_code, _,_} = delete(1146038, "soudqwiggle", "elixir-conspiracy", @client)
assert status_code == 204
end
end
end
| 27.333333 | 105 | 0.660279 |
735927078d04f04f342fd24bfa31fb49f5b92920 | 1,093 | exs | Elixir | config/dev.exs | joaoevangelista/iss-phoenix | ad30fa88ebe1172a1e14f22896edf73e97490ce5 | [
"MIT"
] | null | null | null | config/dev.exs | joaoevangelista/iss-phoenix | ad30fa88ebe1172a1e14f22896edf73e97490ce5 | [
"MIT"
] | null | null | null | config/dev.exs | joaoevangelista/iss-phoenix | ad30fa88ebe1172a1e14f22896edf73e97490ce5 | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :iss_location, ISS.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin",
cd: Path.expand("../", __DIR__)]]
# Watch static and templates for browser reloading.
config :iss_location, ISS.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{web/views/.*(ex)$},
~r{web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
| 31.228571 | 73 | 0.68893 |
7359274b987679d5288907dceddf7e3de1e520bf | 2,262 | exs | Elixir | mix.exs | csboling/faqcheck | bc182c365d466c8dcacc6b1a5fe9186a2c912cd4 | [
"CC0-1.0"
] | null | null | null | mix.exs | csboling/faqcheck | bc182c365d466c8dcacc6b1a5fe9186a2c912cd4 | [
"CC0-1.0"
] | 20 | 2021-09-08T04:07:31.000Z | 2022-03-10T21:52:24.000Z | mix.exs | csboling/faqcheck | bc182c365d466c8dcacc6b1a5fe9186a2c912cd4 | [
"CC0-1.0"
] | null | null | null | defmodule Faqcheck.Umbrella.MixProject do
use Mix.Project
def project do
[
apps_path: "apps",
version: "0.1.0",
start_permanent: Mix.env() == :prod,
deps: deps(),
aliases: aliases(),
releases: [
faqcheck_umbrella: [
applications: [
faqcheck: :permanent,
faqcheck_web: :permanent,
],
],
],
]
end
# thanks to https://fiqus.coop/en/2019/07/15/add-git-commit-info-to-your-elixir-phoenix-app/
defp update_version(_) do
contents = maybe_write_version()
Mix.shell().info("updated app version: #{inspect(contents)}")
end
defp maybe_write_version() do
try do
write_version()
rescue
_ -> File.read("VERSION.txt")
end
end
defp get_commit_sha() do
System.cmd("git", ["describe", "--always", "--dirty"])
|> elem(0)
|> String.trim()
end
defp get_commit_date() do
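    # `git log --date=raw` prints "<unix seconds> <+/-HHMM offset>"; multiplying
    # the offset by 36 approximates it in seconds (exact for whole-hour offsets).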
[sec, tz] =
System.cmd("git", ~w|log -1 --date=raw --format=%cd|)
|> elem(0)
|> String.split(~r/\s+/, trim: true)
|> Enum.map(&String.to_integer/1)
DateTime.from_unix!(sec + tz * 36)
end
defp write_version() do
contents = [
get_commit_sha(),
get_commit_date(),
]
File.write("VERSION.txt", Enum.join(contents, "\n"), [:write])
contents
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options.
#
# Dependencies listed here are available only for this project
# and cannot be accessed from applications inside the apps/ folder.
defp deps do
[]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
# $ mix setup
#
# See the documentation for `Mix` for more info on aliases.
#
# Aliases listed here are available only for this project
# and cannot be accessed from applications inside the apps/ folder.
defp aliases do
[
# run `mix setup` in all child apps
setup: ["cmd mix setup"],
compile: ["compile --all-warnings", &update_version/1],
]
end
end
| 24.06383 | 94 | 0.617153 |
735942321db51212c644fb83855279ae2ef94313 | 1,459 | exs | Elixir | lib/mix/test/mix/tasks/compile.yecc_test.exs | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/tasks/compile.yecc_test.exs | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/tasks/compile.yecc_test.exs | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | Code.require_file "../../test_helper.exs", __DIR__
defmodule Mix.Tasks.Compile.YeccTest do
use MixTest.Case
import ExUnit.CaptureIO
setup do
Mix.Project.push MixTest.Case.Sample
:ok
end
teardown do
Mix.Project.pop
:ok
end
test "compilation continues if one file fails to compile" do
in_fixture "compile_yecc", fn ->
File.write!("src/zzz.yrl", """)
oops.
"""
assert_raise CompileError, fn ->
capture_io fn ->
Mix.Tasks.Compile.Yecc.run ["--force"]
end
end
assert File.regular?("src/test_ok.erl")
end
end
test "compiles src/test_ok.yrl" do
in_fixture "compile_yecc", fn ->
assert Mix.Tasks.Compile.Yecc.run([]) == :ok
assert_received { :mix_shell, :info, ["Compiled src/test_ok.yrl"] }
assert File.regular?("src/test_ok.erl")
assert Mix.Tasks.Compile.Yecc.run([]) == :noop
refute_received { :mix_shell, :info, ["Compiled src/test_ok.yrl"] }
assert Mix.Tasks.Compile.Yecc.run(["--force"]) == :ok
assert_received { :mix_shell, :info, ["Compiled src/test_ok.yrl"] }
end
end
test "removes old artifact files" do
in_fixture "compile_yecc", fn ->
assert Mix.Tasks.Compile.Yecc.run([]) == :ok
assert File.regular?("src/test_ok.erl")
File.rm!("src/test_ok.yrl")
assert Mix.Tasks.Compile.Yecc.run([]) == :ok
refute File.regular?("src/test_ok.erl")
end
end
end
| 24.728814 | 73 | 0.6244 |
73596bc1d736db65cc6cb13ce05c008b3d67bf67 | 132 | exs | Elixir | apps/heroes_web/test/test_helper.exs | miquecg/heroes-board-game | 2e8745c7577060a7fc80ad1e4c38c2c507a8c488 | [
"MIT"
] | 2 | 2020-04-09T06:42:15.000Z | 2022-03-03T15:52:38.000Z | apps/heroes_web/test/test_helper.exs | miquecg/heroes-board-game | 2e8745c7577060a7fc80ad1e4c38c2c507a8c488 | [
"MIT"
] | 5 | 2020-04-28T16:05:46.000Z | 2021-01-03T14:08:19.000Z | apps/heroes_web/test/test_helper.exs | miquecg/heroes-board-game | 2e8745c7577060a7fc80ad1e4c38c2c507a8c488 | [
"MIT"
] | null | null | null | GenServer.stop(Web.ChannelWatcher)
Hammox.defmock(GameMock, for: GameBehaviour)
ExUnit.start(exclude: :browser, capture_log: true)
| 26.4 | 50 | 0.810606 |
735984e85ce51f781e5a353bdcb1ac38bd0ff69c | 63 | exs | Elixir | test/ex_mexc_test.exs | fremantle-industries/ex_mexc | f340fa93e06377a5e75e30c32b53cfc7ab4717b1 | [
"MIT"
] | 1 | 2021-08-06T01:07:25.000Z | 2021-08-06T01:07:25.000Z | test/ex_mexc_test.exs | fremantle-industries/ex_mexc | f340fa93e06377a5e75e30c32b53cfc7ab4717b1 | [
"MIT"
] | 7 | 2021-11-01T05:04:13.000Z | 2022-03-01T05:02:51.000Z | test/ex_mexc_test.exs | fremantle-industries/ex_mexc | f340fa93e06377a5e75e30c32b53cfc7ab4717b1 | [
"MIT"
] | null | null | null | defmodule ExMexcTest do
use ExUnit.Case
doctest ExMexc
end
| 12.6 | 23 | 0.793651 |
73598d995409f79b447551aeef9de7414b3faff2 | 2,175 | ex | Elixir | lib/pigeon/legacy_fcm/result_parser.ex | VoiSmart/pigeon | 54735f869dd5500991dfa0b13dc71ca2191cd0ff | [
"MIT"
] | 545 | 2015-09-06T15:50:32.000Z | 2022-03-21T08:21:11.000Z | lib/pigeon/legacy_fcm/result_parser.ex | VoiSmart/pigeon | 54735f869dd5500991dfa0b13dc71ca2191cd0ff | [
"MIT"
] | 178 | 2016-01-14T22:21:20.000Z | 2022-02-18T22:34:30.000Z | lib/pigeon/legacy_fcm/result_parser.ex | VoiSmart/pigeon | 54735f869dd5500991dfa0b13dc71ca2191cd0ff | [
"MIT"
] | 124 | 2016-01-17T11:30:16.000Z | 2022-02-28T16:59:41.000Z | defmodule Pigeon.LegacyFCM.ResultParser do
@moduledoc false
def parse([], [], notif) do
notif
end
def parse(regid, results, notif) when is_binary(regid) do
parse([regid], results, notif)
end
def parse([regid | reg_res], [result | rest_results], notif) do
updated_notif =
case result do
%{"message_id" => id, "registration_id" => new_regid} ->
notif
|> put_update(regid, new_regid)
|> Map.put(:message_id, id)
%{"message_id" => id} ->
notif
|> put_success(regid)
|> Map.put(:message_id, id)
%{"error" => error} ->
notif
|> put_error(regid, error)
end
parse(reg_res, rest_results, updated_notif)
end
defp put_update(%{response: resp} = notif, regid, new_regid) do
new_resp = [{:update, {regid, new_regid}} | resp]
%{notif | response: new_resp}
end
defp put_success(%{response: resp} = notif, regid) do
new_resp = [{:success, regid} | resp]
%{notif | response: new_resp}
end
defp put_error(%{response: resp} = notif, regid, error) do
error = parse_error(error)
%{notif | response: [{error, regid} | resp]}
end
def parse_error("DeviceMessageRateExceeded"),
do: :device_message_rate_exceeded
def parse_error("InternalServerError"), do: :internal_server_error
def parse_error("InvalidApnsCredential"), do: :invalid_apns_credential
def parse_error("InvalidDataKey"), do: :invalid_data_key
def parse_error("InvalidPackageName"), do: :invalid_package_name
def parse_error("InvalidParameters"), do: :invalid_parameters
def parse_error("InvalidRegistration"), do: :invalid_registration
def parse_error("InvalidTtl"), do: :invalid_ttl
def parse_error("MessageTooBig"), do: :message_too_big
def parse_error("MissingRegistration"), do: :missing_registration
def parse_error("MismatchSenderId"), do: :mismatch_sender_id
def parse_error("NotRegistered"), do: :not_registered
def parse_error("TopicsMessageRateExceeded"),
do: :topics_message_rate_exceeded
def parse_error("Unavailable"), do: :unavailable
def parse_error(_), do: :unknown_error
end
| 27.1875 | 72 | 0.675402 |
735997e9478cb408b28c0072354debebd43d9735 | 567 | ex | Elixir | lib/mysimplelist_web/live/list_live/show.ex | ScorpionResponse/mysimplelist | 3c792373cc372ab5e196fe109b9dae68b97b7220 | [
"Apache-2.0"
] | null | null | null | lib/mysimplelist_web/live/list_live/show.ex | ScorpionResponse/mysimplelist | 3c792373cc372ab5e196fe109b9dae68b97b7220 | [
"Apache-2.0"
] | null | null | null | lib/mysimplelist_web/live/list_live/show.ex | ScorpionResponse/mysimplelist | 3c792373cc372ab5e196fe109b9dae68b97b7220 | [
"Apache-2.0"
] | null | null | null | defmodule MysimplelistWeb.ListLive.Show do
use MysimplelistWeb, :live_view
alias Mysimplelist.Lists
@impl true
def mount(_params, %{"current_user_token" => current_user_token}, socket) do
{:ok, assign(socket, current_user_token: current_user_token)}
end
@impl true
def handle_params(%{"id" => id}, _, socket) do
{:noreply,
socket
|> assign(:page_title, page_title(socket.assigns.live_action))
|> assign(:list, Lists.get_list!(id))}
end
defp page_title(:show), do: "Show List"
defp page_title(:edit), do: "Edit List"
end
| 25.772727 | 78 | 0.694885 |
7359caa21e2de12cfc4a0a401fc972ba1bac26a9 | 19,487 | exs | Elixir | test/zaryn_web/controllers/api/transaction_payload_test.exs | ambareesha7/node-zaryn | 136e542801bf9b6fa4a015d3464609fdf3dacee8 | [
"Apache-2.0"
] | 1 | 2021-07-06T19:47:14.000Z | 2021-07-06T19:47:14.000Z | test/zaryn_web/controllers/api/transaction_payload_test.exs | ambareesha7/node-zaryn | 136e542801bf9b6fa4a015d3464609fdf3dacee8 | [
"Apache-2.0"
] | null | null | null | test/zaryn_web/controllers/api/transaction_payload_test.exs | ambareesha7/node-zaryn | 136e542801bf9b6fa4a015d3464609fdf3dacee8 | [
"Apache-2.0"
] | null | null | null | defmodule ZarynWeb.API.TransactionPayloadTest do
use ExUnit.Case
alias Zaryn.Crypto
alias ZarynWeb.API.TransactionPayload
describe "changeset/1" do
test "should return errors if there are missing fields in the transaction schema" do
assert %Ecto.Changeset{
valid?: false,
errors: [
data: {"can't be blank", [validation: :required]},
address: {"can't be blank", [validation: :required]},
type: {"can't be blank", [validation: :required]},
previousPublicKey: {"can't be blank", [validation: :required]},
previousSignature: {"can't be blank", [validation: :required]},
originSignature: {"can't be blank", [validation: :required]}
]
} = TransactionPayload.changeset(%{})
end
test "should return errors if the crypto primitives are invalid" do
assert %Ecto.Changeset{
valid?: false,
errors: [
address: {"must be hexadecimal", _},
previousPublicKey: {"must be hexadecimal", _},
previousSignature: {"must be hexadecimal", _},
originSignature: {"must be hexadecimal", _}
]
} =
TransactionPayload.changeset(%{
"address" => "abc",
"type" => "transfer",
"data" => %{
"code" => "",
"content" => "",
"ledger" => %{
"zaryn" => %{
"transfers" => []
},
"nft" => %{
"transfers" => []
}
},
"keys" => %{},
"recipients" => []
},
"previousPublicKey" => "abc",
"previousSignature" => "abc",
"originSignature" => "abc"
})
end
test "should return an error if the content is not in hex" do
%Ecto.Changeset{
valid?: false,
changes: %{data: %{errors: errors}}
} =
TransactionPayload.changeset(%{
"address" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"type" => "transfer",
"timestamp" => DateTime.utc_now() |> DateTime.to_unix(:millisecond),
"previousPublicKey" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"previousSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"originSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"data" => %{"content" => "hello"}
})
assert {"must be hexadecimal", _} = Keyword.get(errors, :content)
end
test "should return an error if the code is not a string" do
%Ecto.Changeset{
valid?: false,
changes: %{data: %{errors: errors}}
} =
TransactionPayload.changeset(%{
"address" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"type" => "transfer",
"timestamp" => DateTime.utc_now() |> DateTime.to_unix(:millisecond),
"previousPublicKey" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"previousSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"originSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"data" => %{"code" => 123}
})
assert {"is invalid", _} = Keyword.get(errors, :code)
end
test "should return an error if the zaryn ledger transfer address is invalid" do
%Ecto.Changeset{
valid?: false,
changes: %{
data: %{
changes: %{
ledger: %{
changes: %{
zaryn: %{
changes: %{
transfers: [
%{
errors: errors
}
]
}
}
}
}
}
}
}
} =
TransactionPayload.changeset(%{
"address" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"type" => "transfer",
"timestamp" => DateTime.utc_now() |> DateTime.to_unix(:millisecond),
"previousPublicKey" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"previousSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"originSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"data" => %{
"ledger" => %{"zaryn" => %{"transfers" => [%{"to" => "abc", "amount" => 10.0}]}}
}
})
assert {"must be hexadecimal", _} = Keyword.get(errors, :to)
end
test "should return an error if the zaryn ledger transfer amount is invalid" do
%Ecto.Changeset{
valid?: false,
changes: %{
data: %{
changes: %{
ledger: %{
changes: %{
zaryn: %{
changes: %{
transfers: [
%{
errors: errors
}
]
}
}
}
}
}
}
}
} =
TransactionPayload.changeset(%{
"address" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"type" => "transfer",
"timestamp" => DateTime.utc_now() |> DateTime.to_unix(:millisecond),
"previousPublicKey" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"previousSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"originSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"data" => %{
"ledger" => %{
"zaryn" => %{
"transfers" => [
%{
"to" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"amount" => "abc"
}
]
}
}
}
})
assert {"is invalid", _} = Keyword.get(errors, :amount)
end
test "should return an error if the nft ledger transfer address is invalid" do
%Ecto.Changeset{
valid?: false,
changes: %{
data: %{
changes: %{
ledger: %{
changes: %{
nft: %{
changes: %{
transfers: [
%{
errors: errors
}
]
}
}
}
}
}
}
}
} =
TransactionPayload.changeset(%{
"address" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"type" => "transfer",
"timestamp" => DateTime.utc_now() |> DateTime.to_unix(:millisecond),
"previousPublicKey" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"previousSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"originSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"data" => %{
"ledger" => %{
"nft" => %{
"transfers" => [
%{
"to" => "abc",
"amount" => 10.0,
"nft" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>)
}
]
}
}
}
})
assert {"must be hexadecimal", _} = Keyword.get(errors, :to)
end
test "should return an error if the nft ledger transfer amount is invalid" do
%Ecto.Changeset{
valid?: false,
changes: %{
data: %{
changes: %{
ledger: %{
changes: %{
nft: %{
changes: %{
transfers: [
%{
errors: errors
}
]
}
}
}
}
}
}
}
} =
TransactionPayload.changeset(%{
"address" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"type" => "transfer",
"timestamp" => DateTime.utc_now() |> DateTime.to_unix(:millisecond),
"previousPublicKey" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"previousSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"originSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"data" => %{
"ledger" => %{
"nft" => %{
"transfers" => [
%{
"to" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"amount" => "abc",
"nft" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>)
}
]
}
}
}
})
assert {"is invalid", _} = Keyword.get(errors, :amount)
end
test "should return an error if the nft ledger transfer nft address is invalid" do
%Ecto.Changeset{
valid?: false,
changes: %{
data: %{
changes: %{
ledger: %{
changes: %{
nft: %{
changes: %{
transfers: [
%{
errors: errors
}
]
}
}
}
}
}
}
}
} =
TransactionPayload.changeset(%{
"address" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"type" => "transfer",
"timestamp" => DateTime.utc_now() |> DateTime.to_unix(:millisecond),
"previousPublicKey" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"previousSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"originSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"data" => %{
"ledger" => %{
"nft" => %{
"transfers" => [
%{
"to" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"amount" => 10.0,
"nft" => "abc"
}
]
}
}
}
})
assert {"must be hexadecimal", _} = Keyword.get(errors, :nft)
end
test "should return an error if the encrypted secret is not an hexadecimal" do
%Ecto.Changeset{
valid?: false,
changes: %{
data: %{
changes: %{
keys: %{
errors: errors
}
}
}
}
} =
TransactionPayload.changeset(%{
"address" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"type" => "transfer",
"timestamp" => DateTime.utc_now() |> DateTime.to_unix(:millisecond),
"previousPublicKey" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"previousSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"originSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"data" => %{
"keys" => %{
"secret" => "abc"
}
}
})
assert {"must be hexadecimal", _} = Keyword.get(errors, :secret)
end
test "should return an error if the public key in the authorized keys is not valid" do
%Ecto.Changeset{
valid?: false,
changes: %{
data: %{
changes: %{
keys: %{
errors: errors
}
}
}
}
} =
TransactionPayload.changeset(%{
"address" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"type" => "transfer",
"timestamp" => DateTime.utc_now() |> DateTime.to_unix(:millisecond),
"previousPublicKey" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"previousSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"originSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"data" => %{
"keys" => %{
"authorizedKeys" => %{
"key" => "hello"
}
}
}
})
assert {"public key must be hexadecimal", _} = Keyword.get(errors, :authorizedKeys)
%Ecto.Changeset{
valid?: false,
changes: %{
data: %{
changes: %{
keys: %{
errors: errors
}
}
}
}
} =
TransactionPayload.changeset(%{
"address" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"type" => "transfer",
"timestamp" => DateTime.utc_now() |> DateTime.to_unix(:millisecond),
"previousPublicKey" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"previousSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"originSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"data" => %{
"keys" => %{
"authorizedKeys" =>
Map.put(%{}, Base.encode16(:crypto.strong_rand_bytes(32)), "hello")
}
}
})
assert {"public key is invalid", _} = Keyword.get(errors, :authorizedKeys)
end
test "should return an error if the encrypted key in the authorized keys is not valid" do
%Ecto.Changeset{
valid?: false,
changes: %{
data: %{
changes: %{
keys: %{
errors: errors
}
}
}
}
} =
TransactionPayload.changeset(%{
"address" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"type" => "transfer",
"timestamp" => DateTime.utc_now() |> DateTime.to_unix(:millisecond),
"previousPublicKey" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"previousSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"originSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"data" => %{
"keys" => %{
"authorizedKeys" =>
Map.put(
%{},
Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"hello"
)
}
}
})
assert {"encrypted key must be hexadecimal", _} = Keyword.get(errors, :authorizedKeys)
end
test "should return an error if the recipients are invalid" do
%Ecto.Changeset{
valid?: false,
changes: %{
data: %{
errors: errors
}
}
} =
TransactionPayload.changeset(%{
"address" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"type" => "transfer",
"timestamp" => DateTime.utc_now() |> DateTime.to_unix(:millisecond),
"previousPublicKey" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"previousSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"originSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"data" => %{
"recipients" => ["hello"]
}
})
{"must be hexadecimal", _} = Keyword.get(errors, :recipients)
%Ecto.Changeset{
valid?: false,
changes: %{
data: %{
errors: errors
}
}
} =
TransactionPayload.changeset(%{
"address" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"type" => "transfer",
"timestamp" => DateTime.utc_now() |> DateTime.to_unix(:millisecond),
"previousPublicKey" => Base.encode16(<<0::8, :crypto.strong_rand_bytes(32)::binary>>),
"previousSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"originSignature" => Base.encode16(:crypto.strong_rand_bytes(64)),
"data" => %{
"recipients" => [Base.encode16(:crypto.strong_rand_bytes(32))]
}
})
{"invalid hash", _} = Keyword.get(errors, :recipients)
end
end
test "to_map/1 should return a map of the changeset" do
address = <<0::8, :crypto.strong_rand_bytes(32)::binary>>
previous_public_key = <<0::8, :crypto.strong_rand_bytes(32)::binary>>
previous_signature = :crypto.strong_rand_bytes(64)
origin_signature = :crypto.strong_rand_bytes(64)
recipient = <<0::8, :crypto.strong_rand_bytes(32)::binary>>
zaryn_to = <<0::8, :crypto.strong_rand_bytes(32)::binary>>
aes_key = :crypto.strong_rand_bytes(32)
secret = Crypto.aes_encrypt("hello", aes_key)
{authorized_public_key, _} = Crypto.generate_deterministic_keypair("seed")
encrypted_key = Crypto.ec_encrypt(aes_key, authorized_public_key)
assert %{
address: address,
type: "transfer",
previous_public_key: previous_public_key,
previous_signature: previous_signature,
origin_signature: origin_signature,
data: %{
recipients: [recipient],
ledger: %{
zaryn: %{
transfers: [
%{to: zaryn_to, amount: 10.2}
]
}
},
keys: %{
secret: secret,
authorized_keys: %{
authorized_public_key => encrypted_key
}
}
}
} ==
TransactionPayload.changeset(%{
"address" => Base.encode16(address),
"type" => "transfer",
"previousPublicKey" => Base.encode16(previous_public_key),
"previousSignature" => Base.encode16(previous_signature),
"originSignature" => Base.encode16(origin_signature),
"data" => %{
"ledger" => %{
"zaryn" => %{
"transfers" => [
%{"to" => Base.encode16(uco_to), "amount" => 10.2}
]
}
},
"keys" => %{
"secret" => Base.encode16(secret),
"authorizedKeys" =>
Map.put(
%{},
Base.encode16(authorized_public_key),
Base.encode16(encrypted_key)
)
},
"recipients" => [Base.encode16(recipient)]
}
})
|> TransactionPayload.to_map()
end
end
| 34.860465 | 96 | 0.457998 |
7359d1d7d6e7dc494d0049e9a0e97de1825f11f6 | 2,887 | ex | Elixir | clients/cloud_billing/lib/google_api/cloud_billing/v1/model/project_billing_info.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/cloud_billing/lib/google_api/cloud_billing/v1/model/project_billing_info.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/cloud_billing/lib/google_api/cloud_billing/v1/model/project_billing_info.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CloudBilling.V1.Model.ProjectBillingInfo do
@moduledoc """
Encapsulation of billing information for a GCP Console project. A project has at most one associated billing account at a time (but a billing account can be assigned to multiple projects).
## Attributes
- billingAccountName (String.t): The resource name of the billing account associated with the project, if any. For example, `billingAccounts/012345-567890-ABCDEF`. Defaults to: `null`.
- billingEnabled (boolean()): True if the project is associated with an open billing account, to which usage on the project is charged. False if the project is associated with a closed billing account, or no billing account at all, and therefore cannot use paid services. This field is read-only. Defaults to: `null`.
- name (String.t): The resource name for the `ProjectBillingInfo`; has the form `projects/{project_id}/billingInfo`. For example, the resource name for the billing information for project `tokyo-rain-123` would be `projects/tokyo-rain-123/billingInfo`. This field is read-only. Defaults to: `null`.
- projectId (String.t): The ID of the project that this `ProjectBillingInfo` represents, such as `tokyo-rain-123`. This is a convenience field so that you don't need to parse the `name` field to obtain a project ID. This field is read-only. Defaults to: `null`.
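  ## Example
  A decoded struct using the sample values above might look like (illustrative only):
      %GoogleApi.CloudBilling.V1.Model.ProjectBillingInfo{
        billingAccountName: "billingAccounts/012345-567890-ABCDEF",
        billingEnabled: true,
        name: "projects/tokyo-rain-123/billingInfo",
        projectId: "tokyo-rain-123"
      }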
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:billingAccountName => any(),
:billingEnabled => any(),
:name => any(),
:projectId => any()
}
field(:billingAccountName)
field(:billingEnabled)
field(:name)
field(:projectId)
end
defimpl Poison.Decoder, for: GoogleApi.CloudBilling.V1.Model.ProjectBillingInfo do
def decode(value, options) do
GoogleApi.CloudBilling.V1.Model.ProjectBillingInfo.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudBilling.V1.Model.ProjectBillingInfo do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 50.649123 | 340 | 0.744371 |
7359ebaa7aadb672902c8fab370fd19f2fae8439 | 22,585 | ex | Elixir | lib/logger/lib/logger.ex | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | null | null | null | lib/logger/lib/logger.ex | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | null | null | null | lib/logger/lib/logger.ex | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | null | null | null | defmodule Logger do
@moduledoc ~S"""
A logger for Elixir applications.
It includes many features:
* Provides debug, info, warn and error levels.
* Supports multiple backends which are automatically
supervised when plugged into `Logger`.
* Formats and truncates messages on the client
to avoid clogging `Logger` backends.
* Alternates between sync and async modes to remain
performant when required but also apply backpressure
when under stress.
* Wraps OTP's `error_logger` to prevent it from
overflowing.
## Levels
The supported levels are:
* `:debug` - for debug-related messages
* `:info` - for information of any kind
* `:warn` - for warnings
* `:error` - for errors
## Configuration
`Logger` supports a wide range of configurations.
This configuration is split in three categories:
* Application configuration - must be set before the `:logger`
application is started
* Runtime configuration - can be set before the `:logger`
application is started, but may be changed during runtime
* Error logger configuration - configuration for the
wrapper around OTP's `error_logger`
### Application configuration
The following configuration must be set via config files (e.g.,
`config/config.exs`) before the `:logger` application is started.
* `:backends` - the backends to be used. Defaults to `[:console]`.
See the "Backends" section for more information.
* `:compile_time_purge_level` - purges *at compilation time* all calls that
have log level lower than the value of this option. This means that
`Logger` calls with level lower than this option will be completely
removed at compile time, accruing no overhead at runtime. Defaults to
`:debug` and only applies to the `Logger.debug/2`, `Logger.info/2`,
`Logger.warn/2`, and `Logger.error/2` macros (e.g., it doesn't apply to
`Logger.log/3`). Note that arguments passed to `Logger` calls that are
removed from the AST at compilation time are never evaluated, thus any
function call that occurs in these arguments is never executed. As a
consequence, avoid code that looks like `Logger.debug("Cleanup:
#{perform_cleanup()}")` as in the example `perform_cleanup/0` won't be
executed if the `:compile_time_purge_level` is `:info` or higher.
* `:compile_time_application` - sets the `:application` metadata value
to the configured value at compilation time. This configuration is
usually only useful for build tools to automatically add the
application to the metadata for `Logger.debug/2`, `Logger.info/2`, etc.
style of calls.
For example, to configure the `:backends` and `compile_time_purge_level`
options in a `config/config.exs` file:
config :logger,
backends: [:console],
compile_time_purge_level: :info
### Runtime Configuration
All configuration below can be set via config files (e.g.,
`config/config.exs`) but also changed dynamically during runtime via
`Logger.configure/1`.
* `:level` - the logging level. Attempting to log any message
with severity less than the configured level will simply
cause the message to be ignored. Keep in mind that each backend
may have its specific level, too. Note that, unlike what happens with the
`:compile_time_purge_level` option, the argument passed to `Logger` calls
is evaluated even if the level of the call is lower than `:level`.
* `:utc_log` - when `true`, uses UTC in logs. By default it uses
local time (i.e., it defaults to `false`).
* `:truncate` - the maximum message size to be logged (in bytes). Defaults
to 8192 bytes. Note this configuration is approximate. Truncated messages
will have `" (truncated)"` at the end. The atom `:infinity` can be passed
to disable this behavior.
* `:sync_threshold` - if the `Logger` manager has more than
`:sync_threshold` messages in its queue, `Logger` will change
to *sync mode*, to apply backpressure to the clients.
`Logger` will return to *async mode* once the number of messages
in the queue is reduced to `sync_threshold * 0.75` messages.
Defaults to 20 messages.
* `:translator_inspect_opts` - when translating OTP reports and
errors, the last message and state must be inspected in the
error reports. This configuration allow developers to change
how much and how the data should be inspected.
For example, to configure the `:level` and `:truncate` options in a
`config/config.exs` file:
config :logger,
level: :warn,
truncate: 4096
### Error logger configuration
The following configuration applies to `Logger`'s wrapper around
Erlang's `error_logger`. All the configurations below must be set
before the `:logger` application starts.
* `:handle_otp_reports` - redirects OTP reports to `Logger` so
they are formatted in Elixir terms. This uninstalls Erlang's
logger that prints terms to terminal. Defaults to `true`.
* `:handle_sasl_reports` - redirects supervisor, crash and
progress reports to `Logger` so they are formatted in Elixir
terms. This uninstalls `sasl`'s logger that prints these
reports to the terminal. Defaults to `false`.
* `:discard_threshold_for_error_logger` - a value that, when
reached, triggers the error logger to discard messages. This
value must be a positive number that represents the maximum
number of messages accepted per second. Once above this
threshold, the `error_logger` enters discard mode for the
remainder of that second. Defaults to 500 messages.
For example, to configure `Logger` to redirect all `error_logger` messages
using a `config/config.exs` file:
config :logger,
handle_otp_reports: true,
handle_sasl_reports: true
Furthermore, `Logger` allows messages sent by Erlang's `error_logger`
to be translated into an Elixir format via translators. Translators
can be dynamically added at any time with the `add_translator/1`
and `remove_translator/1` APIs. Check `Logger.Translator` for more
information.
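  For example, a translator is registered as a `{module, function}` tuple
  (`MyApp.LoggerTranslator` below is a placeholder for your own module):
      Logger.add_translator({MyApp.LoggerTranslator, :translate})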
## Backends
`Logger` supports different backends where log messages are written to.
The available backends by default are:
* `:console` - logs messages to the console (enabled by default)
Developers may also implement their own backends, an option that
is explored in more detail below.
The initial backends are loaded via the `:backends` configuration,
which must be set before the `:logger` application is started.
### Console backend
The console backend logs messages by printing them to the console. It supports
the following options:
* `:level` - the level to be logged by this backend.
Note that messages are filtered by the general
`:level` configuration for the `:logger` application first.
* `:format` - the format message used to print logs.
Defaults to: `"$time $metadata[$level] $levelpad$message\n"`.
* `:metadata` - the metadata to be printed by `$metadata`.
Defaults to an empty list (no metadata).
* `:colors` - a keyword list of coloring options.
* `:device` - the device to log error messages to. Defaults to
`:user` but can be changed to something else such as `:standard_error`.
* `:max_buffer` - maximum events to buffer while waiting
for a confirmation from the IO device (default: 32).
Once the buffer is full, the backend will block until
a confirmation is received.
In addition to the keys provided by the user via `Logger.metadata/1`,
the following extra keys are available to the `:metadata` list:
* `:application` - the current application
* `:module` - the current module
* `:function` - the current function
* `:file` - the current file
* `:line` - the current line
The supported keys in the `:colors` keyword list are:
* `:enabled` - boolean value that allows for switching the
coloring on and off. Defaults to: `IO.ANSI.enabled?/0`
* `:debug` - color for debug messages. Defaults to: `:cyan`
* `:info` - color for info messages. Defaults to: `:normal`
* `:warn` - color for warn messages. Defaults to: `:yellow`
* `:error` - color for error messages. Defaults to: `:red`
See the `IO.ANSI` module for a list of colors and attributes.
Here is an example of how to configure the `:console` backend in a
`config/config.exs` file:
config :logger, :console,
        format: "\n$time $metadata[$level] $levelpad$message\n",
metadata: [:user_id]
You can read more about formatting in `Logger.Formatter`.
### Custom backends
Any developer can create their own `Logger` backend.
Since `Logger` is an event manager powered by `:gen_event`,
writing a new backend is a matter of creating an event
handler, as described in the [`:gen_event`](http://erlang.org/doc/man/gen_event.html)
documentation.
From now on, we will be using the term "event handler" to refer
to your custom backend, as we head into implementation details.
Once the `:logger` application starts, it installs all event handlers listed under
the `:backends` configuration into the `Logger` event manager. The event
manager and all added event handlers are automatically supervised by `Logger`.
Once initialized, the handler should be designed to handle events
in the following format:
{level, group_leader, {Logger, message, timestamp, metadata}} | :flush
where:
* `level` is one of `:debug`, `:info`, `:warn`, or `:error`, as previously
described
* `group_leader` is the group leader of the process which logged the message
* `{Logger, message, timestamp, metadata}` is a tuple containing information
about the logged message:
* the first element is always the atom `Logger`
* `message` is the actual message (as chardata)
* `timestamp` is the timestamp for when the message was logged, as a
`{{year, month, day}, {hour, minute, second, millisecond}}` tuple
* `metadata` is a keyword list of metadata used when logging the message
It is recommended that handlers ignore messages where
the group leader is in a different node than the one where
the handler is installed. For example:
def handle_event({_level, gl, {Logger, _, _, _}}, state)
when node(gl) != node() do
{:ok, state}
end
In the case of the event `:flush` handlers should flush any pending data. This
event is triggered by `flush/0`.
Furthermore, backends can be configured via the
`configure_backend/2` function which requires event handlers
to handle calls of the following format:
{:configure, options}
where `options` is a keyword list. The result of the call is
the result returned by `configure_backend/2`. The recommended
return value for successful configuration is `:ok`.
It is recommended that backends support at least the following
configuration options:
* `:level` - the logging level for that backend
* `:format` - the logging format for that backend
* `:metadata` - the metadata to include in that backend
Check the implementation for `Logger.Backends.Console`, for
examples on how to handle the recommendations in this section
and how to process the existing options.
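  As a concrete starting point, here is a minimal sketch of such a handler
  (the `MyApp.CustomBackend` name, the plain `IO.puts` output and the
  bare-bones option handling are illustrative only, not part of Logger):
      defmodule MyApp.CustomBackend do
        @behaviour :gen_event
        # Keep only what this sketch needs: the minimum level to print.
        def init(_args), do: {:ok, %{level: :debug}}
        # Ignore events whose group leader lives on another node, as recommended above.
        def handle_event({_level, gl, {Logger, _, _, _}}, state) when node(gl) != node() do
          {:ok, state}
        end
        def handle_event({level, _gl, {Logger, message, _timestamp, _metadata}}, state) do
          if Logger.compare_levels(level, state.level) != :lt do
            IO.puts([Atom.to_string(level), ": ", message])
          end
          {:ok, state}
        end
        # Nothing is buffered in this sketch, so a flush is a no-op.
        def handle_event(:flush, state), do: {:ok, state}
        # Invoked through Logger.configure_backend/2.
        def handle_call({:configure, options}, state) do
          {:ok, :ok, Enum.into(options, state)}
        end
        # Direct messages to the handler's process are ignored here.
        def handle_info(_msg, state), do: {:ok, state}
      end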
"""
@type backend :: :gen_event.handler
@type message :: IO.chardata | String.Chars.t
@type level :: :error | :info | :warn | :debug
@type metadata :: Keyword.t(String.Chars.t)
@levels [:error, :info, :warn, :debug]
@metadata :logger_metadata
@compile {:inline, __metadata__: 0}
defp __metadata__ do
Process.get(@metadata) || {true, []}
end
@doc """
  Alters the current process metadata according to the given keyword list.
  This will merge the given keyword list into the existing metadata, with
  the exception that setting a key to `nil` will remove that key from the metadata.
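  ## Examples
      Logger.metadata(user_id: 13)
      Logger.metadata(user_id: nil) # removes :user_id from the metadata again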
"""
@spec metadata(metadata) :: :ok
def metadata(keywords) do
{enabled?, metadata} = __metadata__()
metadata =
Enum.reduce(keywords, metadata, fn
{key, nil}, acc -> Keyword.delete(acc, key)
{key, val}, acc -> Keyword.put(acc, key, val)
end)
Process.put(@metadata, {enabled?, metadata})
:ok
end
@doc """
Reads the current process metadata.
"""
@spec metadata() :: metadata
def metadata() do
__metadata__() |> elem(1)
end
@doc """
Resets the current process metadata to the given keyword list.
"""
@spec reset_metadata(metadata) :: :ok
def reset_metadata(keywords \\ []) do
{enabled?, _metadata} = __metadata__()
Process.put(@metadata, {enabled?, []})
metadata(keywords)
end
@doc """
Enables logging for the current process.
Currently the only accepted process is self().
"""
@spec enable(pid) :: :ok
def enable(pid) when pid == self() do
Process.put(@metadata, {true, metadata()})
:ok
end
@doc """
Disables logging for the current process.
Currently the only accepted process is self().
"""
@spec disable(pid) :: :ok
def disable(pid) when pid == self() do
Process.put(@metadata, {false, metadata()})
:ok
end
@doc """
Retrieves the Logger level.
The Logger level can be changed via `configure/1`.
"""
@spec level() :: level
def level() do
%{level: level} = Logger.Config.__data__
level
end
@doc """
Compares log levels.
  Receives two log levels, compares `left` against `right`, and
  returns `:lt`, `:eq` or `:gt`.
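  ## Examples
      Logger.compare_levels(:debug, :warn) #=> :lt
      Logger.compare_levels(:error, :error) #=> :eq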
"""
@spec compare_levels(level, level) :: :lt | :eq | :gt
def compare_levels(level, level), do:
:eq
def compare_levels(left, right), do:
if(level_to_number(left) > level_to_number(right), do: :gt, else: :lt)
defp level_to_number(:debug), do: 0
defp level_to_number(:info), do: 1
defp level_to_number(:warn), do: 2
defp level_to_number(:error), do: 3
@doc """
Configures the logger.
See the "Runtime Configuration" section in `Logger` module
documentation for the available options.
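  ## Examples
      Logger.configure(level: :info)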
"""
@valid_options [:compile_time_purge_level, :compile_time_application, :sync_threshold, :truncate, :level, :utc_log]
@spec configure(Keyword.t) :: :ok
def configure(options) do
Logger.Config.configure(Keyword.take(options, @valid_options))
end
@doc """
Flushes the logger.
This basically guarantees all messages sent to
`Logger` prior to this call will be processed. This is useful
for testing and it should not be called in production code.
"""
@spec flush :: :ok
def flush do
_ = :gen_event.which_handlers(:error_logger)
:gen_event.sync_notify(Logger, :flush)
end
@doc """
Adds a new backend.
## Options
* `:flush` - when `true`, guarantees all messages currently sent
to both Logger and Erlang's `error_logger` are processed before
the backend is added
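  ## Examples
      # (MyApp.CustomBackend is a placeholder for your own backend module)
      Logger.add_backend(MyApp.CustomBackend, flush: true)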
"""
@spec add_backend(atom, Keyword.t) :: Supervisor.on_start_child
def add_backend(backend, opts \\ []) do
_ = if opts[:flush], do: flush()
case Logger.Watcher.watch(Logger, Logger.Config.translate_backend(backend), backend) do
{:ok, _} = ok ->
Logger.Config.add_backend(backend)
ok
{:error, {:already_started, _pid}} ->
{:error, :already_present}
{:error, _} = error ->
error
end
end
@doc """
Removes a backend.
## Options
* `:flush` - when `true`, guarantees all messages currently sent
to both Logger and Erlang's `error_logger` are processed before
the backend is removed
"""
@spec remove_backend(atom, Keyword.t) :: :ok | {:error, term}
def remove_backend(backend, opts \\ []) do
_ = if opts[:flush], do: flush()
Logger.Config.remove_backend(backend)
Logger.Watcher.unwatch(Logger, Logger.Config.translate_backend(backend))
end
@doc """
Adds a new translator.
"""
@spec add_translator({module, function :: atom}) :: :ok
def add_translator({mod, fun} = translator) when is_atom(mod) and is_atom(fun) do
Logger.Config.add_translator(translator)
end
@doc """
Removes a translator.
"""
@spec remove_translator({module, function :: atom}) :: :ok
def remove_translator({mod, fun} = translator) when is_atom(mod) and is_atom(fun) do
Logger.Config.remove_translator(translator)
end
@doc """
Configures the given backend.
The backend needs to be started and running in order to
be configured at runtime.
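  ## Examples
      Logger.configure_backend(:console, level: :warn, metadata: [:user_id])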
"""
@spec configure_backend(backend, Keyword.t) :: term
def configure_backend(backend, options) when is_list(options) do
:gen_event.call(Logger, Logger.Config.translate_backend(backend), {:configure, options})
end
@doc """
Logs a message dynamically.
Use this function only when there is a need to
explicitly avoid embedding metadata.
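  ## Examples
      Logger.bare_log(:info, "hello")
      Logger.bare_log(:debug, fn -> {"expensive to calculate", [some: :metadata]} end)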
"""
@spec bare_log(level, message | (() -> message | {message, Keyword.t}), Keyword.t) ::
:ok | {:error, :noproc} | {:error, term}
def bare_log(level, chardata_or_fun, metadata \\ [])
when level in @levels and is_list(metadata) do
case __metadata__() do
{true, pdict} ->
%{mode: mode, truncate: truncate,
level: min_level, utc_log: utc_log?} = Logger.Config.__data__
if compare_levels(level, min_level) != :lt do
metadata = [pid: self()] ++ Keyword.merge(pdict, metadata)
{message, metadata} = normalize_message(chardata_or_fun, metadata)
truncated = truncate(message, truncate)
tuple = {Logger, truncated, Logger.Utils.timestamp(utc_log?), metadata}
try do
notify(mode, {level, Process.group_leader(), tuple})
:ok
rescue
ArgumentError -> {:error, :noproc}
catch
:exit, reason -> {:error, reason}
end
else
:ok
end
{false, _} ->
:ok
end
end
@doc """
Logs a warning.
Returns the atom `:ok` or an `{:error, reason}` tuple.
## Examples
Logger.warn "knob turned too far to the right"
Logger.warn fn -> "expensive to calculate warning" end
Logger.warn fn -> {"expensive to calculate warning", [additional: :metadata]} end
"""
defmacro warn(chardata_or_fun, metadata \\ []) do
maybe_log(:warn, chardata_or_fun, metadata, __CALLER__)
end
@doc """
Logs some info.
Returns the atom `:ok` or an `{:error, reason}` tuple.
## Examples
Logger.info "mission accomplished"
Logger.info fn -> "expensive to calculate info" end
Logger.info fn -> {"expensive to calculate info", [additional: :metadata]} end
"""
defmacro info(chardata_or_fun, metadata \\ []) do
maybe_log(:info, chardata_or_fun, metadata, __CALLER__)
end
@doc """
Logs an error.
Returns the atom `:ok` or an `{:error, reason}` tuple.
## Examples
Logger.error "oops"
Logger.error fn -> "expensive to calculate error" end
Logger.error fn -> {"expensive to calculate error", [additional: :metadata]} end
"""
defmacro error(chardata_or_fun, metadata \\ []) do
maybe_log(:error, chardata_or_fun, metadata, __CALLER__)
end
@doc """
Logs a debug message.
Returns the atom `:ok` or an `{:error, reason}` tuple.
## Examples
Logger.debug "hello?"
Logger.debug fn -> "expensive to calculate debug" end
Logger.debug fn -> {"expensive to calculate debug", [additional: :metadata]} end
"""
defmacro debug(chardata_or_fun, metadata \\ []) do
maybe_log(:debug, chardata_or_fun, metadata, __CALLER__)
end
@doc """
Logs a message.
Returns the atom `:ok` or an `{:error, reason}` tuple.
Developers should use the macros `Logger.debug/2`,
`Logger.warn/2`, `Logger.info/2` or `Logger.error/2` instead
of this macro as they can automatically eliminate
the Logger call altogether at compile time if desired.
"""
defmacro log(level, chardata_or_fun, metadata \\ []) do
macro_log(level, chardata_or_fun, metadata, __CALLER__)
end
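  # Usage sketch (illustrative): `log/3` is a macro, so the calling module
  # must `require Logger` first.
  #
  #     require Logger
  #     Logger.log(:info, "dynamic level example", user_id: 42)
  #     #=> :ok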
defp macro_log(level, data, metadata, caller) do
%{module: module, function: fun, file: file, line: line} = caller
caller =
compile_time_application() ++
[module: module, function: form_fa(fun), file: file, line: line]
quote do
Logger.bare_log(unquote(level), unquote(data), unquote(caller) ++ unquote(metadata))
end
end
defp compile_time_application do
if app = Application.get_env(:logger, :compile_time_application) do
[application: app]
else
[]
end
end
defp maybe_log(level, data, metadata, caller) do
min_level = Application.get_env(:logger, :compile_time_purge_level, :debug)
if compare_levels(level, min_level) != :lt do
macro_log(level, data, metadata, caller)
else
handle_unused_variable_warnings(data, caller)
end
end
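  # Configuration sketch (illustrative): with the setting below, `Logger.debug/2`
  # calls hit the `handle_unused_variable_warnings/2` branch above at compile
  # time and are purged, while `:info` and higher still log.
  #
  #     # e.g. in config/prod.exs
  #     config :logger, compile_time_purge_level: :info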
defp normalize_message(fun, metadata) when is_function(fun, 0),
do: normalize_message(fun.(), metadata)
defp normalize_message({message, fun_metadata}, metadata) when is_list(fun_metadata),
do: {message, Keyword.merge(metadata, fun_metadata)}
defp normalize_message(message, metadata),
do: {message, metadata}
defp truncate(data, n) when is_list(data) or is_binary(data),
do: Logger.Utils.truncate(data, n)
defp truncate(data, n),
do: Logger.Utils.truncate(to_string(data), n)
defp form_fa({name, arity}) do
Atom.to_string(name) <> "/" <> Integer.to_string(arity)
end
defp form_fa(nil), do: nil
defp notify(:sync, msg), do: :gen_event.sync_notify(Logger, msg)
defp notify(:async, msg), do: :gen_event.notify(Logger, msg)
defp handle_unused_variable_warnings(data, caller) do
# We collect all the names of variables (leaving `data` unchanged) with a
# scope of `nil` (as we don't warn for variables with a different scope
# anyways). We only want the variables that figure in `caller.vars`, as the
# AST for calls to local 0-arity functions without parens is the same as the
# AST for variables.
{^data, logged_vars} = Macro.postwalk(data, [], fn
{name, _meta, nil} = var, acc when is_atom(name) ->
if {name, nil} in caller.vars, do: {var, [name | acc]}, else: {var, acc}
ast, acc ->
{ast, acc}
end)
assignments =
logged_vars
|> Enum.reverse()
|> Enum.uniq()
|> Enum.map("e(do: _ = unquote(Macro.var(&1, nil))))
quote do
unquote_splicing(assignments)
:ok
end
end
end
| 33.213235 | 117 | 0.679655 |
7359f025f60709df36aabbaabacde6a27bd4a09e | 2,138 | ex | Elixir | lib/cog/config.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 1,003 | 2016-02-23T17:21:12.000Z | 2022-02-20T14:39:35.000Z | lib/cog/config.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 906 | 2016-02-22T22:54:19.000Z | 2022-03-11T15:19:43.000Z | lib/cog/config.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 95 | 2016-02-23T13:42:31.000Z | 2021-11-30T14:39:55.000Z | defmodule Cog.Config do
@type interval_type :: :ms | :sec | :min | :hour | :day | :week
@type typed_interval :: {integer, interval_type}
@doc """
Token lifetime configuration, converted into seconds. This is how
long after creation time a token is considered valid.
"""
def token_lifetime do
value = Application.fetch_env!(:cog, :token_lifetime)
convert(value, :sec)
end
@doc """
Token reap period configuration, converted into
milliseconds. Expired tokens will be reaped on this schedule.
"""
def token_reap_period do
value = Application.fetch_env!(:cog, :token_reap_period)
convert(value, :ms)
end
@doc """
Convert various tagged time durations into either seconds or
milliseconds, as desired.
Useful for allowing a readable configuration format that can still
easily be translated into the time units most frequently encountered
in Elixir / Erlang code.
  More general conversions (e.g., from days to minutes) and variable
  conversion units (i.e., a month can have 28, 29, 30, or 31 days in it,
  depending on the month and/or year) are explicitly not handled.
Units are specified as one of the following recognized atoms:
- :ms (millisecond)
- :sec (second)
- :min (minute)
- :hour
- :day
- :week
Examples:
iex> Cog.Config.convert({3, :day}, :sec)
259200
"""
def convert(from, into) do
from
|> convert_to_seconds
|> convert_from_seconds(into)
end
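  # Usage sketch (illustrative), complementing the doctest above:
  #
  #     iex> Cog.Config.convert({90, :min}, :ms)
  #     5400000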
@doc "Returns the mythical Relay id used to execute embedded commands"
def embedded_relay(), do: "28a35f98-7ae1-4b8d-929a-3c716f6717c7"
defp convert_to_seconds({seconds, :sec}),
do: {seconds, :sec}
defp convert_to_seconds({minutes, :min}),
do: {minutes * 60, :sec}
defp convert_to_seconds({hours, :hour}),
    do: {hours * 60 * 60, :sec}
defp convert_to_seconds({days, :day}),
do: {days * 24 * 60 * 60, :sec}
defp convert_to_seconds({weeks, :week}),
do: {weeks * 7 * 24 * 60 * 60, :sec}
defp convert_from_seconds({seconds, :sec}, :ms),
do: seconds * 1000
defp convert_from_seconds({seconds, :sec}, :sec),
do: seconds
end
| 27.410256 | 72 | 0.679607 |
7359fe0d77cc4db674b9119c22366ab6c0d31872 | 257 | ex | Elixir | lib/capabilities.ex | nathanjohnson320/exselenium | 8794296b305f3418a1c77444f2ca6ea0ec3ae4b6 | [
"MIT"
] | 9 | 2017-04-23T05:32:55.000Z | 2021-01-29T21:11:43.000Z | lib/capabilities.ex | nathanjohnson320/exselenium | 8794296b305f3418a1c77444f2ca6ea0ec3ae4b6 | [
"MIT"
] | null | null | null | lib/capabilities.ex | nathanjohnson320/exselenium | 8794296b305f3418a1c77444f2ca6ea0ec3ae4b6 | [
"MIT"
] | 1 | 2019-07-20T00:18:52.000Z | 2019-07-20T00:18:52.000Z | defmodule Selenium.Capabilities do
defstruct [
javascriptEnabled: false,
version: "",
rotatable: false,
takesScreenshot: true,
cssSelectorsEnabled: true,
browserName: "firefox",
nativeEvents: false,
platform: "ANY"
]
end
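# Usage sketch (illustrative): any default above can be overridden when the
# struct is built, e.g.
#
#     %Selenium.Capabilities{browserName: "chrome", javascriptEnabled: true}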
| 19.769231 | 34 | 0.677043 |
735a3a79433a9b40f63d3f71265f035c40953dc2 | 1,117 | ex | Elixir | 01/npkstuff/test/support/channel_case.ex | victordomingos/Learning_Elixir | 414f4f647c9eba494b65575e725a58021fde2313 | [
"MIT"
] | 1 | 2021-06-23T21:48:32.000Z | 2021-06-23T21:48:32.000Z | 01/npkstuff/test/support/channel_case.ex | victordomingos/Learning_Elixir | 414f4f647c9eba494b65575e725a58021fde2313 | [
"MIT"
] | null | null | null | 01/npkstuff/test/support/channel_case.ex | victordomingos/Learning_Elixir | 414f4f647c9eba494b65575e725a58021fde2313 | [
"MIT"
] | null | null | null | defmodule NpkstuffWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use NpkstuffWeb.ChannelCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
import Phoenix.ChannelTest
import NpkstuffWeb.ChannelCase
# The default endpoint for testing
@endpoint NpkstuffWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Npkstuff.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Npkstuff.Repo, {:shared, self()})
end
:ok
end
end
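# Usage sketch (illustrative; `NpkstuffWeb.UserSocket` and `NpkstuffWeb.RoomChannel`
# are hypothetical module names, not taken from this project):
#
#     defmodule NpkstuffWeb.RoomChannelTest do
#       use NpkstuffWeb.ChannelCase
#
#       test "ping replies with pong" do
#         {:ok, _, socket} =
#           socket(NpkstuffWeb.UserSocket, "user_id", %{})
#           |> subscribe_and_join(NpkstuffWeb.RoomChannel, "room:lobby")
#
#         ref = push(socket, "ping", %{})
#         assert_reply ref, :ok, %{}
#       end
#     end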
| 27.243902 | 70 | 0.728738 |
735a5b54c8d39e7def4ce933c1ad18235770eb2f | 17,209 | ex | Elixir | clients/my_business_account_management/lib/google_api/my_business_account_management/v1/api/locations.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/my_business_account_management/lib/google_api/my_business_account_management/v1/api/locations.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/my_business_account_management/lib/google_api/my_business_account_management/v1/api/locations.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.MyBusinessAccountManagement.V1.Api.Locations do
@moduledoc """
API calls for all endpoints tagged `Locations`.
"""
alias GoogleApi.MyBusinessAccountManagement.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Moves a location from an account that the user owns to another account that the same user administers. The user must be an owner of the account the location is currently associated with and must also be at least a manager of the destination account.
## Parameters
* `connection` (*type:* `GoogleApi.MyBusinessAccountManagement.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The name of the location to transfer. `locations/{location_id}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.MyBusinessAccountManagement.V1.Model.TransferLocationRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.MyBusinessAccountManagement.V1.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec mybusinessaccountmanagement_locations_transfer(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.MyBusinessAccountManagement.V1.Model.Empty.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def mybusinessaccountmanagement_locations_transfer(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/{+name}:transfer", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.MyBusinessAccountManagement.V1.Model.Empty{}])
end
@doc """
Invites the specified user to become an administrator for the specified location. The invitee must accept the invitation in order to be granted access to the location. See AcceptInvitation to programmatically accept an invitation.
## Parameters
* `connection` (*type:* `GoogleApi.MyBusinessAccountManagement.V1.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. The resource name of the location this admin is created for. `locations/{location_id}/admins`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.MyBusinessAccountManagement.V1.Model.Admin.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.MyBusinessAccountManagement.V1.Model.Admin{}}` on success
* `{:error, info}` on failure
"""
@spec mybusinessaccountmanagement_locations_admins_create(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.MyBusinessAccountManagement.V1.Model.Admin.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def mybusinessaccountmanagement_locations_admins_create(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/{+parent}/admins", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.MyBusinessAccountManagement.V1.Model.Admin{}])
end
@doc """
Removes the specified admin as a manager of the specified location.
## Parameters
* `connection` (*type:* `GoogleApi.MyBusinessAccountManagement.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The resource name of the admin to remove from the location.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.MyBusinessAccountManagement.V1.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec mybusinessaccountmanagement_locations_admins_delete(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.MyBusinessAccountManagement.V1.Model.Empty.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def mybusinessaccountmanagement_locations_admins_delete(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/v1/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.MyBusinessAccountManagement.V1.Model.Empty{}])
end
@doc """
Lists all of the admins for the specified location.
## Parameters
* `connection` (*type:* `GoogleApi.MyBusinessAccountManagement.V1.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. The name of the location to list admins of. `locations/{location_id}/admins`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.MyBusinessAccountManagement.V1.Model.ListLocationAdminsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec mybusinessaccountmanagement_locations_admins_list(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.MyBusinessAccountManagement.V1.Model.ListLocationAdminsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def mybusinessaccountmanagement_locations_admins_list(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/{+parent}/admins", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[struct: %GoogleApi.MyBusinessAccountManagement.V1.Model.ListLocationAdminsResponse{}]
)
end
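  # Usage sketch (illustrative): assumes `Connection.new/1` accepts an OAuth2
  # access token, as in other generated Google API clients; the location id is
  # hypothetical.
  #
  #     conn = GoogleApi.MyBusinessAccountManagement.V1.Connection.new(token)
  #
  #     {:ok, response} =
  #       GoogleApi.MyBusinessAccountManagement.V1.Api.Locations.mybusinessaccountmanagement_locations_admins_list(
  #         conn,
  #         "locations/123456789"
  #       )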
@doc """
Updates the Admin for the specified location. Only the AdminRole of the Admin can be updated.
## Parameters
* `connection` (*type:* `GoogleApi.MyBusinessAccountManagement.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Immutable. The resource name. For account admins, this is in the form: `accounts/{account_id}/admins/{admin_id}` For location admins, this is in the form: `locations/{location_id}/admins/{admin_id}` This field will be ignored if set during admin creation.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:updateMask` (*type:* `String.t`) - Required. The specific fields that should be updated. The only editable field is role.
* `:body` (*type:* `GoogleApi.MyBusinessAccountManagement.V1.Model.Admin.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.MyBusinessAccountManagement.V1.Model.Admin{}}` on success
* `{:error, info}` on failure
"""
@spec mybusinessaccountmanagement_locations_admins_patch(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.MyBusinessAccountManagement.V1.Model.Admin.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def mybusinessaccountmanagement_locations_admins_patch(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/v1/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.MyBusinessAccountManagement.V1.Model.Admin{}])
end
end
| 44.239075 | 291 | 0.622523 |
735a898e0c4af941667e8b330d414f540e4c109e | 3,232 | ex | Elixir | clients/network_management/lib/google_api/network_management/v1/model/expr.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/network_management/lib/google_api/network_management/v1/model/expr.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/network_management/lib/google_api/network_management/v1/model/expr.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.NetworkManagement.V1.Model.Expr do
@moduledoc """
Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec. Example (Comparison): title: "Summary size limit" description: "Determines if a summary is less than 100 chars" expression: "document.summary.size() < 100" Example (Equality): title: "Requestor is owner" description: "Determines if requestor is the document owner" expression: "document.owner == request.auth.claims.email" Example (Logic): title: "Public documents" description: "Determine whether the document should be publicly visible" expression: "document.type != 'private' && document.type != 'internal'" Example (Data Manipulation): title: "Notification string" description: "Create a notification string with a timestamp." expression: "'New message received at ' + string(document.create_time)" The exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information.
## Attributes
* `description` (*type:* `String.t`, *default:* `nil`) - Optional. Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
* `expression` (*type:* `String.t`, *default:* `nil`) - Textual representation of an expression in Common Expression Language syntax.
* `location` (*type:* `String.t`, *default:* `nil`) - Optional. String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
* `title` (*type:* `String.t`, *default:* `nil`) - Optional. Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:description => String.t(),
:expression => String.t(),
:location => String.t(),
:title => String.t()
}
field(:description)
field(:expression)
field(:location)
field(:title)
end
defimpl Poison.Decoder, for: GoogleApi.NetworkManagement.V1.Model.Expr do
def decode(value, options) do
GoogleApi.NetworkManagement.V1.Model.Expr.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.NetworkManagement.V1.Model.Expr do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
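# Usage sketch (illustrative): the struct can be built directly and encoded
# through the `Poison.Encoder` implementation above.
#
#     expr = %GoogleApi.NetworkManagement.V1.Model.Expr{
#       title: "Public documents",
#       expression: "document.type != 'private' && document.type != 'internal'"
#     }
#     Poison.encode!(expr)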
| 57.714286 | 1,092 | 0.737314 |
735a901890611bcfdc9e9645418a0ab229725db5 | 1,541 | ex | Elixir | source/backend/api_app/lib/api_app_web.ex | edwinvautier/aio-group3-proj01 | 2ca3d834f358a38692e8ae4d6266bbea130a6cb5 | [
"MIT"
] | 3 | 2020-04-05T15:26:55.000Z | 2020-06-17T15:01:33.000Z | source/backend/api_app/lib/api_app_web.ex | edwinvautier/aio-group3-proj01 | 2ca3d834f358a38692e8ae4d6266bbea130a6cb5 | [
"MIT"
] | 48 | 2020-04-01T16:00:47.000Z | 2020-04-28T16:22:12.000Z | source/backend/api_app/lib/api_app_web.ex | edwinvautier/aio-group3-proj01 | 2ca3d834f358a38692e8ae4d6266bbea130a6cb5 | [
"MIT"
] | 4 | 2020-03-30T15:36:55.000Z | 2020-06-17T15:01:36.000Z | defmodule ApiAppWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use ApiAppWeb, :controller
use ApiAppWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: ApiAppWeb
import Plug.Conn
import ApiAppWeb.Gettext
alias ApiAppWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/api_app_web/templates",
namespace: ApiAppWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 1, get_flash: 2, view_module: 1]
import ApiAppWeb.ErrorHelpers
import ApiAppWeb.Gettext
alias ApiAppWeb.Router.Helpers, as: Routes
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
end
end
def channel do
quote do
use Phoenix.Channel
import ApiAppWeb.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 23 | 83 | 0.689812 |
735ac86088bb01ca42d0504536626374b38776c6 | 1,732 | exs | Elixir | test/bitpal_web/controllers/user_registration_controller_test.exs | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 5 | 2021-05-04T21:28:00.000Z | 2021-12-01T11:19:48.000Z | test/bitpal_web/controllers/user_registration_controller_test.exs | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 71 | 2021-04-21T05:48:49.000Z | 2022-03-23T06:30:37.000Z | test/bitpal_web/controllers/user_registration_controller_test.exs | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 1 | 2021-04-25T10:35:41.000Z | 2021-04-25T10:35:41.000Z | defmodule BitPalWeb.UserRegistrationControllerTest do
use BitPalWeb.ConnCase, async: true
describe "GET /users/register" do
test "renders registration page", %{conn: conn} do
conn = get(conn, Routes.user_registration_path(conn, :new))
response = html_response(conn, 200)
assert response =~ "<h1>Register</h1>"
assert response =~ "Log in</a>"
assert response =~ "Register</a>"
end
test "redirects if already logged in", %{conn: conn} do
conn = conn |> log_in_user(create_user()) |> get(Routes.user_registration_path(conn, :new))
assert redirected_to(conn) == "/"
end
end
describe "POST /users/register" do
@tag :capture_log
test "creates account and logs the user in", %{conn: conn} do
email = unique_user_email()
conn =
post(conn, Routes.user_registration_path(conn, :create), %{
"user" => valid_user_attributes(email: email)
})
assert get_session(conn, :user_token)
assert redirected_to(conn) == "/"
# Now do a logged in request and assert on the menu
conn = get(conn, "/")
response = html_response(conn, 200)
assert response =~ email
assert response =~ "Settings</a>"
assert response =~ "Log out</a>"
end
test "render errors for invalid data", %{conn: conn} do
conn =
post(conn, Routes.user_registration_path(conn, :create), %{
"user" => %{"email" => "with spaces", "password" => "too short"}
})
response = html_response(conn, 200)
assert response =~ "<h1>Register</h1>"
assert response =~ "must have the @ sign and no spaces"
assert response =~ "should be at least 12 character"
end
end
end
| 32.679245 | 97 | 0.625289 |
735ae0dbf16e87b6596ab7cb4d380b5bd97eacb8 | 3,017 | exs | Elixir | test/manager_interactor_test.exs | moxley/addict | 9271c60d9a862edcefc31e8a764b3eb5a5905171 | [
"MIT"
] | 750 | 2015-01-18T23:00:36.000Z | 2021-03-24T22:11:09.000Z | test/manager_interactor_test.exs | moxley/addict | 9271c60d9a862edcefc31e8a764b3eb5a5905171 | [
"MIT"
] | 130 | 2015-01-19T12:39:42.000Z | 2021-09-28T22:40:52.000Z | test/manager_interactor_test.exs | moxley/addict | 9271c60d9a862edcefc31e8a764b3eb5a5905171 | [
"MIT"
] | 151 | 2015-01-19T09:24:44.000Z | 2020-09-21T13:52:46.000Z | defmodule Addict.ManagerInteractorTest do
use ExUnit.Case, async: true
# alias Addict.ManagerInteractor, as: Interactor
# test "creates a user" do
# user_params = %{"email" => "[email protected]", "password" => "password", "username" => "test"}
# assert Interactor.create(user_params, RepoStub, MailerStub, PasswordInteractorStub) == {:ok, %{email: "[email protected]"}}
# end
#
# test "validates for invalid params" do
# user_params = %{}
# assert catch_throw(Interactor.create(user_params, RepoStub, MailerStub)) == "Unable to create user, invalid hash. Required params: email, password, username"
# end
#
# test "validates for nil params" do
# assert catch_throw(Interactor.create(nil, RepoStub, MailerStub)) == "Unable to create user, invalid hash: nil"
# end
#
# test "allows for password to be recovered" do
# email = "[email protected]"
# assert Interactor.recover_password(email, RepoStub, MailerStub) == {:ok, %{email: "[email protected]"}}
# end
#
# test "handles invalid password recovery requests" do
# email = "[email protected]"
# assert Interactor.recover_password(email, RepoNoMailStub, MailerStub) == {:error, "Unable to send recovery e-mail"}
# end
#
# test "resets password" do
# assert Interactor.reset_password("token123", "valid_password", "valid_password", RepoStub, PasswordInteractorStub) == {:ok, %{email: "[email protected]"}}
# end
#
# test "handles reset password with nilled token" do
# assert Interactor.reset_password(nil, "password", "password", RepoStub) == {:error, "invalid recovery hash"}
# end
#
# test "handles reset password with invalid token" do
# assert Interactor.reset_password("invalidtoken", "password", "password", RepoStub) == {:error, "invalid recovery hash"}
# end
#
# test "handles reset password with invalid password confirmation" do
# assert Interactor.reset_password("invalidtoken", "password", "password_invalid") == {:error, "passwords must match"}
# end
# end
#
# defmodule PasswordInteractorStub do
# def generate_hash(_) do
# "1337h4$h"
# end
# end
#
# defmodule RepoStub do
# def create(_) do
# {:ok, %{email: "[email protected]"}}
# end
#
# def add_recovery_hash(_,_) do
# {:ok, %{email: "[email protected]"}}
# end
#
# def find_by_email(_) do
# {:ok, %{email: "[email protected]"}}
# end
#
# def change_password(_,_) do
# {:ok, %{email: "[email protected]"}}
# end
#
# def find_by_recovery_hash("token123") do
# {:ok, %{email: "[email protected]"}}
# end
#
# def find_by_recovery_hash("invalidtoken") do
# nil
# end
#
# def find_by_recovery_hash(nil) do
# nil
# end
end
defmodule RepoNoMailStub do
def find_by_email(_) do
nil
end
def add_recovery_hash(nil,_) do
{:error, "invalid user"}
end
end
defmodule MailerStub do
def send_welcome_email(_) do
{:ok, %{email: "[email protected]"}}
end
def send_password_recovery_email(_) do
{:ok, %{email: "[email protected]"}}
end
end
| 30.17 | 163 | 0.672522 |
735aef9c16e7a09b2778960255a1d6801e91ca7f | 5,381 | ex | Elixir | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/quota_limit.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/quota_limit.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/quota_limit.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceConsumerManagement.V1.Model.QuotaLimit do
@moduledoc """
`QuotaLimit` defines a specific limit that applies over a specified duration for a limit type. There can be at most one limit for a duration and limit type combination defined within a `QuotaGroup`.
## Attributes
* `defaultLimit` (*type:* `String.t`, *default:* `nil`) - Default number of tokens that can be consumed during the specified duration. This is the number of tokens assigned when a client application developer activates the service for his/her project. Specifying a value of 0 will block all requests. This can be used if you are provisioning quota to selected consumers and blocking others. Similarly, a value of -1 will indicate an unlimited quota. No other negative values are allowed. Used by group-based quotas only.
* `description` (*type:* `String.t`, *default:* `nil`) - Optional. User-visible, extended description for this quota limit. Should be used only when more context is needed to understand this limit than provided by the limit's display name (see: `display_name`).
* `displayName` (*type:* `String.t`, *default:* `nil`) - User-visible display name for this limit. Optional. If not set, the UI will provide a default display name based on the quota configuration. This field can be used to override the default display name generated from the configuration.
* `duration` (*type:* `String.t`, *default:* `nil`) - Duration of this limit in textual notation. Must be "100s" or "1d". Used by group-based quotas only.
* `freeTier` (*type:* `String.t`, *default:* `nil`) - Free tier value displayed in the Developers Console for this limit. The free tier is the number of tokens that will be subtracted from the billed amount when billing is enabled. This field can only be set on a limit with duration "1d", in a billable group; it is invalid on any other limit. If this field is not set, it defaults to 0, indicating that there is no free tier for this service. Used by group-based quotas only.
* `maxLimit` (*type:* `String.t`, *default:* `nil`) - Maximum number of tokens that can be consumed during the specified duration. Client application developers can override the default limit up to this maximum. If specified, this value cannot be set to a value less than the default limit. If not specified, it is set to the default limit. To allow clients to apply overrides with no upper bound, set this to -1, indicating unlimited maximum quota. Used by group-based quotas only.
* `metric` (*type:* `String.t`, *default:* `nil`) - The name of the metric this quota limit applies to. The quota limits with the same metric will be checked together during runtime. The metric must be defined within the service config.
* `name` (*type:* `String.t`, *default:* `nil`) - Name of the quota limit. The name must be provided, and it must be unique within the service. The name can only include alphanumeric characters as well as '-'. The maximum length of the limit name is 64 characters.
* `unit` (*type:* `String.t`, *default:* `nil`) - Specify the unit of the quota limit. It uses the same syntax as Metric.unit. The supported unit kinds are determined by the quota backend system. Here are some examples: * "1/min/{project}" for quota per minute per project. Note: the order of unit components is insignificant. The "1" at the beginning is required to follow the metric unit syntax.
* `values` (*type:* `map()`, *default:* `nil`) - Tiered limit values. You must specify this as a key:value pair, with an integer value that is the maximum number of requests allowed for the specified unit. Currently only STANDARD is supported.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:defaultLimit => String.t(),
:description => String.t(),
:displayName => String.t(),
:duration => String.t(),
:freeTier => String.t(),
:maxLimit => String.t(),
:metric => String.t(),
:name => String.t(),
:unit => String.t(),
:values => map()
}
field(:defaultLimit)
field(:description)
field(:displayName)
field(:duration)
field(:freeTier)
field(:maxLimit)
field(:metric)
field(:name)
field(:unit)
field(:values, type: :map)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.QuotaLimit do
def decode(value, options) do
GoogleApi.ServiceConsumerManagement.V1.Model.QuotaLimit.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.QuotaLimit do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 72.716216 | 524 | 0.728303 |
735afc6c28a3c369ea96fed9c403929c0e8534b0 | 1,564 | exs | Elixir | mix.exs | brandaoplaster/bank_api | 3df64ea59b4a0af4f9a74e36a71bbfd1eb5a8ca1 | [
"MIT"
] | null | null | null | mix.exs | brandaoplaster/bank_api | 3df64ea59b4a0af4f9a74e36a71bbfd1eb5a8ca1 | [
"MIT"
] | null | null | null | mix.exs | brandaoplaster/bank_api | 3df64ea59b4a0af4f9a74e36a71bbfd1eb5a8ca1 | [
"MIT"
] | null | null | null | defmodule BankApi.MixProject do
use Mix.Project
def project do
[
app: :bank_api,
version: "0.1.0",
elixir: "~> 1.5",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {BankApi.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.4.0"},
{:phoenix_pubsub, "~> 1.1"},
{:phoenix_ecto, "~> 4.0"},
{:ecto_sql, "~> 3.0"},
{:postgrex, ">= 0.0.0"},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate", "test"]
]
end
end
| 25.639344 | 79 | 0.581202 |
735b1b133c987f24138f91cd6b2136748c54bf43 | 825 | ex | Elixir | lib/nomad_client/model/host_disk_stats.ex | mrmstn/nomad_client | a586022e5eb4d166acba08b55b198ec079d4b118 | [
"Apache-2.0"
] | 8 | 2021-09-04T21:22:53.000Z | 2022-02-22T22:48:38.000Z | lib/nomad_client/model/host_disk_stats.ex | mrmstn/nomad_client | a586022e5eb4d166acba08b55b198ec079d4b118 | [
"Apache-2.0"
] | null | null | null | lib/nomad_client/model/host_disk_stats.ex | mrmstn/nomad_client | a586022e5eb4d166acba08b55b198ec079d4b118 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule NomadClient.Model.HostDiskStats do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:Device,
:Mountpoint,
:Size,
:Used,
:Available,
:UsedPercent,
:InodesUsedPercent
]
@type t :: %__MODULE__{
:Device => String.t() | nil,
:Mountpoint => String.t() | nil,
:Size => integer() | nil,
:Used => integer() | nil,
:Available => integer() | nil,
:UsedPercent => float() | nil,
:InodesUsedPercent => float() | nil
}
end
defimpl Poison.Decoder, for: NomadClient.Model.HostDiskStats do
def decode(value, _options) do
value
end
end
| 22.297297 | 91 | 0.597576 |
735b30712bedaa78aa15056df7d83933497b747f | 888 | ex | Elixir | clients/vm_migration/lib/google_api/vm_migration/v1/metadata.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/vm_migration/lib/google_api/vm_migration/v1/metadata.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/vm_migration/lib/google_api/vm_migration/v1/metadata.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.VMMigration.V1 do
@moduledoc """
API client metadata for GoogleApi.VMMigration.V1.
"""
@discovery_revision "20220526"
def discovery_revision(), do: @discovery_revision
end
| 32.888889 | 74 | 0.760135 |
735b541f470e3deffb1244d99822ebec45c32e7f | 4,188 | ex | Elixir | lib/master_proxy/cowboy2_handler.ex | perezd/master_proxy | 9dc9be22acff265b2a862f30d7048801edfb5b81 | [
"MIT"
] | null | null | null | lib/master_proxy/cowboy2_handler.ex | perezd/master_proxy | 9dc9be22acff265b2a862f30d7048801edfb5b81 | [
"MIT"
] | null | null | null | lib/master_proxy/cowboy2_handler.ex | perezd/master_proxy | 9dc9be22acff265b2a862f30d7048801edfb5b81 | [
"MIT"
] | 1 | 2020-11-18T01:39:55.000Z | 2020-11-18T01:39:55.000Z | defmodule MasterProxy.Cowboy2Handler do
require Logger
@moduledoc false
defp connection() do
Application.get_env(:master_proxy, :conn, Plug.Cowboy.Conn)
end
defp log_request(message) do
if Application.get_env(:master_proxy, :log_requests, true) do
Logger.debug(message)
end
end
@not_found_backend %{
plug: MasterProxy.Plug.NotFound
}
# endpoint and opts are not passed in because they
# are dynamically chosen
def init(req, {_endpoint, _opts}) do
log_request("MasterProxy.Cowboy2Handler called with req: #{inspect(req)}")
conn = connection().conn(req)
# extract this and pass in as a param somehow
backends = Application.get_env(:master_proxy, :backends)
backend = choose_backend(conn, backends)
log_request("Backend chosen: #{inspect(backend)}")
dispatch(backend, req)
end
defp choose_backend(conn, backends) do
Enum.find(backends, @not_found_backend, fn backend ->
backend_matches?(conn, backend)
end)
end
defp dispatch(%{phoenix_endpoint: endpoint}, req) do
# we don't pass in any opts here because that is how phoenix does it
# see https://github.com/phoenixframework/phoenix/blob/master/lib/phoenix/endpoint/cowboy2_adapter.ex#L42
Phoenix.Endpoint.Cowboy2Handler.init(req, {endpoint, endpoint.init([])})
end
defp dispatch(%{plug: plug} = backend, req) do
conn = connection().conn(req)
opts = Map.get(backend, :opts, [])
handler = plug
# Copied from https://github.com/phoenixframework/phoenix/blob/master/lib/phoenix/endpoint/cowboy2_handler.ex
c = connection()
%{adapter: {^c, req}} =
conn
|> handler.call(opts)
|> maybe_send(handler)
{:ok, req, {handler, opts}}
end
# Copied from https://github.com/phoenixframework/phoenix/blob/master/lib/phoenix/endpoint/cowboy2_handler.ex
defp maybe_send(%Plug.Conn{state: :unset}, _plug), do: raise(Plug.Conn.NotSentError)
defp maybe_send(%Plug.Conn{state: :set} = conn, _plug), do: Plug.Conn.send_resp(conn)
defp maybe_send(%Plug.Conn{} = conn, _plug), do: conn
defp maybe_send(other, plug) do
raise "MasterProxy expected #{inspect(plug)} to return Plug.Conn but got: " <> inspect(other)
end
defp backend_matches?(conn, backend) do
verb = Map.get(backend, :verb) || ~r/.*/
host = Map.get(backend, :host) || ~r/.*/
path = Map.get(backend, :path) || ~r/.*/
Regex.match?(host, conn.host) && Regex.match?(path, conn.request_path) &&
Regex.match?(verb, conn.method)
end
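  # Configuration sketch (illustrative; the endpoint and plug module names are
  # hypothetical). Each backend may set the `:verb`, `:host` and `:path`
  # regexes matched above, plus either `:phoenix_endpoint` or `:plug`/`:opts`
  # used by `dispatch/2`:
  #
  #     config :master_proxy,
  #       backends: [
  #         %{host: ~r/^api\./, phoenix_endpoint: MyAppWeb.Endpoint},
  #         %{path: ~r{^/health}, plug: MyApp.HealthPlug, opts: []}
  #       ]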
## Websocket callbacks
# Copied from https://github.com/phoenixframework/phoenix/blob/master/lib/phoenix/endpoint/cowboy2_handler.ex
def websocket_init([handler | state]) do
{:ok, state} = handler.init(state)
{:ok, [handler | state]}
end
def websocket_handle({opcode, payload}, [handler | state]) when opcode in [:text, :binary] do
handle_reply(handler, handler.handle_in({payload, opcode: opcode}, state))
end
def websocket_handle(_other, handler_state) do
{:ok, handler_state}
end
def websocket_info(message, [handler | state]) do
handle_reply(handler, handler.handle_info(message, state))
end
def terminate(_reason, _req, {_handler, _state}) do
:ok
end
def terminate({:error, :closed}, _req, [handler | state]) do
handler.terminate(:closed, state)
end
def terminate({:remote, :closed}, _req, [handler | state]) do
handler.terminate(:closed, state)
end
def terminate({:remote, code, _}, _req, [handler | state])
when code in 1000..1003 or code in 1005..1011 or code == 1015 do
handler.terminate(:closed, state)
end
def terminate(:remote, _req, [handler | state]) do
handler.terminate(:closed, state)
end
def terminate(reason, _req, [handler | state]) do
handler.terminate(reason, state)
end
defp handle_reply(handler, {:ok, state}), do: {:ok, [handler | state]}
defp handle_reply(handler, {:push, data, state}), do: {:reply, data, [handler | state]}
defp handle_reply(handler, {:reply, _status, data, state}),
do: {:reply, data, [handler | state]}
defp handle_reply(handler, {:stop, _reason, state}), do: {:stop, [handler | state]}
end
| 31.022222 | 113 | 0.684097 |
735b60a4f9d6503ff86de3c213ff5f4399e59eb3 | 15 | ex | Elixir | testData/org/elixir_lang/parser_definition/list_dot_operation_parsing_test_case/UnaryNumericOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/list_dot_operation_parsing_test_case/UnaryNumericOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/list_dot_operation_parsing_test_case/UnaryNumericOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | [].!0
[].not 0
| 5 | 8 | 0.333333 |
735b7a7e6469b8b72490eb4bb79a954bd24f152c | 468 | ex | Elixir | apps/binance_mock/lib/binance_mock/application.ex | frathon/hedgehog | 3ed1469919ba819280709a8f26def761003a99df | [
"Unlicense"
] | 65 | 2020-07-07T01:51:27.000Z | 2021-09-27T00:13:59.000Z | apps/binance_mock/lib/binance_mock/application.ex | Cinderella-Man/hedgehog | 3ed1469919ba819280709a8f26def761003a99df | [
"Unlicense"
] | 5 | 2021-02-12T08:21:15.000Z | 2021-09-01T21:17:27.000Z | apps/binance_mock/lib/binance_mock/application.ex | frathon/hedgehog | 3ed1469919ba819280709a8f26def761003a99df | [
"Unlicense"
] | 10 | 2020-08-13T13:39:31.000Z | 2021-09-14T12:46:51.000Z | defmodule BinanceMock.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
children = [
{BinanceMock, []}
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: BinanceMock.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 24.631579 | 65 | 0.711538 |
735bae5d55a92cdede575985940668c033acc13d | 5,015 | exs | Elixir | test/protox/message_test.exs | sdrew/protox | c28d02f1626b5cd39bad7de2b415d20ebbdf76ee | [
"MIT"
] | null | null | null | test/protox/message_test.exs | sdrew/protox | c28d02f1626b5cd39bad7de2b415d20ebbdf76ee | [
"MIT"
] | null | null | null | test/protox/message_test.exs | sdrew/protox | c28d02f1626b5cd39bad7de2b415d20ebbdf76ee | [
"MIT"
] | null | null | null | defmodule Protox.MessageTest do
Code.require_file("test/messages.exs")
use ExUnit.Case
doctest Protox.Message
test "Protobuf 2, replace only set scalar fields" do
r1 = %Protobuf2{a: 0, s: :ONE}
r2 = %Protobuf2{a: nil, s: :TWO}
r3 = %Protobuf2{a: 1, s: nil}
assert Protox.Message.merge(r1, r2) == %Protobuf2{a: 0, s: :TWO}
assert Protox.Message.merge(r1, r3) == %Protobuf2{a: 1, s: :ONE}
assert Protox.Message.merge(r2, r1) == %Protobuf2{a: 0, s: :ONE}
assert Protox.Message.merge(r3, r1) == %Protobuf2{a: 0, s: :ONE}
end
test "Replace scalar fields" do
r1 = %Required{a: 3, b: 4}
r2 = %Required{a: 5, b: 7}
assert Protox.Message.merge(r1, r2) == %Required{a: 5, b: 7}
assert Protox.Message.merge(r2, r1) == %Required{a: 3, b: 4}
end
test "Concatenate repeated fields" do
m1 = %Sub{g: [], j: [4, 5, 6]}
m2 = %Sub{g: [10, 20], j: [1, 2, 3]}
assert Protox.Message.merge(m1, m2) == %Sub{g: [10, 20], j: [4, 5, 6, 1, 2, 3]}
assert Protox.Message.merge(m2, m1) == %Sub{g: [10, 20], j: [1, 2, 3, 4, 5, 6]}
end
test "Recursively merge messages" do
m1 = %Msg{msg_e: true, msg_f: %Sub{g: [], j: [4, 5, 6]}}
m2 = %Msg{msg_e: false, msg_f: %Sub{g: [10, 20], j: [1, 2, 3]}}
assert Protox.Message.merge(m1, m2) == %Msg{
msg_e: true,
msg_f: %Sub{g: [10, 20], j: [4, 5, 6, 1, 2, 3]}
}
assert Protox.Message.merge(m2, m1) == %Msg{
msg_e: true,
msg_f: %Sub{g: [10, 20], j: [1, 2, 3, 4, 5, 6]}
}
end
test "Overwrite nil messages" do
m1 = %Msg{msg_f: nil}
m2 = %Msg{msg_f: %Sub{g: [10, 20], j: [1, 2, 3]}}
assert Protox.Message.merge(m1, m2) == %Msg{
msg_f: %Sub{g: [10, 20], j: [1, 2, 3]}
}
end
test "Don't overwrite with nil messages" do
m1 = %Msg{msg_f: nil}
m2 = %Msg{msg_f: %Sub{g: [10, 20], j: [1, 2, 3]}}
assert Protox.Message.merge(m2, m1) == %Msg{
msg_f: %Sub{g: [10, 20], j: [1, 2, 3]}
}
end
test "Don't overwrite oneof with nil" do
m1 = %Msg{msg_m: {:msg_o, %Sub{k: 2, j: [4, 5, 6]}}}
m2 = %Msg{msg_m: nil}
assert Protox.Message.merge(m1, m2) == %Msg{msg_m: {:msg_o, %Sub{k: 2, j: [4, 5, 6]}}}
end
test "Overwrite nil oneof" do
m1 = %Msg{msg_m: {:msg_o, %Sub{k: 2, j: [4, 5, 6]}}}
m2 = %Msg{msg_m: nil}
assert Protox.Message.merge(m2, m1) == %Msg{msg_m: {:msg_o, %Sub{k: 2, j: [4, 5, 6]}}}
end
test "Recursively merge messages in oneof" do
m1 = %Msg{msg_m: {:msg_o, %Sub{k: 2, j: [4, 5, 6]}}}
m2 = %Msg{msg_m: {:msg_o, %Sub{k: 3, j: [1, 2, 3]}}}
assert Protox.Message.merge(m1, m2) == %Msg{
msg_m: {:msg_o, %Sub{k: 3, j: [4, 5, 6, 1, 2, 3]}}
}
assert Protox.Message.merge(m2, m1) == %Msg{
msg_m: {:msg_o, %Sub{k: 2, j: [1, 2, 3, 4, 5, 6]}}
}
end
test "Overwrite non-messages oneof" do
m1 = %Msg{msg_m: {:msg_n, :FOO}}
m2 = %Msg{msg_m: {:msg_n, :BAR}}
assert Protox.Message.merge(m1, m2) == %Msg{
msg_m: {:msg_n, :BAR}
}
assert Protox.Message.merge(m2, m1) == %Msg{
msg_m: {:msg_n, :FOO}
}
end
test "Merge scalar maps" do
m1 = %Msg{msg_k: %{1 => "a", 2 => "b", 100 => "c"}}
m2 = %Msg{msg_k: %{1 => "x", 2 => "y", 101 => "z"}}
assert Protox.Message.merge(m1, m2) == %Msg{
msg_k: %{1 => "x", 2 => "y", 100 => "c", 101 => "z"}
}
assert Protox.Message.merge(m2, m1) == %Msg{
msg_k: %{1 => "a", 2 => "b", 100 => "c", 101 => "z"}
}
end
test "Merge messages maps" do
m1 = %Upper{
msg_map: %{
"1" => %Msg{msg_e: true, msg_f: %Sub{g: [], j: [4, 5, 6]}},
"2" => %Msg{msg_d: :FOO, msg_m: {:msg_n, "FOO"}},
"100" => %Msg{msg_a: 33}
}
}
m2 = %Upper{
msg_map: %{
"1" => %Msg{msg_e: false, msg_f: %Sub{g: [10, 20], j: [1, 2, 3]}},
"2" => %Msg{msg_d: :BAR, msg_m: {:msg_o, %Sub{}}},
"101" => %Msg{msg_a: 44}
}
}
assert Protox.Message.merge(m1, m2) == %Upper{
msg_map: %{
"1" => %Msg{msg_e: true, msg_f: %Sub{g: [10, 20], j: [4, 5, 6, 1, 2, 3]}},
"2" => %Msg{msg_d: :BAR, msg_m: {:msg_o, %Sub{}}},
"100" => %Msg{msg_a: 33},
"101" => %Msg{msg_a: 44}
}
}
assert Protox.Message.merge(m2, m1) == %Upper{
msg_map: %{
"1" => %Msg{msg_e: true, msg_f: %Sub{g: [10, 20], j: [1, 2, 3, 4, 5, 6]}},
"2" => %Msg{msg_d: :BAR, msg_m: {:msg_n, "FOO"}},
"100" => %Msg{msg_a: 33},
"101" => %Msg{msg_a: 44}
}
}
end
test "Merge with nil" do
m = %Msg{msg_k: %{1 => "a", 2 => "b", 100 => "c"}}
assert Protox.Message.merge(m, nil) == m
assert Protox.Message.merge(nil, m) == m
assert Protox.Message.merge(nil, nil) == nil
end
end
| 30.579268 | 90 | 0.477368 |
735bb1d3ca90c9bb5dcf4cf2a79f47a0347e72dc | 944 | ex | Elixir | elixir/langxpay/lib/langxpay_web/views/error_view.ex | ednailson/languages-experiments | 309a0360c6c26a3c95f3c8e25fe37bc7a5cbb8d4 | ["MIT"] | null | null | null | elixir/langxpay/lib/langxpay_web/views/error_view.ex | ednailson/languages-experiments | 309a0360c6c26a3c95f3c8e25fe37bc7a5cbb8d4 | ["MIT"] | null | null | null | elixir/langxpay/lib/langxpay_web/views/error_view.ex | ednailson/languages-experiments | 309a0360c6c26a3c95f3c8e25fe37bc7a5cbb8d4 | ["MIT"] | null | null | null |
defmodule LangxpayWeb.ErrorView do
use LangxpayWeb, :view
import Ecto.Changeset, only: [traverse_errors: 2]
alias Ecto.Changeset
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.json", _assigns) do
# %{errors: %{detail: "Internal Server Error"}}
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.json" becomes
# "Not Found".
def template_not_found(template, _assigns) do
%{errors: %{detail: Phoenix.Controller.status_message_from_template(template)}}
end
def render("400.json", %{result: %Changeset{} = changeset}) do
translate_errors(changeset)
end
defp translate_errors(changeset) do
traverse_errors(changeset, fn {msg, opts} ->
Enum.reduce(opts, msg, fn {key, value}, acc ->
String.replace(acc, "%{#{key}}", to_string(value))
end)
end)
end
end
| 28.606061 | 83 | 0.688559 |
735bccfd013ec2ac4ff88dd606e6f4b3ce05a8af | 1,232 | ex | Elixir | web/views/error_helpers.ex | holandes22/rotterdam | d8b56079638c15a8492c08a6859ed14413163e62 | ["MIT"] | null | null | null | web/views/error_helpers.ex | holandes22/rotterdam | d8b56079638c15a8492c08a6859ed14413163e62 | ["MIT"] | null | null | null | web/views/error_helpers.ex | holandes22/rotterdam | d8b56079638c15a8492c08a6859ed14413163e62 | ["MIT"] | null | null | null |
defmodule Rotterdam.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
if error = form.errors[field] do
content_tag :span, translate_error(error), class: "help-block"
end
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# Because error messages were defined within Ecto, we must
# call the Gettext module passing our Gettext backend. We
# also use the "errors" domain as translations are placed
# in the errors.po file.
# Ecto will pass the :count keyword if the error message is
# meant to be pluralized.
# On your own code and templates, depending on whether you
# need the message to be pluralized or not, this could be
# written simply as:
#
# dngettext "errors", "1 file", "%{count} files", count
# dgettext "errors", "is invalid"
#
if count = opts[:count] do
Gettext.dngettext(Rotterdam.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(Rotterdam.Gettext, "errors", msg, opts)
end
end
end
| 30.04878 | 75 | 0.668831 |
735bcebda83a4eb975e76b8248ecc75b3f0438ce | 1,486 | ex | Elixir | lib/hexpm/parallel/parallel.ex | findmypast/hexfmp | 38a50f5e1057833fd98748faac230bf4b9cc26a3 | ["Apache-2.0"] | null | null | null | lib/hexpm/parallel/parallel.ex | findmypast/hexfmp | 38a50f5e1057833fd98748faac230bf4b9cc26a3 | ["Apache-2.0"] | null | null | null | lib/hexpm/parallel/parallel.ex | findmypast/hexfmp | 38a50f5e1057833fd98748faac230bf4b9cc26a3 | ["Apache-2.0"] | null | null | null |
defmodule Hexpm.Parallel do
use GenServer
require Logger
@timeout 60 * 1000
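# Added note (not in the original file): a minimal usage sketch of the functions below.
# `process/1`, `packages` and the option values are hypothetical.
#
#     Hexpm.Parallel.each(&process/1, packages, parallel: 5)
#     Hexpm.Parallel.reduce!(&process/1, packages, 0, &+/2, timeout: 30_000)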
def each(fun, args, opts \\ [])
def each(_fun, [], _opts), do: []
def each(fun, args, opts) do
opts = default_opts(opts)
num_args = length(args)
Hexpm.Parallel.ETS.each(fun, num_args, args, opts)
end
def each!(fun, args, opts \\ [])
def each!(fun, args, opts) do
results = each(fun, args, opts)
Enum.map(results, fn
{:ok, value} ->
value
{:error, _} ->
raise "Parallel tasks failed"
end)
end
def reduce(fun, args, acc, reducer, opts \\ [])
def reduce(_fun, [], acc, _reducer, _opts), do: acc
def reduce(fun, args, acc, reducer, opts) do
opts = default_opts(opts)
Hexpm.Parallel.Process.reduce(fun, args, acc, reducer, opts)
end
def reduce!(fun, args, acc, reducer, opts \\ [])
def reduce!(_fun, [], acc, _reducer, _opts), do: acc
def reduce!(fun, args, acc, reducer, opts) do
opts = default_opts(opts)
Hexpm.Parallel.Process.reduce(fun, args, acc, reducer!(reducer), opts)
end
defp reducer!(fun) do
fn
{:ok, value}, acc ->
fun.(value, acc)
{:error, _}, _acc ->
raise "Parallel tasks failed"
end
end
defp default_opts(opts) do
opts
|> Keyword.put(:parallel, parallel(opts[:parallel]))
|> Keyword.put_new(:timeout, @timeout)
end
if Mix.env == :test do
defp parallel(_arg), do: 1
else
defp parallel(arg), do: arg || 10
end
end
| 22.861538 | 74 | 0.605653 |
735bd423fb0ed53b885fc7dca6e286b78c4a612d | 1,517 | ex | Elixir | lib/phoenix_live_vue_test_web/endpoint.ex | tfwright/phoenix_live_vue_test | f560401c28fdf0132c33e4943a80357110d16160 | ["MIT"] | null | null | null | lib/phoenix_live_vue_test_web/endpoint.ex | tfwright/phoenix_live_vue_test | f560401c28fdf0132c33e4943a80357110d16160 | ["MIT"] | 1 | 2021-05-11T13:17:32.000Z | 2021-05-11T13:17:32.000Z | lib/phoenix_live_vue_test_web/endpoint.ex | tfwright/phoenix_live_vue_test | f560401c28fdf0132c33e4943a80357110d16160 | ["MIT"] | null | null | null |
defmodule PhoenixLiveVueTestWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :phoenix_live_vue_test
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
@session_options [
store: :cookie,
key: "_phoenix_live_vue_test_key",
signing_salt: "AU/zegi5"
]
socket "/socket", PhoenixLiveVueTestWeb.UserSocket,
websocket: true,
longpoll: false
socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :phoenix_live_vue_test,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session, @session_options
plug PhoenixLiveVueTestWeb.Router
end
| 30.34 | 97 | 0.727093 |
735bdb5f039f8d9247438a3650550196a2986151 | 976 | ex | Elixir | lib/azure_ad_openid/nonce_store.ex | mottaman85/azure_ad_openid | 9aa31c29d8a5d157d5d1f9c6f845338f7d30882e | ["MIT"] | 6 | 2019-06-12T14:36:49.000Z | 2021-01-22T19:53:15.000Z | lib/azure_ad_openid/nonce_store.ex | mottaman85/azure_ad_openid | 9aa31c29d8a5d157d5d1f9c6f845338f7d30882e | ["MIT"] | null | null | null | lib/azure_ad_openid/nonce_store.ex | mottaman85/azure_ad_openid | 9aa31c29d8a5d157d5d1f9c6f845338f7d30882e | ["MIT"] | 2 | 2020-05-02T02:18:11.000Z | 2021-02-03T20:37:22.000Z |
defmodule AzureADOpenId.NonceStore do
@moduledoc """
Creates, stores and checks nonces. A created nonce will be deleted after its timeout elapses.
"""
use Agent
@agent_name __MODULE__
def start(_, _) do
Agent.start_link(fn -> MapSet.new() end, name: @agent_name)
end
def start_link(_) do
Agent.start_link(fn -> MapSet.new() end, name: @agent_name)
end
def create_nonce(timeout) do
# create nonce
nonce = SecureRandom.uuid()
Agent.update(@agent_name, &MapSet.put(&1, nonce))
# set cleanup task
if(timeout != :infinity) do
Task.start(fn -> cleanup(nonce, timeout) end)
end
nonce
end
def check_nonce(nonce) do
deleted = Agent.get(@agent_name, &MapSet.member?(&1, nonce))
delete_nonce(nonce)
deleted
end
defp cleanup(nonce, timeout) do
Process.sleep(timeout)
delete_nonce(nonce)
end
defp delete_nonce(nonce) do
Agent.update(@agent_name, &MapSet.delete(&1, nonce))
end
end
| 22.181818 | 95 | 0.675205 |
735bfafa3d4af7b6ae7eb9bb423dc4217950f85e | 1,129 | exs | Elixir | config/config.exs | Kernael/Blackmagic21 | 011d0d51bbc5baf6fb7a9bdba0b7a55215e7a9fd | ["MIT"] | null | null | null | config/config.exs | Kernael/Blackmagic21 | 011d0d51bbc5baf6fb7a9bdba0b7a55215e7a9fd | ["MIT"] | null | null | null | config/config.exs | Kernael/Blackmagic21 | 011d0d51bbc5baf6fb7a9bdba0b7a55215e7a9fd | ["MIT"] | null | null | null |
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :black_magic21, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:black_magic21, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.419355 | 73 | 0.753764 |
735bff3f69eb6809835fd5b24c630ee50b582144 | 1,220 | exs | Elixir | apps/tai/test/tai/trading/orders/create_rejected_test.exs | ihorkatkov/tai | 09f9f15d2c385efe762ae138a8570f1e3fd41f26 | ["MIT"] | 1 | 2019-12-19T05:16:26.000Z | 2019-12-19T05:16:26.000Z | apps/tai/test/tai/trading/orders/create_rejected_test.exs | ihorkatkov/tai | 09f9f15d2c385efe762ae138a8570f1e3fd41f26 | ["MIT"] | null | null | null | apps/tai/test/tai/trading/orders/create_rejected_test.exs | ihorkatkov/tai | 09f9f15d2c385efe762ae138a8570f1e3fd41f26 | ["MIT"] | 1 | 2020-05-03T23:32:11.000Z | 2020-05-03T23:32:11.000Z |
defmodule Tai.Trading.Orders.CreateRejectedTest do
use ExUnit.Case, async: false
alias Tai.TestSupport.Mocks
alias Tai.Trading.{Order, Orders, OrderSubmissions}
setup do
on_exit(fn ->
:ok = Application.stop(:tai)
end)
start_supervised!(Mocks.Server)
{:ok, _} = Application.ensure_all_started(:tai)
:ok
end
@venue_order_id "df8e6bd0-a40a-42fb-8fea-b33ef4e34f14"
[{:buy, OrderSubmissions.BuyLimitGtc}, {:sell, OrderSubmissions.SellLimitGtc}]
|> Enum.each(fn {side, submission_type} ->
@submission_type submission_type
test "#{side} updates the relevant attributes" do
submission = Support.OrderSubmissions.build_with_callback(@submission_type)
Mocks.Responses.Orders.GoodTillCancel.rejected(@venue_order_id, submission)
{:ok, _} = Orders.create(submission)
assert_receive {
:callback_fired,
nil,
%Order{status: :enqueued}
}
assert_receive {
:callback_fired,
%Order{status: :enqueued},
%Order{status: :rejected} = rejected_order
}
assert rejected_order.venue_order_id == @venue_order_id
assert %DateTime{} = rejected_order.last_venue_timestamp
end
end)
end
| 27.727273 | 81 | 0.686885 |
735c2155e2d1f6420b6b15b84addab67567500ea | 3,702 | ex | Elixir | core/handler/cowboy_routing.ex | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | ["Apache-2.0"] | null | null | null | core/handler/cowboy_routing.ex | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | ["Apache-2.0"] | null | null | null | core/handler/cowboy_routing.ex | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | ["Apache-2.0"] | null | null | null |
# Copyright(c) 2015-2018 ACCESS CO., LTD. All rights reserved.
use Croma
defmodule AntikytheraCore.Handler.CowboyRouting do
alias Antikythera.{Env, Domain, GearName, GearNameStr}
alias AntikytheraCore.GearModule
alias AntikytheraCore.Config.Gear, as: GearConfig
alias AntikytheraCore.Ets.ConfigCache
alias AntikytheraCore.Handler.{GearAction, Healthcheck, SystemInfoExporter}
require AntikytheraCore.Logger, as: L
@healthcheck_route_initialized {"/healthcheck" , Healthcheck.Initialized , nil}
@healthcheck_route_uninitialized {"/healthcheck" , Healthcheck.Uninitialized , nil}
@version_report_route {"/versions" , SystemInfoExporter.Versions , nil}
@total_error_count_route {"/error_count/_total" , SystemInfoExporter.ErrorCount, :total}
@per_app_error_count_route {"/error_count/:otp_app_name", SystemInfoExporter.ErrorCount, :per_otp_app}
defun compiled_routes(gear_names :: [GearName.t], initialized? :: v[boolean]) :: :cowboy_router.dispatch_rules do
gear_routes = Enum.flat_map(gear_names, &per_gear_domain_pathroutes_pairs/1)
:cowboy_router.compile(gear_routes ++ wildcard_domain_routes(initialized?))
end
defunp wildcard_domain_routes(initialized? :: v[boolean]) :: :cowboy_router.routes do
path_rules = [
(if initialized?, do: @healthcheck_route_initialized, else: @healthcheck_route_uninitialized),
@version_report_route,
@total_error_count_route,
@per_app_error_count_route,
]
[{:_, path_rules}]
end
defunp per_gear_domain_pathroutes_pairs(gear_name :: v[GearName.t]) :: :cowboy_router.routes do
routes = [
static_file_serving_route(gear_name),
normal_routes(gear_name),
] |> Enum.reject(&is_nil/1)
domains_of(gear_name) |> Enum.map(fn domain -> {domain, routes} end)
end
@typep route_path :: {String.t, module, any}
defunp static_file_serving_route(gear_name :: v[GearName.t]) :: nil | route_path do
router_module = GearModule.router(gear_name)
try do
router_module.static_prefix()
rescue
UndefinedFunctionError -> nil
end
|> case do
nil -> nil
prefix -> {"#{prefix}/[...]", :cowboy_static , {:priv_dir, gear_name, "static", [{:mimetypes, :cow_mimetypes, :all}]}}
end
end
defunp normal_routes(gear_name :: v[GearName.t]) :: route_path do
{"/[...]", GearAction.Web, {gear_name, GearModule.request_helper_modules(gear_name)}}
end
defunp domains_of(gear_name :: v[GearName.t]) :: [Domain.t] do
custom_domains =
case ConfigCache.Gear.read(gear_name) do
nil -> []
%GearConfig{domains: domains} -> domains
end
[default_domain(gear_name) | custom_domains]
end
defun update_routing(gear_names :: [GearName.t], initialized? :: v[boolean]) :: :ok do
if Env.no_listen?() do
:ok
else
L.info("updating cowboy routing (initialized?=#{initialized?})")
:cowboy.set_env(:antikythera_http_listener, :dispatch, compiled_routes(gear_names, initialized?))
end
end
#
# Handling domains
#
@deployments Application.fetch_env!(:antikythera, :deployments)
@current_compile_env Env.compile_env()
# Also used by ac_console
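# Added note (not in the original file): with no matching @deployments entry and no
# BASE_DOMAIN set, default_domain(:my_gear) would return "my-gear.localhost"; underscores
# in the gear name become hyphens.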
defun default_domain(gear_name :: v[GearName.t | GearNameStr.t], env :: v[Env.t] \\ @current_compile_env) :: Domain.t do
gear_name_replaced = to_string(gear_name) |> String.replace("_", "-")
base_domain =
case Keyword.get(@deployments, env) do
nil -> System.get_env("BASE_DOMAIN") || "localhost"
domain -> domain
end
"#{gear_name_replaced}.#{base_domain}"
end
end
| 38.5625 | 124 | 0.687196 |
735c2f98a492064a12dcadac4d339fa4ed868712 | 1,407 | ex | Elixir | lib/hexpm_web/controllers/user_controller.ex | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | ["Apache-2.0"] | 691 | 2017-03-08T09:15:45.000Z | 2022-03-23T22:04:47.000Z | lib/hexpm_web/controllers/user_controller.ex | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | ["Apache-2.0"] | 491 | 2017-03-07T12:58:42.000Z | 2022-03-29T23:32:54.000Z | lib/hexpm_web/controllers/user_controller.ex | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | ["Apache-2.0"] | 200 | 2017-03-12T23:03:39.000Z | 2022-03-05T17:55:52.000Z |
defmodule HexpmWeb.UserController do
use HexpmWeb, :controller
def show(conn, %{"username" => username}) do
user =
Users.get_by_username(username, [
:emails,
:organization,
owned_packages: [:repository, :downloads]
])
if user do
organization = user.organization
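# Added note (not in the original file): organization profiles requested under /users and
# regular user profiles requested under /orgs are redirected to their canonical path;
# everything else falls through to show_user/2 below.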
case conn.path_info do
["users" | _] when not is_nil(organization) ->
redirect(conn, to: Router.user_path(user))
["orgs" | _] when is_nil(organization) ->
redirect(conn, to: Router.user_path(user))
_ ->
show_user(conn, user)
end
else
not_found(conn)
end
end
defp show_user(conn, user) do
packages =
Packages.accessible_user_owned_packages(user, conn.assigns.current_user)
|> Packages.attach_versions()
downloads = Packages.packages_downloads_with_all_views(packages)
total_downloads =
Enum.reduce(downloads, 0, fn {_id, d}, acc -> acc + Map.get(d, "all", 0) end)
public_email = User.email(user, :public)
gravatar_email = User.email(user, :gravatar)
render(
conn,
"show.html",
title: user.username,
container: "container page user",
user: user,
packages: packages,
downloads: downloads,
total_downloads: total_downloads,
public_email: public_email,
gravatar_email: gravatar_email
)
end
end
| 24.684211 | 83 | 0.628287 |
735c45d6dd7a0a78441d6471d195f2d4641facad | 5,235 | exs | Elixir | test/acceptance/post_test.exs | kyle5794/ash_json_api | 461cd745c69e63cf8961523dc0eb91f1d45eca21 | ["MIT"] | null | null | null | test/acceptance/post_test.exs | kyle5794/ash_json_api | 461cd745c69e63cf8961523dc0eb91f1d45eca21 | ["MIT"] | null | null | null | test/acceptance/post_test.exs | kyle5794/ash_json_api | 461cd745c69e63cf8961523dc0eb91f1d45eca21 | ["MIT"] | null | null | null |
defmodule Test.Acceptance.PostTest do
use ExUnit.Case, async: true
defmodule Author do
use Ash.Resource,
data_layer: Ash.DataLayer.Ets,
extensions: [
AshJsonApi.Resource
]
ets do
private?(true)
end
json_api do
type("author")
routes do
base("/authors")
get(:default)
index(:default)
end
end
actions do
read(:default)
create(:default)
end
attributes do
attribute(:id, :uuid, primary_key?: true)
attribute(:name, :string)
end
relationships do
has_many(:posts, Test.Acceptance.PostTest.Post, destination_field: :author_id)
end
end
defmodule Post do
use Ash.Resource,
data_layer: Ash.DataLayer.Ets,
extensions: [
AshJsonApi.Resource
]
ets do
private?(true)
end
json_api do
type("post")
routes do
base("/posts")
get(:default)
index(:default)
post(:default)
end
end
actions do
read(:default)
create :default do
accept([:id, :name, :hidden, :author])
end
end
attributes do
attribute(:id, :uuid, primary_key?: true, allow_nil?: false)
attribute(:name, :string, allow_nil?: false)
attribute(:hidden, :string)
attribute(:email, :string,
allow_nil?: true,
constraints: [
match: ~r/[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}/
]
)
end
relationships do
belongs_to(:author, Test.Acceptance.PostTest.Author, required?: false)
end
end
defmodule Api do
use Ash.Api,
extensions: [
AshJsonApi.Api
]
json_api do
log_errors?(false)
end
resources do
resource(Post)
resource(Author)
end
end
import AshJsonApi.Test
@tag :attributes
describe "invalid_post" do
test "create without all attributes in accept list" do
id = Ecto.UUID.generate()
response =
Api
|> post("/posts", %{
data: %{
type: "post",
attributes: %{
id: id,
name: "Invalid Post 1"
}
}
})
# response is a Plug.
assert %{"data" => %{"attributes" => %{"hidden" => nil}}} = response.resp_body
end
end
@tag :attributes
describe "post" do
test "create with all attributes in accept list" do
id = Ecto.UUID.generate()
Api
|> post("/posts", %{
data: %{
type: "post",
attributes: %{
id: id,
name: "Post 1",
hidden: "hidden"
}
}
})
|> assert_attribute_equals("email", nil)
end
end
describe "post_email_id_exception" do
test "create with all attributes in accept list with email" do
id = Ecto.UUID.generate()
response =
Api
|> post("/posts", %{
data: %{
type: "post",
attributes: %{
id: id,
name: "Invalid Post 2",
hidden: "hidden",
email: "[email protected]"
}
}
})
# response is a Plug.
assert %{"errors" => [error]} = response.resp_body
assert error["code"] == "InvalidBody"
assert error["detail"] ==
"Expected only defined properties, got key [\"data\", \"attributes\", \"email\"]."
end
end
describe "post_email_id_relationship" do
setup do
author =
Author
|> Ash.Changeset.new(%{id: Ecto.UUID.generate(), name: "John"})
|> Api.create!()
%{author: author}
end
test "create with all attributes in accept list without email along with relationship", %{
author: author
} do
id = Ecto.UUID.generate()
response =
Api
|> post("/posts", %{
data: %{
type: "post",
attributes: %{
id: id,
name: "Post 2",
hidden: "hidden"
},
relationships: %{
author: %{
data: %{id: author.id, type: "author"}
}
}
}
})
# response is a Plug.
assert %{"data" => %{"attributes" => %{"hidden" => "hidden"}}} = response.resp_body
end
test "create with all attributes in accept list with email along with relationship", %{
author: author
} do
id = Ecto.UUID.generate()
response =
Api
|> post("/posts", %{
data: %{
type: "post",
attributes: %{
id: id,
name: "Invalid Post 3",
hidden: "hidden",
email: "[email protected]"
},
relationships: %{
author: %{
data: %{id: author.id, type: "author"}
}
}
}
})
# response is a Plug.
assert %{"errors" => [error]} = response.resp_body
assert error["code"] == "InvalidBody"
assert error["detail"] ==
"Expected only defined properties, got key [\"data\", \"attributes\", \"email\"]."
end
end
end
| 21.280488 | 97 | 0.494747 |
735c5068f0707addca5d801e806a5226c190ba3e | 3,993 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/comment_snippet.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | ["Apache-2.0"] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/comment_snippet.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | ["Apache-2.0"] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/comment_snippet.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | ["Apache-2.0"] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.CommentSnippet do
@moduledoc """
Basic details about a comment, such as its author and text.
## Attributes
- authorChannelId (String.t): The id of the author's YouTube channel, if any. Defaults to: `null`.
- authorChannelUrl (String.t): Link to the author's YouTube channel, if any. Defaults to: `null`.
- authorDisplayName (String.t): The name of the user who posted the comment. Defaults to: `null`.
- authorProfileImageUrl (String.t): The URL for the avatar of the user who posted the comment. Defaults to: `null`.
- canRate (boolean()): Whether the current viewer can rate this comment. Defaults to: `null`.
- channelId (String.t): The id of the corresponding YouTube channel. In case of a channel comment this is the channel the comment refers to. In case of a video comment it's the video's channel. Defaults to: `null`.
- likeCount (integer()): The total number of likes this comment has received. Defaults to: `null`.
- moderationStatus (String.t): The comment's moderation status. Will not be set if the comments were requested through the id filter. Defaults to: `null`.
- Enum - one of [heldForReview, likelySpam, published, rejected]
- parentId (String.t): The unique id of the parent comment, only set for replies. Defaults to: `null`.
- publishedAt (DateTime.t): The date and time when the comment was orignally published. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format. Defaults to: `null`.
- textDisplay (String.t): The comment's text. The format is either plain text or HTML dependent on what has been requested. Even the plain text representation may differ from the text originally posted in that it may replace video links with video titles etc. Defaults to: `null`.
- textOriginal (String.t): The comment's original raw text as initially posted or last updated. The original text will only be returned if it is accessible to the viewer, which is only guaranteed if the viewer is the comment's author. Defaults to: `null`.
- updatedAt (DateTime.t): The date and time when was last updated . The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format. Defaults to: `null`.
- videoId (String.t): The ID of the video the comment refers to, if any. Defaults to: `null`.
- viewerRating (String.t): The rating the viewer has given to this comment. For the time being this will never return RATE_TYPE_DISLIKE and instead return RATE_TYPE_NONE. This may change in the future. Defaults to: `null`.
- Enum - one of [dislike, like, none, unspecified]
"""
defstruct [
:authorChannelId,
:authorChannelUrl,
:authorDisplayName,
:authorProfileImageUrl,
:canRate,
:channelId,
:likeCount,
:moderationStatus,
:parentId,
:publishedAt,
:textDisplay,
:textOriginal,
:updatedAt,
:videoId,
:viewerRating
]
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.CommentSnippet do
def decode(value, _options) do
value
end
end
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.CommentSnippet do
def encode(value, options) do
GoogleApi.YouTube.V3.Deserializer.serialize_non_nil(value, options)
end
end
| 53.959459 | 286 | 0.739544 |
735c62b31babeee04f821dc22fe024424bef10ab | 401 | ex | Elixir | ch06/pooly/lib/pooly/pool_supervisor.ex | seansu4you87/little-otp-book | 2c3ddf34795780d660e50497f7623589cf1a22f4 | ["MIT"] | null | null | null | ch06/pooly/lib/pooly/pool_supervisor.ex | seansu4you87/little-otp-book | 2c3ddf34795780d660e50497f7623589cf1a22f4 | ["MIT"] | null | null | null | ch06/pooly/lib/pooly/pool_supervisor.ex | seansu4you87/little-otp-book | 2c3ddf34795780d660e50497f7623589cf1a22f4 | ["MIT"] | null | null | null |
defmodule Pooly.PoolSupervisor do
use Supervisor
def start_link(pool_config) do
Supervisor.start_link(
__MODULE__,
pool_config,
name: :"#{pool_config[:name]}Supervisor"
)
end
def init(pool_config) do
opts = [
strategy: :one_for_all
]
children = [
worker(Pooly.PoolServer, [self, pool_config])
]
supervise(children, opts)
end
end
| 16.708333 | 51 | 0.638404 |
735c8e1dc12602455af32826471702e3d8c4ed08 | 17,513 | ex | Elixir | apps/financial_system/lib/financial_system/money/coin.ex | juniornelson123/tech-challenge-stone | e27b767514bf42a5ade5228de56c3c7ea38459d7 | ["MIT"] | null | null | null | apps/financial_system/lib/financial_system/money/coin.ex | juniornelson123/tech-challenge-stone | e27b767514bf42a5ade5228de56c3c7ea38459d7 | ["MIT"] | 2 | 2021-03-10T03:19:32.000Z | 2021-09-02T04:33:17.000Z | apps/financial_system/lib/financial_system/money/coin.ex | juniornelson123/tech-challenge-stone | e27b767514bf42a5ade5228de56c3c7ea38459d7 | ["MIT"] | null | null | null |
defmodule FinancialSystem.Money.Coin do
@moduledoc """
Currencies ISO4217
"""
@coins [
%{
name: "UAE Dirham",
code: "AED",
num_code: 784,
multiplier: 2
},
%{
name: "Afghani",
code: "AFN",
num_code: 971,
multiplier: 2
},
%{
name: "Lek",
code: "ALL",
num_code: 008,
multiplier: 2
},
%{
name: "Armenian Dram",
code: "AMD",
num_code: 051,
multiplier: 2
},
%{
name: "Netherlands Antillean Guilder",
code: "ANG",
num_code: 532,
multiplier: 2
},
%{
name: "Kwanza",
code: "AOA",
num_code: 973,
multiplier: 2
},
%{
name: "Argentine Peso",
code: "ARS",
num_code: 032,
multiplier: 2
},
%{
name: "Australian Dollar",
code: "AUD",
num_code: 036,
multiplier: 2
},
%{
name: "Aruban Florin",
code: "AWG",
num_code: 533,
multiplier: 2
},
%{
name: "Azerbaijan Manat",
code: "AZN",
num_code: 944,
multiplier: 2
},
%{
name: "Convertible Mark",
code: "BAM",
num_code: 977,
multiplier: 2
},
%{
name: "Barbados Dollar",
code: "BBD",
num_code: 052,
multiplier: 2
},
%{
name: "Taka",
code: "BDT",
num_code: 050,
multiplier: 2
},
%{
name: "Bulgarian Lev",
code: "BGN",
num_code: 975,
multiplier: 2
},
%{
name: "Bahraini Dinar",
code: "BHD",
num_code: 048,
multiplier: 3
},
%{
name: "Burundi Franc",
code: "BIF",
num_code: 108,
multiplier: 0
},
%{
name: "Bermudian Dollar",
code: "BMD",
num_code: 060,
multiplier: 2
},
%{
name: "Brunei Dollar",
code: "BND",
num_code: 096,
multiplier: 2
},
%{
name: "Boliviano",
code: "BOB",
num_code: 068,
multiplier: 2
},
%{
name: "Mvdol",
code: "BOV",
num_code: 984,
multiplier: 2
},
%{
name: "Brazilian Real",
code: "BRL",
num_code: 986,
multiplier: 2
},
%{
name: "Bahamian Dollar",
code: "BSD",
num_code: 044,
multiplier: 2
},
%{
name: "Ngultrum",
code: "BTN",
num_code: 064,
multiplier: 2
},
%{
name: "Pula",
code: "BWP",
num_code: 072,
multiplier: 2
},
%{
name: "Belarusian Ruble",
code: "BYN",
num_code: 933,
multiplier: 2
},
%{
name: "Belize Dollar",
code: "BZD",
num_code: 084,
multiplier: 2
},
%{
name: "Canadian Dollar",
code: "CAD",
num_code: 124,
multiplier: 2
},
%{
name: "Congolese Franc",
code: "CDF",
num_code: 976,
multiplier: 2
},
%{
name: "WIR Euro",
code: "CHE",
num_code: 947,
multiplier: 2
},
%{
name: "Swiss Franc",
code: "CHF",
num_code: 756,
multiplier: 2
},
%{
name: "WIR Franc",
code: "CHW",
num_code: 948,
multiplier: 2
},
%{
name: "Unidad de Fomento",
code: "CLF",
num_code: 990,
multiplier: 4
},
%{
name: "Chilean Peso",
code: "CLP",
num_code: 152,
multiplier: 0
},
%{
name: "Yuan Renminbi",
code: "CNY",
num_code: 156,
multiplier: 2
},
%{
name: "Colombian Peso",
code: "COP",
num_code: 170,
multiplier: 2
},
%{
name: "Unidad de Valor Real",
code: "COU",
num_code: 970,
multiplier: 2
},
%{
name: "Costa Rican Colon",
code: "CRC",
num_code: 188,
multiplier: 2
},
%{
name: "Peso Convertible",
code: "CUC",
num_code: 931,
multiplier: 2
},
%{
name: "Cuban Peso",
code: "CUP",
num_code: 192,
multiplier: 2
},
%{
name: "Cabo Verde Escudo",
code: "CVE",
num_code: 132,
multiplier: 2
},
%{
name: "Czech Koruna",
code: "CZK",
num_code: 203,
multiplier: 2
},
%{
name: "Djibouti Franc",
code: "DJF",
num_code: 262,
multiplier: 0
},
%{
name: "Danish Krone",
code: "DKK",
num_code: 208,
multiplier: 2
},
%{
name: "Dominican Peso",
code: "DOP",
num_code: 214,
multiplier: 2
},
%{
name: "Algerian Dinar",
code: "DZD",
num_code: 012,
multiplier: 2
},
%{
name: "Egyptian Pound",
code: "EGP",
num_code: 818,
multiplier: 2
},
%{
name: "Nakfa",
code: "ERN",
num_code: 232,
multiplier: 2
},
%{
name: "Ethiopian Birr",
code: "ETB",
num_code: 230,
multiplier: 2
},
%{
name: "Euro",
code: "EUR",
num_code: 978,
multiplier: 2
},
%{
name: "Fiji Dollar",
code: "FJD",
num_code: 242,
multiplier: 2
},
%{
name: "Falkland Islands Pound",
code: "FKP",
num_code: 238,
multiplier: 2
},
%{
name: "Pound Sterling",
code: "GBP",
num_code: 826,
multiplier: 2
},
%{
name: "Lari",
code: "GEL",
num_code: 981,
multiplier: 2
},
%{
name: "Ghana Cedi",
code: "GHS",
num_code: 936,
multiplier: 2
},
%{
name: "Gibraltar Pound",
code: "GIP",
num_code: 292,
multiplier: 2
},
%{
name: "Dalasi",
code: "GMD",
num_code: 270,
multiplier: 2
},
%{
name: "Guinean Franc",
code: "GNF",
num_code: 324,
multiplier: 0
},
%{
name: "Quetzal",
code: "GTQ",
num_code: 320,
multiplier: 2
},
%{
name: "Guyana Dollar",
code: "GYD",
num_code: 328,
multiplier: 2
},
%{
name: "Hong Kong Dollar",
code: "HKD",
num_code: 344,
multiplier: 2
},
%{
name: "Lempira",
code: "HNL",
num_code: 340,
multiplier: 2
},
%{
name: "Kuna",
code: "HRK",
num_code: 191,
multiplier: 2
},
%{
name: "Gourde",
code: "HTG",
num_code: 332,
multiplier: 2
},
%{
name: "Forint",
code: "HUF",
num_code: 348,
multiplier: 2
},
%{
name: "Rupiah",
code: "IDR",
num_code: 360,
multiplier: 2
},
%{
name: "New Israeli Sheqel",
code: "ILS",
num_code: 376,
multiplier: 2
},
%{
name: "Indian Rupee",
code: "INR",
num_code: 356,
multiplier: 2
},
%{
name: "Iraqi Dinar",
code: "IQD",
num_code: 368,
multiplier: 3
},
%{
name: "Iranian Rial",
code: "IRR",
num_code: 364,
multiplier: 2
},
%{
name: "Iceland Krona",
code: "ISK",
num_code: 352,
multiplier: 0
},
%{
name: "Jamaican Dollar",
code: "JMD",
num_code: 388,
multiplier: 2
},
%{
name: "Jordanian Dinar",
code: "JOD",
num_code: 400,
multiplier: 3
},
%{
name: "Yen",
code: "JPY",
num_code: 392,
multiplier: 0
},
%{
name: "Kenyan Shilling",
code: "KES",
num_code: 404,
multiplier: 2
},
%{
name: "Som",
code: "KGS",
num_code: 417,
multiplier: 2
},
%{
name: "Riel",
code: "KHR",
num_code: 116,
multiplier: 2
},
%{
name: "Comorian Franc ",
code: "KMF",
num_code: 174,
multiplier: 0
},
%{
name: "North Korean Won",
code: "KPW",
num_code: 408,
multiplier: 2
},
%{
name: "Won",
code: "KRW",
num_code: 410,
multiplier: 0
},
%{
name: "Kuwaiti Dinar",
code: "KWD",
num_code: 414,
multiplier: 3
},
%{
name: "Cayman Islands Dollar",
code: "KYD",
num_code: 136,
multiplier: 2
},
%{
name: "Tenge",
code: "KZT",
num_code: 398,
multiplier: 2
},
%{
name: "Lao Kip",
code: "LAK",
num_code: 418,
multiplier: 2
},
%{
name: "Lebanese Pound",
code: "LBP",
num_code: 422,
multiplier: 2
},
%{
name: "Sri Lanka Rupee",
code: "LKR",
num_code: 144,
multiplier: 2
},
%{
name: "Liberian Dollar",
code: "LRD",
num_code: 430,
multiplier: 2
},
%{
name: "Loti",
code: "LSL",
num_code: 426,
multiplier: 2
},
%{
name: "Libyan Dinar",
code: "LYD",
num_code: 434,
multiplier: 3
},
%{
name: "Moroccan Dirham",
code: "MAD",
num_code: 504,
multiplier: 2
},
%{
name: "Moldovan Leu",
code: "MDL",
num_code: 498,
multiplier: 2
},
%{
name: "Malagasy Ariary",
code: "MGA",
num_code: 969,
multiplier: 2
},
%{
name: "Denar",
code: "MKD",
num_code: 807,
multiplier: 2
},
%{
name: "Kyat",
code: "MMK",
num_code: 104,
multiplier: 2
},
%{
name: "Tugrik",
code: "MNT",
num_code: 496,
multiplier: 2
},
%{
name: "Pataca",
code: "MOP",
num_code: 446,
multiplier: 2
},
%{
name: "Ouguiya",
code: "MRU",
num_code: 929,
multiplier: 2
},
%{
name: "Mauritius Rupee",
code: "MUR",
num_code: 480,
multiplier: 2
},
%{
name: "Rufiyaa",
code: "MVR",
num_code: 462,
multiplier: 2
},
%{
name: "Malawi Kwacha",
code: "MWK",
num_code: 454,
multiplier: 2
},
%{
name: "Mexican Peso",
code: "MXN",
num_code: 484,
multiplier: 2
},
%{
name: "Mexican Unidad de Inversion (UDI)",
code: "MXV",
num_code: 979,
multiplier: 2
},
%{
name: "Malaysian Ringgit",
code: "MYR",
num_code: 458,
multiplier: 2
},
%{
name: "Mozambique Metical",
code: "MZN",
num_code: 943,
multiplier: 2
},
%{
name: "Namibia Dollar",
code: "NAD",
num_code: 516,
multiplier: 2
},
%{
name: "Naira",
code: "NGN",
num_code: 566,
multiplier: 2
},
%{
name: "Cordoba Oro",
code: "NIO",
num_code: 558,
multiplier: 2
},
%{
name: "Norwegian Krone",
code: "NOK",
num_code: 578,
multiplier: 2
},
%{
name: "Nepalese Rupee",
code: "NPR",
num_code: 524,
multiplier: 2
},
%{
name: "New Zealand Dollar",
code: "NZD",
num_code: 554,
multiplier: 2
},
%{
name: "Rial Omani",
code: "OMR",
num_code: 512,
multiplier: 3
},
%{
name: "Balboa",
code: "PAB",
num_code: 590,
multiplier: 2
},
%{
name: "Sol",
code: "PEN",
num_code: 604,
multiplier: 2
},
%{
name: "Kina",
code: "PGK",
num_code: 598,
multiplier: 2
},
%{
name: "Philippine Piso",
code: "PHP",
num_code: 608,
multiplier: 2
},
%{
name: "Pakistan Rupee",
code: "PKR",
num_code: 586,
multiplier: 2
},
%{
name: "Zloty",
code: "PLN",
num_code: 985,
multiplier: 2
},
%{
name: "Guarani",
code: "PYG",
num_code: 600,
multiplier: 0
},
%{
name: "Qatari Rial",
code: "QAR",
num_code: 634,
multiplier: 2
},
%{
name: "Romanian Leu",
code: "RON",
num_code: 946,
multiplier: 2
},
%{
name: "Serbian Dinar",
code: "RSD",
num_code: 941,
multiplier: 2
},
%{
name: "Russian Ruble",
code: "RUB",
num_code: 643,
multiplier: 2
},
%{
name: "Rwanda Franc",
code: "RWF",
num_code: 646,
multiplier: 0
},
%{
name: "Saudi Riyal",
code: "SAR",
num_code: 682,
multiplier: 2
},
%{
name: "Solomon Islands Dollar",
code: "SBD",
num_code: 090,
multiplier: 2
},
%{
name: "Seychelles Rupee",
code: "SCR",
num_code: 690,
multiplier: 2
},
%{
name: "Sudanese Pound",
code: "SDG",
num_code: 938,
multiplier: 2
},
%{
name: "Swedish Krona",
code: "SEK",
num_code: 752,
multiplier: 2
},
%{
name: "Singapore Dollar",
code: "SGD",
num_code: 702,
multiplier: 2
},
%{
name: "Saint Helena Pound",
code: "SHP",
num_code: 654,
multiplier: 2
},
%{
name: "Leone",
code: "SLL",
num_code: 694,
multiplier: 2
},
%{
name: "Somali Shilling",
code: "SOS",
num_code: 706,
multiplier: 2
},
%{
name: "Surinam Dollar",
code: "SRD",
num_code: 968,
multiplier: 2
},
%{
name: "South Sudanese Pound",
code: "SSP",
num_code: 728,
multiplier: 2
},
%{
name: "Dobra",
code: "STN",
num_code: 930,
multiplier: 2
},
%{
name: "El Salvador Colon",
code: "SVC",
num_code: 222,
multiplier: 2
},
%{
name: "Syrian Pound",
code: "SYP",
num_code: 760,
multiplier: 2
},
%{
name: "Lilangeni",
code: "SZL",
num_code: 748,
multiplier: 2
},
%{
name: "Baht",
code: "THB",
num_code: 764,
multiplier: 2
},
%{
name: "Somoni",
code: "TJS",
num_code: 972,
multiplier: 2
},
%{
name: "Turkmenistan New Manat",
code: "TMT",
num_code: 934,
multiplier: 2
},
%{
name: "Tunisian Dinar",
code: "TND",
num_code: 788,
multiplier: 3
},
%{
name: "Pa’anga",
code: "TOP",
num_code: 776,
multiplier: 2
},
%{
name: "Turkish Lira",
code: "TRY",
num_code: 949,
multiplier: 2
},
%{
name: "Trinidad and Tobago Dollar",
code: "TTD",
num_code: 780,
multiplier: 2
},
%{
name: "New Taiwan Dollar",
code: "TWD",
num_code: 901,
multiplier: 2
},
%{
name: "Tanzanian Shilling",
code: "TZS",
num_code: 834,
multiplier: 2
},
%{
name: "Hryvnia",
code: "UAH",
num_code: 980,
multiplier: 2
},
%{
name: "Uganda Shilling",
code: "UGX",
num_code: 800,
multiplier: 0
},
%{
name: "US Dollar",
code: "USD",
num_code: 840,
multiplier: 2
},
%{
name: "US Dollar (Next day)",
code: "USN",
num_code: 997,
multiplier: 2
},
%{
name: "Uruguay Peso en Unidades Indexadas (URUIURUI)",
code: "UYI",
num_code: 940,
multiplier: 0
},
%{
name: "Peso Uruguayo",
code: "UYU",
num_code: 858,
multiplier: 2
},
%{
name: "Uzbekistan Sum",
code: "UZS",
num_code: 860,
multiplier: 2
},
%{
name: "Bolívar",
code: "VEF",
num_code: 937,
multiplier: 2
},
%{
name: "Dong",
code: "VND",
num_code: 704,
multiplier: 0
},
%{
name: "Vatu",
code: "VUV",
num_code: 548,
multiplier: 0
},
%{
name: "Tala",
code: "WST",
num_code: 882,
multiplier: 2
},
%{
name: "CFA Franc BEAC",
code: "XAF",
num_code: 950,
multiplier: 0
},
%{
name: "East Caribbean Dollar",
code: "XCD",
num_code: 951,
multiplier: 2
},
%{
name: "CFA Franc BCEAO",
code: "XOF",
num_code: 952,
multiplier: 0
},
%{
name: "CFP Franc",
code: "XPF",
num_code: 953,
multiplier: 0
},
%{
name: "Yemeni Rial",
code: "YER",
num_code: 886,
multiplier: 2
},
%{
name: "Rand",
code: "ZAR",
num_code: 710,
multiplier: 2
},
%{
name: "Zambian Kwacha",
code: "ZMW",
num_code: 967,
multiplier: 2
},
%{
name: "Zimbabwe Dollar",
code: "ZWL",
num_code: 932,
multiplier: 2
}
]
@doc """
Returns the list of ISO 4217 currencies.
## Examples
iex> get_coins
[
%{
name: "UAE Dirham",
code: "AED",
num_code: 784,
multiplier: 2
},
...
]
"""
def get_coins do
@coins
end
@doc """
Returns a currency by its ISO 4217 code.
## Examples
iex> get_by_code("BRL")
%{
name: "Brazilian Real",
code: "BRL",
num_code: 986,
multiplier: 2
}
"""
def get_by_code(code) do
@coins |> Enum.find(&(&1.code == code))
end
end
| 16.888139 | 60 | 0.434534 |
735c925067d1d0a38a35988622a682837c9db619 | 861 | ex | Elixir | lib/vite.ex | Virtual-Repetitions/vite_phx | 8f102e77ed36ea3b8f39afa0f9716c25aa9b7879 | ["MIT"] | null | null | null | lib/vite.ex | Virtual-Repetitions/vite_phx | 8f102e77ed36ea3b8f39afa0f9716c25aa9b7879 | ["MIT"] | null | null | null | lib/vite.ex | Virtual-Repetitions/vite_phx | 8f102e77ed36ea3b8f39afa0f9716c25aa9b7879 | ["MIT"] | null | null | null |
defmodule Vite do
@moduledoc """
Documentation for `Vite`.
"""
alias Vite.{Config, Manifest, React, View}
defdelegate vite_client, to: View
defdelegate inlined_phx_manifest, to: View
defdelegate react_refresh_snippet, to: React
def vite_snippet(entrypoint_name, opts \\ []) do
current_env = Keyword.get(opts, :current_env, Config.current_env())
prefix = Keyword.get(opts, :prefix, "/")
case current_env do
:prod ->
case Manifest.entry(entrypoint_name) do
nil ->
{:safe, ""}
entry_chunk ->
View.entrypoint_snippet(entry_chunk, Manifest.descendent_chunks(entry_chunk),
prefix: prefix
)
end
_ ->
View.dev_entrypoint_snippet(entrypoint_name)
end
end
def is_prod() do
Vite.Config.current_env() == :prod
end
end
| 23.27027 | 89 | 0.630662 |
735cc72a390b27797a2030e69c041a05a382facd | 4,873 | ex | Elixir | lib/ex_ami/message.ex | palexanderm/ex_ami | 3f713e7a18feebe14fd89c8adfec4cc3af1c3f05 | [
"MIT"
] | 18 | 2015-02-18T23:30:48.000Z | 2020-12-07T11:02:27.000Z | lib/ex_ami/message.ex | palexanderm/ex_ami | 3f713e7a18feebe14fd89c8adfec4cc3af1c3f05 | [
"MIT"
] | 2 | 2015-09-03T12:31:13.000Z | 2018-01-17T00:49:13.000Z | lib/ex_ami/message.ex | palexanderm/ex_ami | 3f713e7a18feebe14fd89c8adfec4cc3af1c3f05 | [
"MIT"
] | 8 | 2016-05-11T01:17:28.000Z | 2021-12-24T20:56:26.000Z | defmodule ExAmi.Message do
use ExAmi.Logger
@eol "\r\n"
defmodule Message do
defstruct attributes: %{}, variables: %{}
def new, do: %__MODULE__{}
def new(attributes, variables),
do: %__MODULE__{attributes: attributes, variables: variables}
def new(opts), do: struct(new(), opts)
end
def new_message, do: Message.new()
def new_message(attributes, variables), do: Message.new(attributes, variables)
def new_action(name) do
action_id =
:os.timestamp()
|> Tuple.to_list()
|> Enum.map(&Integer.to_string(&1))
|> Enum.reduce("", &(&2 <> &1))
set_all(new_message(), [{"Action", name}, {"ActionID", action_id}])
end
def new_action(name, attributes) do
name
|> new_action()
|> set_all(attributes)
end
def new_action(name, attributes, variables) do
name
|> new_action()
|> set_all(attributes)
|> set_all_variables(variables)
end
def get(%Message{attributes: attributes}, key) do
case Map.fetch(attributes, key) do
{:ok, value} -> {:ok, value}
_ -> :notfound
end
end
def get_variable(%Message{variables: variables}, key) do
case Map.fetch(variables, key) do
{:ok, value} -> {:ok, value}
_ -> :notfound
end
end
def set(key, value) do
set(new_message(), key, value)
end
def set(%Message{} = message, key, value) do
message.attributes
|> Map.put(key, value)
|> new_message(message.variables)
end
def add_response_data(%Message{attributes: attributes} = message, line) do
response_data =
case attributes["ResponseData"] do
nil -> line
acc -> acc <> "\n" <> line
end
%Message{message | attributes: Map.put(attributes, "ResponseData", response_data)}
end
def set_all(%Message{} = message, attributes) do
Enum.reduce(attributes, message, fn {key, value}, acc -> set(acc, key, value) end)
end
def set_variable(%Message{variables: variables, attributes: attributes}, key, value),
do: new_message(attributes, Map.put(variables, key, value))
def set_all_variables(%Message{} = message, variables) do
Enum.reduce(variables, message, fn {key, value}, acc -> set_variable(acc, key, value) end)
end
def marshall(%Message{attributes: attributes, variables: variables}) do
Enum.reduce(Map.to_list(attributes), "", fn {k, v}, acc -> marshall(acc, k, v) end) <>
Enum.reduce(Map.to_list(variables), "", fn {k, v}, acc -> marshall_variable(acc, k, v) end) <>
@eol
end
def marshall(key, value), do: key <> ": " <> value <> @eol
def marshall(acc, key, value), do: acc <> marshall(key, value)
def marshall_variable(key, value), do: marshall("Variable", key <> "=" <> value)
def marshall_variable(acc, key, value), do: acc <> marshall("Variable", key <> "=" <> value)
def explode_lines(text), do: String.split(text, "\r\n", trim: true)
def format_log(%{attributes: attributes}) do
cond do
value = Map.get(attributes, "Event") ->
format_log("Event", value, attributes)
value = Map.get(attributes, "Response") ->
format_log("Response", value, attributes)
true ->
{:error, :notfound}
end
end
def format_log(key, value, attributes) do
attributes
|> Map.delete(key)
|> Map.to_list()
|> Enum.reduce(key <> ": \"" <> value <> "\"", fn {k, v}, acc ->
acc <> ", " <> k <> ": \"" <> v <> "\""
end)
end
def unmarshall(text) do
do_unmarshall(new_message(), explode_lines(text))
end
defp do_unmarshall(message, []), do: message
defp do_unmarshall(message, [line | tail]) do
~r/^([^\s]+): (.*)/
|> Regex.run(line)
|> case do
[_, key, value] ->
set(message, key, value)
nil ->
add_response_data(message, line)
end
|> do_unmarshall(tail)
end
def is_response(%Message{} = message), do: is_type(message, "Response")
def is_event(%Message{} = message), do: is_type(message, "Event")
def is_response_success(%Message{} = message) do
{:ok, value} = get(message, "Response")
value == "Success"
end
def is_response_error(%Message{} = message) do
{:ok, value} = get(message, "Response")
value == "Error"
end
def is_response_complete(%Message{} = message) do
case get(message, "Message") do
:notfound ->
true
{:ok, response_text} ->
!String.match?(response_text, ~r/ollow/)
end
end
def is_event_last_for_response(%Message{} = message) do
with :notfound <- get(message, "EventList"),
:notfound <- get(message, "Event") do
false
else
{:ok, response_text} ->
String.match?(response_text, ~r/omplete/)
_ ->
false
end
end
defp is_type(%Message{} = message, type) do
case get(message, type) do
{:ok, _} -> true
_ -> false
end
end
end
| 26.198925 | 100 | 0.611328 |
735cd4d4d398ed4b83a54c5d5366ddfe6119298a | 175 | ex | Elixir | lib/styx.ex | Vandise/styx | ca1224830c0c6052efb05748cf6e18670eae4400 | ["MIT"] | null | null | null | lib/styx.ex | Vandise/styx | ca1224830c0c6052efb05748cf6e18670eae4400 | ["MIT"] | null | null | null | lib/styx.ex | Vandise/styx | ca1224830c0c6052efb05748cf6e18670eae4400 | ["MIT"] | null | null | null |
defmodule Styx do
@moduledoc """
Styx Data Pipeline Collector
"""
@doc """
Styx data collector
## Help
./styx
"""
def main(_args) do
:ok
end
end
| 9.722222 | 30 | 0.565714 |
735cdbe8e74e7eafd3a7084d4f10bd47563b364b | 1,812 | ex | Elixir | clients/monitoring/lib/google_api/monitoring/v3/model/basic_authentication.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | clients/monitoring/lib/google_api/monitoring/v3/model/basic_authentication.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/monitoring/lib/google_api/monitoring/v3/model/basic_authentication.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Monitoring.V3.Model.BasicAuthentication do
@moduledoc """
The authentication parameters to provide to the specified resource or URL that requires a username and password. Currently, only Basic HTTP authentication (https://tools.ietf.org/html/rfc7617) is supported in Uptime checks.
## Attributes
* `password` (*type:* `String.t`, *default:* `nil`) - The password to use when authenticating with the HTTP server.
* `username` (*type:* `String.t`, *default:* `nil`) - The username to use when authenticating with the HTTP server.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:password => String.t(),
:username => String.t()
}
field(:password)
field(:username)
end
defimpl Poison.Decoder, for: GoogleApi.Monitoring.V3.Model.BasicAuthentication do
def decode(value, options) do
GoogleApi.Monitoring.V3.Model.BasicAuthentication.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Monitoring.V3.Model.BasicAuthentication do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.24 | 225 | 0.738411 |
735d2682a26c1d9c82e75f8214bb0b10e7e0c4d2 | 4,085 | exs | Elixir | mix.exs | ROARforGood/exzeitable | 6126abf96d10ecd79bc413909e868c4d7999660c | ["MIT"] | null | null | null | mix.exs | ROARforGood/exzeitable | 6126abf96d10ecd79bc413909e868c4d7999660c | ["MIT"] | null | null | null | mix.exs | ROARforGood/exzeitable | 6126abf96d10ecd79bc413909e868c4d7999660c | ["MIT"] | null | null | null |
defmodule Exzeitable.MixProject do
use Mix.Project
@version "0.5.1"
def project do
[
app: :exzeitable,
version: @version,
elixir: "~> 1.12",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
build_embedded: Mix.env() == :prod,
aliases: aliases(),
deps: deps(),
test_coverage: [tool: ExCoveralls],
dialyzer: [
flags: [
:no_behaviours,
:no_contracts,
:no_fail_call,
:no_fun_app,
:no_improper_lists,
:no_match,
:no_missing_calls,
:no_opaque,
:no_return,
:no_undefined_callbacks,
:no_unused,
:unknown,
:unmatched_returns
]
],
# Hex
description: "Dynamically updating, searchable, sortable datatables with Phoenix LiveView",
package: package(),
# Docs
name: "Exzeitable",
docs: docs()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
if Mix.env() == :prod do
[
extra_applications: [:logger, :postgrex, :ecto]
]
else
[
mod: {Exzeitable.Application, []},
extra_applications: [:logger, :postgrex, :ecto, :timex, :runtime_tools]
]
end
end
defp package do
[
maintainers: ["Alan Vardy"],
licenses: ["MIT"],
links: %{github: "https://github.com/alanvardy/exzeitable"},
files: [
"lib/exzeitable.ex",
"lib/exzeitable",
"mix.exs",
"README.md"
]
]
end
defp docs do
[
source_ref: "v#{@version}",
main: "README",
canonical: "http://hexdocs.pm/exzeitable",
source_url: "https://github.com/alanvardy/exzeitable",
logo: "screenshot.png",
filter_prefix: "Exzeitable",
extras: [
"README.md": [filename: "README"],
"CHANGELOG.md": [filename: "CHANGELOG"],
"CSS.md": [filename: "CSS"]
]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.6.2"},
{:phoenix_ecto, "~> 4.4"},
{:ecto_sql, "~> 3.6"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 3.0"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:phoenix_live_view, "~> 0.16.0"},
{:floki, ">= 0.30.0", only: :test},
{:phoenix_live_dashboard, "~> 0.5"},
{:esbuild, "~> 0.2", runtime: Mix.env() == :dev},
{:swoosh, "~> 1.3"},
{:telemetry_metrics, "~> 0.6"},
{:telemetry_poller, "~> 1.0"},
{:gettext, "~> 0.18"},
{:jason, "~> 1.2"},
{:plug_cowboy, "~> 2.5"},
{:timex, "~> 3.5", only: [:dev, :test, :systemtest]},
{:ex_check, "~>0.12", only: :dev, runtime: false},
{:credo, "~> 1.5", only: [:dev, :test], runtime: false},
{:dialyxir, "~> 1.1", only: [:dev, :test], runtime: false},
{:excoveralls, "~> 0.10", only: :test, runtime: false},
{:ex_doc, "~> 0.21", only: :dev, runtime: false},
{:inch_ex, github: "rrrene/inch_ex", only: [:dev, :test]}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
# $ mix setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
setup: ["deps.get", "ecto.setup"],
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"],
"assets.deploy": ["esbuild default --minify", "phx.digest"],
"cypress.open": ["cmd ./cypress-open.sh"],
"cypress.run": ["cmd ./cypress-run.sh"]
]
end
end
| 28.172414 | 97 | 0.543696 |