hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f7bb62a623d84e2c3f4cb06318e6fb767f6fd136 | 3,330 | ex | Elixir | lib/elixir_ecommerce/product.ex | abmBispo/elixir-ecommerce | 0507f7621d68ba8f0f65409a1a503683b7c0d37b | [
"MIT"
] | 4 | 2020-05-29T03:33:02.000Z | 2021-08-21T23:01:48.000Z | lib/elixir_ecommerce/product.ex | abmBispo/elixir-ecommerce | 0507f7621d68ba8f0f65409a1a503683b7c0d37b | [
"MIT"
] | 2 | 2020-07-29T01:50:46.000Z | 2021-08-31T20:10:47.000Z | lib/elixir_ecommerce/product.ex | abmBispo/elixir-ecommerce | 0507f7621d68ba8f0f65409a1a503683b7c0d37b | [
"MIT"
] | 1 | 2022-03-21T18:13:21.000Z | 2022-03-21T18:13:21.000Z | defmodule ElixirEcommerce.Product do
use Ecto.Schema
import Ecto.Query
import Ecto.Changeset
alias ElixirEcommerce.{
ProductImages,
Repo,
Department,
Product,
ProductImages,
ElasticsearchCluster,
ProductAttribute
}
@required_fields [:name, :amount, :price, :department, :description]
@cast_fields [:name, :amount, :price, :description]
schema "products" do
field :name, :string
field :description, :string
field :amount, :integer
field :price, :integer
belongs_to :department, Department
has_many :images, ProductImages, on_delete: :delete_all
has_many :attributes, ProductAttribute, on_delete: :delete_all
timestamps()
end
@doc false
def changeset(product, attrs) when is_map_key(attrs, :department) do
product
|> cast(attrs, @cast_fields)
|> put_assoc(:department, attrs[:department])
|> validate_required(@required_fields)
end
def changeset(product, attrs) do
product
|> cast(attrs, @cast_fields)
|> validate_required(@required_fields)
end
def create(attrs) do
%Product{}
|> Product.changeset(attrs)
|> Repo.insert()
|> create_product_images(attrs)
|> create_attributes(attrs)
end
def all(params \\ %{page: 1, page_size: 9}) do
Product
|> preload(:department)
|> preload(:images)
|> order_by(desc: :inserted_at)
|> Repo.paginate(params)
end
def retrieve(ids) when is_list(ids) do
Product
|> preload(:department)
|> where([p], p.id in ^ids)
|> Repo.all
end
def retrieve(id) when is_number(id) do
Product
|> preload(:images)
|> Repo.get!(id)
end
def retrieve(id) when is_binary(id) do
Product
|> preload(:images)
|> Repo.get!(id)
end
def retrieve(params) do
Product
|> preload(:department)
|> where(^params)
end
def text_search(query) do
{:ok, %{ "hits" => %{ "hits" => search }} } =
ElasticsearchCluster
|> Elasticsearch.post("/products/_doc/_search",
%{
query: %{
multi_match: %{
query: query,
fields: [:name, :description],
fuzziness: "AUTO"
}
}
}
)
search
|> Enum.map(fn product -> product["_id"] end)
|> Product.retrieve()
end
def update(%Product{} = product, attrs \\ %{}) do
product
|> Product.changeset(attrs)
|> Repo.update()
end
def delete(%Product{} = product) do
Repo.delete(product)
end
defp create_product_images({:error, errors}, attrs), do: {:error, errors}
defp create_product_images({:ok, product}, attrs) do
if is_map_key(attrs, :images) do
Enum.each attrs[:images], fn ({_, image}) ->
{:ok, _} = ProductImages.create(image: image, product: product)
end
end
{:ok, product}
end
defp create_attributes({:error, errors}, attrs), do: {:error, errors, attrs}
defp create_attributes({:ok, product}, attrs) do
if is_map_key(attrs, :attributes) do
Enum.each attrs[:attributes], fn ({_, attribute}) ->
{:ok, _} = ProductAttribute.create(title: attribute["title"], description: attribute["description"], product: product)
end
end
{:ok, product, attrs}
end
end
| 24.485294 | 126 | 0.608408 |
f7bb735ecfbd0483bfeb2dd35f4315cc03e00cad | 5,718 | exs | Elixir | test/ja_serializer/builder/pagination_links_test.exs | mbta/ja_serializer | efb1d4489809e31e4b54b4af9e85f0b3ceeb650b | [
"Apache-2.0"
] | null | null | null | test/ja_serializer/builder/pagination_links_test.exs | mbta/ja_serializer | efb1d4489809e31e4b54b4af9e85f0b3ceeb650b | [
"Apache-2.0"
] | 1 | 2021-06-25T13:28:34.000Z | 2021-06-25T13:28:34.000Z | test/ja_serializer/builder/pagination_links_test.exs | mbta/ja_serializer | efb1d4489809e31e4b54b4af9e85f0b3ceeb650b | [
"Apache-2.0"
defmodule JaSerializer.Builder.PaginationLinksTest do
  # Not `async: true`: these tests mutate global Application config.
  use ExUnit.Case

  alias JaSerializer.Builder.PaginationLinks

  # Clear any pagination config a test set, so configuration cannot leak
  # between tests.
  setup do
    on_exit(fn ->
      Application.delete_env(:ja_serializer, :page_key)
      Application.delete_env(:ja_serializer, :page_base_url)
      Application.delete_env(:ja_serializer, :page_size_key)
      Application.delete_env(:ja_serializer, :page_number_key)
    end)
  end

  test "pagination" do
    data = %{
      number: 10,
      size: 20,
      total: 30
    }

    conn = %Plug.Conn{query_params: %{}}
    links = PaginationLinks.build(data, conn)

    assert URI.decode(links[:first]) == "?page[number]=1&page[size]=20"
    assert URI.decode(links[:prev]) == "?page[number]=9&page[size]=20"
    assert URI.decode(links[:next]) == "?page[number]=11&page[size]=20"
    assert URI.decode(links[:last]) == "?page[number]=30&page[size]=20"
  end

  test "pagination keys are configurable" do
    Application.put_env(:ja_serializer, :page_key, "pages")
    Application.put_env(:ja_serializer, :page_number_key, "offset")
    Application.put_env(:ja_serializer, :page_size_key, "limit")

    data = %{
      number: 10,
      size: 20,
      total: 30
    }

    conn = %Plug.Conn{query_params: %{}}
    links = PaginationLinks.build(data, conn)

    assert URI.decode(links[:first]) == "?pages[limit]=20&pages[offset]=1"
    assert URI.decode(links[:prev]) == "?pages[limit]=20&pages[offset]=9"
    assert URI.decode(links[:next]) == "?pages[limit]=20&pages[offset]=11"
    assert URI.decode(links[:last]) == "?pages[limit]=20&pages[offset]=30"
  end

  test "when current page is first, do not include first, prev links" do
    data = %{
      number: 1,
      size: 20,
      total: 30
    }

    conn = %Plug.Conn{query_params: %{}}

    links =
      data
      |> PaginationLinks.build(conn)
      |> Map.keys()
      |> Enum.sort()

    assert Enum.sort([:self, :last, :next]) == links
  end

  test "when current page is in the middle, includes all links" do
    data = %{
      number: 10,
      size: 20,
      total: 30
    }

    conn = %Plug.Conn{query_params: %{}}

    links =
      data
      |> PaginationLinks.build(conn)
      |> Map.keys()
      |> Enum.sort()

    assert Enum.sort([:self, :first, :prev, :last, :next]) == links
  end

  test "when current page is the last, do not include last, next links" do
    data = %{
      number: 30,
      size: 20,
      total: 30
    }

    conn = %Plug.Conn{query_params: %{}}

    links =
      data
      |> PaginationLinks.build(conn)
      |> Map.keys()
      |> Enum.sort()

    assert Enum.sort([:self, :first, :prev]) == links
  end

  test "when result contains no data, include only self link" do
    data = %{
      number: 1,
      size: 20,
      total: 0
    }

    conn = %Plug.Conn{query_params: %{}}

    links =
      data
      |> PaginationLinks.build(conn)
      |> Map.keys()
      |> Enum.sort()

    assert Enum.sort([:self]) == links
  end

  test "url is taken from current conn url, params forwarded" do
    data = %{
      number: 30,
      size: 20,
      total: 30
    }

    conn = %Plug.Conn{
      query_params: %{"filter" => %{"foo" => "bar"}},
      request_path: "/api/v1/posts/"
    }

    links = PaginationLinks.build(data, conn)

    assert links[:first] ==
             "/api/v1/posts/?filter[foo]=bar&page[number]=1&page[size]=20"
  end

  test "url opts override conn url, old page params ignored" do
    data = %{
      number: 30,
      size: 20,
      total: 30,
      base_url: "/api/v2/posts"
    }

    conn = %Plug.Conn{
      query_params: %{"page" => %{"page" => 1}},
      request_path: "/api/v1/posts/"
    }

    links = PaginationLinks.build(data, conn)

    assert links[:first] == "/api/v2/posts?page[number]=1&page[size]=20"
  end

  test "url opts override conn url, old page params ignored when page_key is nil" do
    Application.put_env(:ja_serializer, :page_key, nil)

    data = %{
      number: 1,
      size: 20,
      total: 30
    }

    conn = %Plug.Conn{
      query_params: %{"number" => 4}
    }

    links = PaginationLinks.build(data, conn)

    # With a nil page_key the page params are written without nesting.
    assert links[:self] == "?number=1&size=20"
  end

  test "base_url can be configured globally" do
    Application.put_env(
      :ja_serializer,
      :page_base_url,
      "http://api.example.com"
    )

    data = %{
      number: 10,
      size: 20,
      total: 30
    }

    conn = %Plug.Conn{query_params: %{}}
    links = PaginationLinks.build(data, conn)

    assert URI.decode(links[:first]) ==
             "http://api.example.com?page[number]=1&page[size]=20"

    assert URI.decode(links[:prev]) ==
             "http://api.example.com?page[number]=9&page[size]=20"

    assert URI.decode(links[:next]) ==
             "http://api.example.com?page[number]=11&page[size]=20"

    assert URI.decode(links[:last]) ==
             "http://api.example.com?page[number]=30&page[size]=20"
  end

  test "base_url can be overridden locally" do
    Application.put_env(
      :ja_serializer,
      :page_base_url,
      "http://api.example.com"
    )

    # The per-call :base_url takes precedence over the global config above.
    data = %{
      number: 10,
      size: 20,
      total: 30,
      base_url: "http://api2.example.com"
    }

    conn = %Plug.Conn{query_params: %{}}
    links = PaginationLinks.build(data, conn)

    assert URI.decode(links[:first]) ==
             "http://api2.example.com?page[number]=1&page[size]=20"

    assert URI.decode(links[:prev]) ==
             "http://api2.example.com?page[number]=9&page[size]=20"

    assert URI.decode(links[:next]) ==
             "http://api2.example.com?page[number]=11&page[size]=20"

    assert URI.decode(links[:last]) ==
             "http://api2.example.com?page[number]=30&page[size]=20"
  end
end
f7bb740d003ba8d179fba00d39b4e62f69261b40 | 12,863 | ex | Elixir | lib/mapper/cp437.ex | nikneroz/exconv | 31a9f424462f88024af3afb32d2cb80160f07ebf | [
"MIT"
] | null | null | null | lib/mapper/cp437.ex | nikneroz/exconv | 31a9f424462f88024af3afb32d2cb80160f07ebf | [
"MIT"
] | 1 | 2020-07-16T09:38:33.000Z | 2020-07-16T09:38:33.000Z | lib/mapper/cp437.ex | nikneroz/exconv | 31a9f424462f88024af3afb32d2cb80160f07ebf | [
"MIT"
defmodule Exconv.Mapper.Cp437 do
  @moduledoc """
  Maps single CP437 (IBM PC / "OEM United States") byte values to their
  Unicode codepoints.

  Returns `nil` for control bytes that have no mapping here, and raises
  `FunctionClauseError` for values outside `0..255` — the same behaviour
  as the original clause-per-byte implementation.
  """

  # Unicode codepoints for the upper half of CP437 (bytes 0x80..0xFF), in
  # byte order, per the unicode.org CP437 reference table. A tuple gives
  # O(1) `elem/2` lookup at runtime.
  @upper_half {199, 252, 233, 226, 228, 224, 229, 231, 234, 235, 232, 239, 238, 236, 196, 197,
               201, 230, 198, 244, 246, 242, 251, 249, 255, 214, 220, 162, 163, 165, 8359, 402,
               225, 237, 243, 250, 241, 209, 170, 186, 191, 8976, 172, 189, 188, 161, 171, 187,
               9617, 9618, 9619, 9474, 9508, 9569, 9570, 9558, 9557, 9571, 9553, 9559, 9565, 9564, 9563, 9488,
               9492, 9524, 9516, 9500, 9472, 9532, 9566, 9567, 9562, 9556, 9577, 9574, 9568, 9552, 9580, 9575,
               9576, 9572, 9573, 9561, 9560, 9554, 9555, 9579, 9578, 9496, 9484, 9608, 9604, 9612, 9616, 9600,
               945, 223, 915, 960, 931, 963, 181, 964, 934, 920, 937, 948, 8734, 966, 949, 8745,
               8801, 177, 8805, 8804, 8992, 8993, 247, 8776, 176, 8729, 183, 8730, 8319, 178, 9632, 160}

  # Control bytes that keep their ASCII value (BEL, BS, TAB, LF, VT, FF,
  # CR, ESC); every other byte below 0x20 maps to `nil`.
  @identity_controls [7, 8, 9, 10, 11, 12, 13, 27]

  @doc """
  Converts one CP437 byte (`0..255`) to its Unicode codepoint, or `nil`
  for unmapped control characters.
  """
  @spec to_unicode(0..255) :: non_neg_integer() | nil
  def to_unicode(byte) when byte in 128..255, do: elem(@upper_half, byte - 128)
  # 0x20..0x7F are identical in ASCII/Unicode (incl. 0x7F DEL).
  def to_unicode(byte) when byte in 32..127, do: byte
  def to_unicode(byte) when byte in @identity_controls, do: byte
  def to_unicode(byte) when byte in 0..31, do: nil
end
f7bb76c404004ae46c274841e6f6bc1f0ce360c1 | 28,870 | exs | Elixir | lib/elixir/test/elixir/calendar/datetime_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/elixir/test/elixir/calendar/datetime_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/elixir/test/elixir/calendar/datetime_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z | Code.require_file("../test_helper.exs", __DIR__)
Code.require_file("holocene.exs", __DIR__)
Code.require_file("fakes.exs", __DIR__)
defmodule DateTimeTest do
use ExUnit.Case
doctest DateTime
test "sigil_U" do
assert ~U[2000-01-01T12:34:56Z] ==
%DateTime{
calendar: Calendar.ISO,
year: 2000,
month: 1,
day: 1,
hour: 12,
minute: 34,
second: 56,
std_offset: 0,
utc_offset: 0,
time_zone: "Etc/UTC",
zone_abbr: "UTC"
}
assert ~U[2000-01-01T12:34:56+00:00 Calendar.Holocene] ==
%DateTime{
calendar: Calendar.Holocene,
year: 2000,
month: 1,
day: 1,
hour: 12,
minute: 34,
second: 56,
std_offset: 0,
utc_offset: 0,
time_zone: "Etc/UTC",
zone_abbr: "UTC"
}
assert ~U[2000-01-01 12:34:56+00:00] ==
%DateTime{
calendar: Calendar.ISO,
year: 2000,
month: 1,
day: 1,
hour: 12,
minute: 34,
second: 56,
std_offset: 0,
utc_offset: 0,
time_zone: "Etc/UTC",
zone_abbr: "UTC"
}
assert ~U[2000-01-01 12:34:56Z Calendar.Holocene] ==
%DateTime{
calendar: Calendar.Holocene,
year: 2000,
month: 1,
day: 1,
hour: 12,
minute: 34,
second: 56,
std_offset: 0,
utc_offset: 0,
time_zone: "Etc/UTC",
zone_abbr: "UTC"
}
assert_raise ArgumentError,
~s/cannot parse "2001-50-50T12:34:56Z" as UTC DateTime for Calendar.ISO, reason: :invalid_date/,
fn -> Code.eval_string("~U[2001-50-50T12:34:56Z]") end
assert_raise ArgumentError,
~s/cannot parse "2001-01-01T12:34:65Z" as UTC DateTime for Calendar.ISO, reason: :invalid_time/,
fn -> Code.eval_string("~U[2001-01-01T12:34:65Z]") end
assert_raise ArgumentError,
~s/cannot parse "2001-01-01T12:34:56\+01:00" as UTC DateTime for Calendar.ISO, reason: :non_utc_offset/,
fn -> Code.eval_string("~U[2001-01-01T12:34:56+01:00]") end
assert_raise ArgumentError,
~s/cannot parse "2001-01-01 12:34:56Z notalias" as UTC DateTime for Calendar.ISO, reason: :invalid_format/,
fn -> Code.eval_string("~U[2001-01-01 12:34:56Z notalias]") end
assert_raise ArgumentError,
~s/cannot parse "2001-01-01T12:34:56Z notalias" as UTC DateTime for Calendar.ISO, reason: :invalid_format/,
fn -> Code.eval_string("~U[2001-01-01T12:34:56Z notalias]") end
assert_raise ArgumentError,
~s/cannot parse "2001-50-50T12:34:56Z" as UTC DateTime for Calendar.Holocene, reason: :invalid_date/,
fn -> Code.eval_string("~U[2001-50-50T12:34:56Z Calendar.Holocene]") end
assert_raise ArgumentError,
~s/cannot parse "2001-01-01T12:34:65Z" as UTC DateTime for Calendar.Holocene, reason: :invalid_time/,
fn -> Code.eval_string("~U[2001-01-01T12:34:65Z Calendar.Holocene]") end
assert_raise ArgumentError,
~s/cannot parse "2001-01-01T12:34:56+01:00 Calendar.Holocene" as UTC DateTime for Calendar.Holocene, reason: :non_utc_offset/,
fn -> Code.eval_string("~U[2001-01-01T12:34:56+01:00 Calendar.Holocene]") end
assert_raise UndefinedFunctionError, fn ->
Code.eval_string("~U[2001-01-01 12:34:56 UnknownCalendar]")
end
assert_raise UndefinedFunctionError, fn ->
Code.eval_string("~U[2001-01-01T12:34:56 UnknownCalendar]")
end
end
test "to_string/1" do
datetime = %DateTime{
year: 2000,
month: 2,
day: 29,
zone_abbr: "BRM",
hour: 23,
minute: 0,
second: 7,
microsecond: {0, 0},
utc_offset: -12600,
std_offset: 3600,
time_zone: "Brazil/Manaus"
}
assert to_string(datetime) == "2000-02-29 23:00:07-02:30 BRM Brazil/Manaus"
assert DateTime.to_string(datetime) == "2000-02-29 23:00:07-02:30 BRM Brazil/Manaus"
assert DateTime.to_string(Map.from_struct(datetime)) ==
"2000-02-29 23:00:07-02:30 BRM Brazil/Manaus"
assert to_string(%{datetime | calendar: FakeCalendar}) ==
"29/2/2000F23::0::7 Brazil/Manaus BRM -12600 3600"
assert DateTime.to_string(%{datetime | calendar: FakeCalendar}) ==
"29/2/2000F23::0::7 Brazil/Manaus BRM -12600 3600"
end
test "inspect/1" do
utc_datetime = ~U[2000-01-01 23:00:07.005Z]
assert inspect(utc_datetime) == "~U[2000-01-01 23:00:07.005Z]"
assert inspect(%{utc_datetime | calendar: FakeCalendar}) ==
"~U[1/1/2000F23::0::7 Etc/UTC UTC 0 0 FakeCalendar]"
datetime = %DateTime{
year: 2000,
month: 2,
day: 29,
zone_abbr: "BRM",
hour: 23,
minute: 0,
second: 7,
microsecond: {0, 0},
utc_offset: -12600,
std_offset: 3600,
time_zone: "Brazil/Manaus"
}
assert inspect(datetime) == "#DateTime<2000-02-29 23:00:07-02:30 BRM Brazil/Manaus>"
assert inspect(%{datetime | calendar: FakeCalendar}) ==
"#DateTime<29/2/2000F23::0::7 Brazil/Manaus BRM -12600 3600 FakeCalendar>"
end
test "from_iso8601/1 handles positive and negative offsets" do
assert DateTime.from_iso8601("2015-01-24T09:50:07-10:00") |> elem(1) ==
%DateTime{
microsecond: {0, 0},
month: 1,
std_offset: 0,
time_zone: "Etc/UTC",
utc_offset: 0,
year: 2015,
zone_abbr: "UTC",
day: 24,
hour: 19,
minute: 50,
second: 7
}
assert DateTime.from_iso8601("2015-01-24T09:50:07+10:00") |> elem(1) ==
%DateTime{
microsecond: {0, 0},
month: 1,
std_offset: 0,
time_zone: "Etc/UTC",
utc_offset: 0,
year: 2015,
zone_abbr: "UTC",
day: 23,
hour: 23,
minute: 50,
second: 7
}
assert DateTime.from_iso8601("0000-01-01T01:22:07+10:30") |> elem(1) ==
%DateTime{
microsecond: {0, 0},
month: 12,
std_offset: 0,
time_zone: "Etc/UTC",
utc_offset: 0,
year: -1,
zone_abbr: "UTC",
day: 31,
hour: 14,
minute: 52,
second: 7
}
end
test "from_iso8601/1 handles negative dates" do
assert DateTime.from_iso8601("-2015-01-24T09:50:07-10:00") |> elem(1) ==
%DateTime{
microsecond: {0, 0},
month: 1,
std_offset: 0,
time_zone: "Etc/UTC",
utc_offset: 0,
year: -2015,
zone_abbr: "UTC",
day: 24,
hour: 19,
minute: 50,
second: 7
}
assert DateTime.from_iso8601("-2015-01-24T09:50:07+10:00") |> elem(1) ==
%DateTime{
microsecond: {0, 0},
month: 1,
std_offset: 0,
time_zone: "Etc/UTC",
utc_offset: 0,
year: -2015,
zone_abbr: "UTC",
day: 23,
hour: 23,
minute: 50,
second: 7
}
assert DateTime.from_iso8601("-0001-01-01T01:22:07+10:30") |> elem(1) ==
%DateTime{
microsecond: {0, 0},
month: 12,
std_offset: 0,
time_zone: "Etc/UTC",
utc_offset: 0,
year: -2,
zone_abbr: "UTC",
day: 31,
hour: 14,
minute: 52,
second: 7
}
assert DateTime.from_iso8601("-0001-01-01T01:22:07-10:30") |> elem(1) ==
%DateTime{
microsecond: {0, 0},
month: 1,
std_offset: 0,
time_zone: "Etc/UTC",
utc_offset: 0,
year: -1,
zone_abbr: "UTC",
day: 1,
hour: 11,
minute: 52,
second: 7
}
assert DateTime.from_iso8601("-0001-12-31T23:22:07-10:30") |> elem(1) ==
%DateTime{
microsecond: {0, 0},
month: 1,
std_offset: 0,
time_zone: "Etc/UTC",
utc_offset: 0,
year: 0,
zone_abbr: "UTC",
day: 1,
hour: 9,
minute: 52,
second: 7
}
end
test "from_iso8601 handles invalid date, time, formats correctly" do
assert DateTime.from_iso8601("2015-01-23T23:50:07") == {:error, :missing_offset}
assert DateTime.from_iso8601("2015-01-23 23:50:61") == {:error, :invalid_time}
assert DateTime.from_iso8601("2015-01-32 23:50:07") == {:error, :invalid_date}
assert DateTime.from_iso8601("2015-01-23 23:50:07A") == {:error, :invalid_format}
assert DateTime.from_iso8601("2015-01-23T23:50:07.123-00:60") == {:error, :invalid_format}
end
test "from_unix/2" do
min_datetime = %DateTime{
calendar: Calendar.ISO,
day: 1,
hour: 0,
microsecond: {0, 0},
minute: 0,
month: 1,
second: 0,
std_offset: 0,
time_zone: "Etc/UTC",
utc_offset: 0,
year: -9999,
zone_abbr: "UTC"
}
assert DateTime.from_unix(-377_705_116_800) == {:ok, min_datetime}
assert DateTime.from_unix(-377_705_116_800_000_001, :microsecond) ==
{:error, :invalid_unix_time}
assert DateTime.from_unix(143_256_036_886_856, 1024) ==
{:ok,
%DateTime{
calendar: Calendar.ISO,
day: 17,
hour: 7,
microsecond: {320_312, 6},
minute: 5,
month: 3,
second: 22,
std_offset: 0,
time_zone: "Etc/UTC",
utc_offset: 0,
year: 6403,
zone_abbr: "UTC"
}}
max_datetime = %DateTime{
calendar: Calendar.ISO,
day: 31,
hour: 23,
microsecond: {999_999, 6},
minute: 59,
month: 12,
second: 59,
std_offset: 0,
time_zone: "Etc/UTC",
utc_offset: 0,
year: 9999,
zone_abbr: "UTC"
}
assert DateTime.from_unix(253_402_300_799_999_999, :microsecond) == {:ok, max_datetime}
assert DateTime.from_unix(253_402_300_800) == {:error, :invalid_unix_time}
minus_datetime = %DateTime{
calendar: Calendar.ISO,
day: 31,
hour: 23,
microsecond: {999_999, 6},
minute: 59,
month: 12,
second: 59,
std_offset: 0,
time_zone: "Etc/UTC",
utc_offset: 0,
year: 1969,
zone_abbr: "UTC"
}
assert DateTime.from_unix(-1, :microsecond) == {:ok, minus_datetime}
assert_raise ArgumentError, fn ->
DateTime.from_unix(0, :unknown_atom)
end
assert_raise ArgumentError, fn ->
DateTime.from_unix(0, "invalid type")
end
end
test "from_unix!/2" do
  # with Unix times back to 0 Gregorian seconds
  datetime = %DateTime{
    calendar: Calendar.ISO,
    day: 1,
    hour: 0,
    microsecond: {0, 0},
    minute: 0,
    month: 1,
    second: 0,
    std_offset: 0,
    time_zone: "Etc/UTC",
    utc_offset: 0,
    year: 0,
    zone_abbr: "UTC"
  }

  assert DateTime.from_unix!(-62_167_219_200) == datetime

  # Unlike from_unix/2, out-of-range times and bad units raise
  # instead of returning error tuples.
  assert_raise ArgumentError, fn ->
    DateTime.from_unix!(-377_705_116_801)
  end

  assert_raise ArgumentError, fn ->
    DateTime.from_unix!(0, :unknown_atom)
  end

  assert_raise ArgumentError, fn ->
    DateTime.from_unix!(0, "invalid type")
  end
end
test "to_unix/2 works with Unix times back to 0 Gregorian seconds" do
  # with Unix times back to 0 Gregorian seconds
  gregorian_0 = %DateTime{
    calendar: Calendar.ISO,
    day: 1,
    hour: 0,
    microsecond: {0, 0},
    minute: 0,
    month: 1,
    second: 0,
    std_offset: 0,
    time_zone: "Etc/UTC",
    utc_offset: 0,
    year: 0,
    zone_abbr: "UTC"
  }

  assert DateTime.to_unix(gregorian_0) == -62_167_219_200
  # Non-struct maps with the right keys (Calendar.datetime) are accepted too.
  assert DateTime.to_unix(Map.from_struct(gregorian_0)) == -62_167_219_200

  # Update syntax keeps all fields but the year; -9999 is the ISO minimum.
  min_datetime = %DateTime{gregorian_0 | year: -9999}

  assert DateTime.to_unix(min_datetime) == -377_705_116_800
end
test "compare/2" do
  # 2000-02-29 23:00:07 at +01:00 (Warsaw).
  datetime1 = %DateTime{
    year: 2000,
    month: 2,
    day: 29,
    zone_abbr: "CET",
    hour: 23,
    minute: 0,
    second: 7,
    microsecond: {0, 0},
    utc_offset: 3600,
    std_offset: 0,
    time_zone: "Europe/Warsaw"
  }

  # Same wall clock, same day, but at -04:00 — a strictly later instant.
  datetime2 = %DateTime{
    year: 2000,
    month: 2,
    day: 29,
    zone_abbr: "AMT",
    hour: 23,
    minute: 0,
    second: 7,
    microsecond: {0, 0},
    utc_offset: -14400,
    std_offset: 0,
    time_zone: "America/Manaus"
  }

  # Negative year exercises comparison across the far past.
  datetime3 = %DateTime{
    year: -99,
    month: 2,
    day: 28,
    zone_abbr: "AMT",
    hour: 23,
    minute: 0,
    second: 7,
    microsecond: {0, 0},
    utc_offset: -14400,
    std_offset: 0,
    time_zone: "America/Manaus"
  }

  assert DateTime.compare(datetime1, datetime1) == :eq
  assert DateTime.compare(datetime1, datetime2) == :lt
  assert DateTime.compare(datetime2, datetime1) == :gt
  assert DateTime.compare(datetime3, datetime3) == :eq
  assert DateTime.compare(datetime2, datetime3) == :gt
  assert DateTime.compare(datetime3, datetime1) == :lt
  # Plain maps conforming to Calendar.datetime compare the same way.
  assert DateTime.compare(Map.from_struct(datetime3), Map.from_struct(datetime1)) == :lt
end
test "convert/2" do
  datetime_iso = %DateTime{
    year: 2000,
    month: 2,
    day: 29,
    zone_abbr: "CET",
    hour: 23,
    minute: 0,
    second: 7,
    microsecond: {0, 0},
    utc_offset: 3600,
    std_offset: 0,
    time_zone: "Europe/Warsaw"
  }

  # The Holocene calendar is the ISO date with 10_000 added to the year.
  datetime_hol = %DateTime{
    year: 12000,
    month: 2,
    day: 29,
    zone_abbr: "CET",
    hour: 23,
    minute: 0,
    second: 7,
    microsecond: {0, 0},
    utc_offset: 3600,
    std_offset: 0,
    time_zone: "Europe/Warsaw",
    calendar: Calendar.Holocene
  }

  assert DateTime.convert(datetime_iso, Calendar.Holocene) == {:ok, datetime_hol}

  # Round-tripping through Holocene is lossless, with and without microseconds.
  assert datetime_iso
         |> DateTime.convert!(Calendar.Holocene)
         |> DateTime.convert!(Calendar.ISO) == datetime_iso

  assert %{datetime_iso | microsecond: {123, 6}}
         |> DateTime.convert!(Calendar.Holocene)
         |> DateTime.convert!(Calendar.ISO) == %{datetime_iso | microsecond: {123, 6}}

  # FakeCalendar is a test fixture that cannot represent ISO datetimes.
  assert DateTime.convert(datetime_iso, FakeCalendar) == {:error, :incompatible_calendars}

  # Test passing non-struct map when converting to different calendar returns DateTime struct
  assert DateTime.convert(Map.from_struct(datetime_iso), Calendar.Holocene) ==
           {:ok, datetime_hol}

  # Test passing non-struct map when converting to same calendar returns DateTime struct
  assert DateTime.convert(Map.from_struct(datetime_iso), Calendar.ISO) ==
           {:ok, datetime_iso}
end
test "from_iso8601/1 with tz offsets" do
  # The same instant written with extended ("+01:00"), basic ("+0100"),
  # and hour-only ("+01") offset notations. Parsing always normalizes the
  # result to Etc/UTC, so only the expected UTC hour differs per case.
  offset_cases = [
    {"2017-06-02T14:00:00+01:00", 13},
    {"2017-06-02T14:00:00-04:00", 18},
    {"2017-06-02T14:00:00+0100", 13},
    {"2017-06-02T14:00:00-0400", 18},
    {"2017-06-02T14:00:00+01", 13},
    {"2017-06-02T14:00:00-04", 18}
  ]

  for {string, utc_hour} <- offset_cases do
    expected = %DateTime{
      year: 2017,
      month: 6,
      day: 2,
      zone_abbr: "UTC",
      hour: utc_hour,
      minute: 0,
      second: 0,
      microsecond: {0, 0},
      utc_offset: 0,
      std_offset: 0,
      time_zone: "Etc/UTC"
    }

    assert string |> DateTime.from_iso8601() |> elem(1) == expected
  end
end
test "truncate/2" do
  datetime = %DateTime{
    year: 2017,
    month: 11,
    day: 6,
    zone_abbr: "CET",
    hour: 0,
    minute: 6,
    second: 23,
    microsecond: {0, 0},
    utc_offset: 3600,
    std_offset: 0,
    time_zone: "Europe/Paris"
  }

  datetime_map = Map.from_struct(datetime)

  # Truncating to the precision already held is a no-op.
  assert DateTime.truncate(%{datetime | microsecond: {123_456, 6}}, :microsecond) ==
           %{datetime | microsecond: {123_456, 6}}

  # A struct should be returned when passing a map.
  assert DateTime.truncate(%{datetime_map | microsecond: {123_456, 6}}, :microsecond) ==
           %{datetime | microsecond: {123_456, 6}}

  # :millisecond keeps 3 digits of precision and floors (never rounds up).
  assert DateTime.truncate(%{datetime | microsecond: {0, 0}}, :millisecond) ==
           %{datetime | microsecond: {0, 0}}

  assert DateTime.truncate(%{datetime | microsecond: {000_100, 6}}, :millisecond) ==
           %{datetime | microsecond: {0, 3}}

  assert DateTime.truncate(%{datetime | microsecond: {000_999, 6}}, :millisecond) ==
           %{datetime | microsecond: {0, 3}}

  assert DateTime.truncate(%{datetime | microsecond: {001_000, 6}}, :millisecond) ==
           %{datetime | microsecond: {1000, 3}}

  assert DateTime.truncate(%{datetime | microsecond: {001_200, 6}}, :millisecond) ==
           %{datetime | microsecond: {1000, 3}}

  assert DateTime.truncate(%{datetime | microsecond: {123_456, 6}}, :millisecond) ==
           %{datetime | microsecond: {123_000, 3}}

  # :second drops the microsecond component entirely.
  assert DateTime.truncate(%{datetime | microsecond: {123_456, 6}}, :second) ==
           %{datetime | microsecond: {0, 0}}
end
test "diff/2" do
  # Two datetimes 104 years apart, spanning year 0 and a leap day.
  dt1 = %DateTime{
    year: 100,
    month: 2,
    day: 28,
    zone_abbr: "CET",
    hour: 23,
    minute: 0,
    second: 7,
    microsecond: {0, 0},
    utc_offset: 3600,
    std_offset: 0,
    time_zone: "Europe/Warsaw"
  }

  dt2 = %DateTime{
    year: -0004,
    month: 2,
    day: 29,
    zone_abbr: "CET",
    hour: 23,
    minute: 0,
    second: 7,
    microsecond: {0, 0},
    utc_offset: 3600,
    std_offset: 0,
    time_zone: "Europe/Warsaw"
  }

  # Default unit is seconds.
  assert DateTime.diff(dt1, dt2) == 3_281_904_000

  # Test with a non-struct map conforming to Calendar.datetime
  assert DateTime.diff(Map.from_struct(dt1), Map.from_struct(dt2)) == 3_281_904_000
end
describe "from_naive" do
  test "uses default time zone database from config" do
    # Install the fixture database as the process-global default, so the
    # explicit third argument and the configured default agree.
    Calendar.put_time_zone_database(FakeTimeZoneDatabase)

    assert DateTime.from_naive(
             ~N[2018-07-01 12:34:25.123456],
             "Europe/Copenhagen",
             FakeTimeZoneDatabase
           ) ==
             {:ok,
              %DateTime{
                day: 1,
                hour: 12,
                microsecond: {123_456, 6},
                minute: 34,
                month: 7,
                second: 25,
                std_offset: 3600,
                time_zone: "Europe/Copenhagen",
                utc_offset: 3600,
                year: 2018,
                zone_abbr: "CEST"
              }}
  after
    # Restore the default database so other tests are unaffected.
    Calendar.put_time_zone_database(Calendar.UTCOnlyTimeZoneDatabase)
  end

  test "with compatible calendar on unambiguous wall clock" do
    holocene_ndt = %NaiveDateTime{
      calendar: Calendar.Holocene,
      year: 12018,
      month: 7,
      day: 1,
      hour: 12,
      minute: 34,
      second: 25,
      microsecond: {123_456, 6}
    }

    # The naive datetime's calendar is preserved in the result.
    assert DateTime.from_naive(holocene_ndt, "Europe/Copenhagen", FakeTimeZoneDatabase) ==
             {:ok,
              %DateTime{
                calendar: Calendar.Holocene,
                day: 1,
                hour: 12,
                microsecond: {123_456, 6},
                minute: 34,
                month: 7,
                second: 25,
                std_offset: 3600,
                time_zone: "Europe/Copenhagen",
                utc_offset: 3600,
                year: 12018,
                zone_abbr: "CEST"
              }}
  end

  test "with compatible calendar on ambiguous wall clock" do
    # 02:30 on the autumn DST fallback occurs twice; both candidates
    # are returned, summer time (CEST) first.
    holocene_ndt = %NaiveDateTime{
      calendar: Calendar.Holocene,
      year: 12018,
      month: 10,
      day: 28,
      hour: 02,
      minute: 30,
      second: 00,
      microsecond: {123_456, 6}
    }

    assert {:ambiguous, first_dt, second_dt} =
             DateTime.from_naive(holocene_ndt, "Europe/Copenhagen", FakeTimeZoneDatabase)

    assert %DateTime{calendar: Calendar.Holocene, zone_abbr: "CEST"} = first_dt
    assert %DateTime{calendar: Calendar.Holocene, zone_abbr: "CET"} = second_dt
  end

  test "with compatible calendar on gap" do
    # 02:30 on the spring DST switch never occurs; the datetimes just
    # before and just after the gap are returned.
    holocene_ndt = %NaiveDateTime{
      calendar: Calendar.Holocene,
      year: 12019,
      month: 03,
      day: 31,
      hour: 02,
      minute: 30,
      second: 00,
      microsecond: {123_456, 6}
    }

    assert {:gap, first_dt, second_dt} =
             DateTime.from_naive(holocene_ndt, "Europe/Copenhagen", FakeTimeZoneDatabase)

    assert %DateTime{calendar: Calendar.Holocene, zone_abbr: "CET"} = first_dt
    assert %DateTime{calendar: Calendar.Holocene, zone_abbr: "CEST"} = second_dt
  end

  test "with incompatible calendar" do
    ndt = %{~N[2018-07-20 00:00:00] | calendar: FakeCalendar}

    assert DateTime.from_naive(ndt, "Europe/Copenhagen", FakeTimeZoneDatabase) ==
             {:error, :incompatible_calendars}
  end
end
describe "from_naive!" do
  # The bang variant raises where from_naive/3 returns {:ambiguous, _, _}
  # or {:gap, _, _}; the message names the failure kind.
  test "raises on ambiguous wall clock" do
    assert_raise ArgumentError, ~r"ambiguous", fn ->
      DateTime.from_naive!(~N[2018-10-28 02:30:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
    end
  end

  test "raises on gap" do
    assert_raise ArgumentError, ~r"gap", fn ->
      DateTime.from_naive!(~N[2019-03-31 02:30:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
    end
  end
end
describe "shift_zone" do
  test "with compatible calendar" do
    holocene_ndt = %NaiveDateTime{
      calendar: Calendar.Holocene,
      year: 12018,
      month: 7,
      day: 1,
      hour: 12,
      minute: 34,
      second: 25,
      microsecond: {123_456, 6}
    }

    {:ok, holocene_dt} =
      DateTime.from_naive(holocene_ndt, "Europe/Copenhagen", FakeTimeZoneDatabase)

    {:ok, dt} = DateTime.shift_zone(holocene_dt, "America/Los_Angeles", FakeTimeZoneDatabase)

    # Same instant, new zone: 12:34 CEST (+02:00) becomes 03:34 PDT (-07:00),
    # and the non-ISO calendar is carried over.
    assert dt == %DateTime{
             calendar: Calendar.Holocene,
             day: 1,
             hour: 3,
             microsecond: {123_456, 6},
             minute: 34,
             month: 7,
             second: 25,
             std_offset: 3600,
             time_zone: "America/Los_Angeles",
             utc_offset: -28800,
             year: 12018,
             zone_abbr: "PDT"
           }
  end

  test "uses default time zone database from config" do
    # With a configured default database, the third argument can be omitted.
    Calendar.put_time_zone_database(FakeTimeZoneDatabase)

    {:ok, dt} = DateTime.from_naive(~N[2018-07-01 12:34:25.123456], "Europe/Copenhagen")
    {:ok, dt} = DateTime.shift_zone(dt, "America/Los_Angeles")

    assert dt == %DateTime{
             day: 1,
             hour: 3,
             microsecond: {123_456, 6},
             minute: 34,
             month: 7,
             second: 25,
             std_offset: 3600,
             time_zone: "America/Los_Angeles",
             utc_offset: -28800,
             year: 2018,
             zone_abbr: "PDT"
           }
  after
    # Restore the default database so other tests are unaffected.
    Calendar.put_time_zone_database(Calendar.UTCOnlyTimeZoneDatabase)
  end
end
describe "add" do
  test "add with non-struct map that conforms to Calendar.datetime" do
    dt_map = DateTime.from_naive!(~N[2018-08-28 00:00:00], "Etc/UTC") |> Map.from_struct()

    # Even for a plain-map input, the result is a proper DateTime struct.
    assert DateTime.add(dt_map, 1, :second) == %DateTime{
             calendar: Calendar.ISO,
             year: 2018,
             month: 8,
             day: 28,
             hour: 0,
             minute: 0,
             second: 1,
             std_offset: 0,
             time_zone: "Etc/UTC",
             zone_abbr: "UTC",
             utc_offset: 0,
             microsecond: {0, 0}
           }
  end

  test "error with UTC only database and non UTC datetime" do
    dt =
      DateTime.from_naive!(~N[2018-08-28 00:00:00], "Europe/Copenhagen", FakeTimeZoneDatabase)

    # add/3 falls back to the default UTC-only database here, which cannot
    # resolve Europe/Copenhagen, so it raises.
    assert_raise ArgumentError, fn ->
      DateTime.add(dt, 1, :second)
    end
  end

  test "add/2 with other calendars" do
    # Arithmetic works on non-ISO calendars and preserves the calendar.
    assert ~N[2000-01-01 12:34:15.123456]
           |> NaiveDateTime.convert!(Calendar.Holocene)
           |> DateTime.from_naive!("Etc/UTC")
           |> DateTime.add(10, :second) ==
             %DateTime{
               calendar: Calendar.Holocene,
               year: 12000,
               month: 1,
               day: 1,
               hour: 12,
               minute: 34,
               second: 25,
               std_offset: 0,
               time_zone: "Etc/UTC",
               zone_abbr: "UTC",
               utc_offset: 0,
               microsecond: {123_456, 6}
             }
  end
end
describe "to_iso8601" do
  test "to_iso8601/2 with a normal DateTime struct" do
    datetime = DateTime.from_naive!(~N[2018-07-01 12:34:25.123456], "Etc/UTC")

    # UTC datetimes render with the "Z" suffix.
    assert DateTime.to_iso8601(datetime) == "2018-07-01T12:34:25.123456Z"
  end

  test "to_iso8601/2 with a non-struct map conforming to the Calendar.datetime type" do
    datetime_map =
      DateTime.from_naive!(~N[2018-07-01 12:34:25.123456], "Etc/UTC") |> Map.from_struct()

    assert DateTime.to_iso8601(datetime_map) == "2018-07-01T12:34:25.123456Z"
  end
end
describe "to_date/1" do
  test "upcasting" do
    # A NaiveDateTime is not accepted where a DateTime is required.
    assert catch_error(DateTime.to_date(~N[2000-02-29 12:23:34]))
  end
end
describe "to_time/1" do
  test "upcasting" do
    # A NaiveDateTime is not accepted where a DateTime is required.
    assert catch_error(DateTime.to_time(~N[2000-02-29 12:23:34]))
  end
end
describe "to_naive/1" do
  test "upcasting" do
    # A NaiveDateTime is not accepted where a DateTime is required.
    assert catch_error(DateTime.to_naive(~N[2000-02-29 12:23:34]))
  end
end
end
| 29.489275 | 143 | 0.519397 |
f7bb8bc19d894b1e1aa332e628b0ab4155318a21 | 713 | exs | Elixir | apps/state/test/state/line_test.exs | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 62 | 2019-01-17T12:34:39.000Z | 2022-03-20T21:49:47.000Z | apps/state/test/state/line_test.exs | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 375 | 2019-02-13T15:30:50.000Z | 2022-03-30T18:50:41.000Z | apps/state/test/state/line_test.exs | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 14 | 2019-01-16T19:35:57.000Z | 2022-02-26T18:55:54.000Z | defmodule State.LineTest do
use ExUnit.Case
alias Model.Line
setup do
  # Reset the shared line state so each test starts with no lines stored.
  State.Line.new_state([])
  :ok
end
test "returns nil for unknown line" do
  # Lookup misses return nil rather than raising.
  assert State.Line.by_id("1") == nil
end
test "it can add a line and query it" do
  # Single source of truth for the fixture's fields; the original spelled
  # the same struct literal out twice.
  attrs = [
    id: "1",
    short_name: "1st Line",
    long_name: "First Line",
    color: "00843D",
    text_color: "FFFFFF",
    sort_order: 1
  ]

  State.Line.new_state([struct!(Line, attrs)])

  # Lookup by id returns a struct equal to the one that was stored.
  assert State.Line.by_id("1") == struct!(Line, attrs)
end
end
| 19.805556 | 42 | 0.532959 |
f7bba31412baca4c832ad3f2949990685a8ba64c | 643 | ex | Elixir | lib/wasabi_ex/errors.ex | goodhamgupta/wasabi_ex | 77e5e955aaf6be8cd1e1357cf681c6dc58afc258 | [
"Apache-2.0"
] | null | null | null | lib/wasabi_ex/errors.ex | goodhamgupta/wasabi_ex | 77e5e955aaf6be8cd1e1357cf681c6dc58afc258 | [
"Apache-2.0"
] | null | null | null | lib/wasabi_ex/errors.ex | goodhamgupta/wasabi_ex | 77e5e955aaf6be8cd1e1357cf681c6dc58afc258 | [
"Apache-2.0"
] | null | null | null | defmodule WasabiEx.Errors.NotImplemented do
# Exception carrying a single free-form reason term.
defexception [:reason]

# Accept a bare reason instead of defexception's default keyword list.
def exception(reason), do: %__MODULE__{reason: reason}
def message(%__MODULE__{reason: reason}), do: "WasabiEx::NotImplemented - #{reason}"
end
defmodule WasabiEx.Errors.ApiError do
  # Exception carrying a single free-form reason term.
  defexception [:reason]

  # Accept a bare reason instead of defexception's default keyword list.
  def exception(reason), do: %__MODULE__{reason: reason}
  def message(%__MODULE__{reason: reason}), do: "WasabiEx::ApiError - #{reason}"
end
defmodule WasabiEx.Errors.InvalidParam do
  # Exception carrying a single free-form reason term.
  defexception [:reason]

  # Accept a bare reason instead of defexception's default keyword list.
  def exception(reason), do: %__MODULE__{reason: reason}
  def message(%__MODULE__{reason: reason}), do: "WasabiEx::InvalidParam - #{reason}"
end
| 26.791667 | 86 | 0.74028 |
f7bbcfbfbfb5ea56eb082bd7912fcbd642ea9a44 | 608 | exs | Elixir | mix.exs | bgracie/domain_planner | c12d7b62092dc096777e2f602040fbf5fe1341bc | [
"MIT"
] | null | null | null | mix.exs | bgracie/domain_planner | c12d7b62092dc096777e2f602040fbf5fe1341bc | [
"MIT"
] | null | null | null | mix.exs | bgracie/domain_planner | c12d7b62092dc096777e2f602040fbf5fe1341bc | [
"MIT"
] | null | null | null | defmodule DomainPlanner.Mixfile do
use Mix.Project
def project do
[
app: :domain_planner,
version: "0.1.0",
elixir: "~> 1.5",
start_permanent: Mix.env == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:yamerl, "~> 0.6.0"}
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"},
]
end
end
| 20.266667 | 88 | 0.572368 |
f7bbfd528f6741ec1649a5dfccb4dc65c9d9e944 | 1,370 | ex | Elixir | lib/todo.ex | TwelveNights/todo | 484b46b800b17abfe9e560c1f11ebf772c6d3bb4 | [
"MIT"
] | null | null | null | lib/todo.ex | TwelveNights/todo | 484b46b800b17abfe9e560c1f11ebf772c6d3bb4 | [
"MIT"
] | null | null | null | lib/todo.ex | TwelveNights/todo | 484b46b800b17abfe9e560c1f11ebf772c6d3bb4 | [
"MIT"
] | null | null | null | defmodule Todo do
@moduledoc """
Documentation for Todo.
"""
alias Todo.{Repo, Task}
require Ecto.Query
@format_string "~4ts~55ts~12ts~n"
@doc """
Inserts a new task with the given description and prints a confirmation.

Raises `Ecto.InvalidChangesetError` (via `Repo.insert!/1`) if the insert fails.
"""
def add(message) do
  # Fixed: the previous @doc was leftover generator boilerplate ("Hello world.")
  # whose doctest referenced a nonexistent Todo.hello/0 and would fail under
  # `doctest Todo`.
  %Task{task: message} |> Repo.insert!()
  IO.puts("Added new item to do: #{message}")
end
@doc """
Deletes the task with the given id and prints a confirmation.
"""
def delete(id) do
  # Fetch first so a missing id raises (Repo.get!), exactly as before.
  Task
  |> Repo.get!(id)
  |> Repo.delete!()

  IO.puts("Deleted item with id: #{id}")
end
# Prints a formatted table of tasks; when `all` is false only incomplete
# tasks are listed.
def show(all) do
  # Header row, using the same column widths as the task rows.
  :io.fwrite(@format_string, ['id', 'task', 'status'])

  # Conditionally narrows the query: Ecto.Query.where/2 is a macro, so it is
  # applied inside a function rather than piped directly.
  check = fn stuff ->
    unless all do
      Ecto.Query.where(stuff, completed: false)
    else
      stuff
    end
  end

  collection =
    Task
    |> check.()
    |> Repo.all()

  Enum.each(collection, &print_pretty(&1))
end
# Flips the completed flag of the task with the given id and prints the
# new status. Raises if the id does not exist (Repo.get!) or the update
# fails (Repo.update!).
def toggle(id) do
  task = Repo.get!(Task, id)
  task = Ecto.Changeset.change(task, completed: !task.completed)
  struct = Repo.update!(task)

  IO.puts(
    "Updated task with id #{struct.id} to #{
      if struct.completed do
        'Done'
      else
        'Not Done'
      end
    }"
  )
end
# Writes one formatted table row for the given task.
defp print_pretty(%Task{id: id, task: task, completed: completed}) do
  status = if completed, do: 'Done', else: 'Not Done'
  :io.fwrite(@format_string, [Integer.to_string(id), task, status])
end
| 17.792208 | 71 | 0.551825 |
f7bc14f8edadb4a8b8deda1b79eb56dfabb63d25 | 7,026 | exs | Elixir | test/opentelemetry_liveview_test.exs | aaronrenner/opentelemetry_liveview | 3f14bcbc39f321170fca2b51477c51adb15aa1ee | [
"Apache-2.0"
] | null | null | null | test/opentelemetry_liveview_test.exs | aaronrenner/opentelemetry_liveview | 3f14bcbc39f321170fca2b51477c51adb15aa1ee | [
"Apache-2.0"
] | null | null | null | test/opentelemetry_liveview_test.exs | aaronrenner/opentelemetry_liveview | 3f14bcbc39f321170fca2b51477c51adb15aa1ee | [
"Apache-2.0"
] | null | null | null | defmodule OpentelemetryLiveViewTest do
use ExUnit.Case, async: false
# require OpenTelemetry.Tracer
# require OpenTelemetry.Span
require Record
# alias PhoenixMeta, as: Meta
# Turn the Erlang span/event records shipped by the opentelemetry apps into
# Elixir record macros (span/2, event/2, ...) usable in pattern matches below.
for {name, spec} <- Record.extract_all(from_lib: "opentelemetry/include/otel_span.hrl") do
  Record.defrecord(name, spec)
end

for {name, spec} <- Record.extract_all(from_lib: "opentelemetry_api/include/opentelemetry.hrl") do
  Record.defrecord(name, spec)
end
setup do
  # Restart the OpenTelemetry app configured with a fast-flushing batch
  # processor and a pid exporter, so finished spans are delivered to this
  # test process's mailbox (consumed via assert_receive).
  :application.stop(:opentelemetry)
  :application.set_env(:opentelemetry, :tracer, :otel_tracer_default)

  :application.set_env(:opentelemetry, :processors, [
    {:otel_batch_processor, %{scheduled_delay_ms: 1}}
  ])

  :application.start(:opentelemetry)

  :otel_batch_processor.set_exporter(:otel_exporter_pid, self())

  # Attach the telemetry handlers under test.
  OpentelemetryLiveView.setup()

  :ok
end
# Minimal LiveView telemetry metadata shared by the tests below.
@meta %{socket: %{view: SomeWeb.SomeLive}}

# Representative crash metadata (badkey on a map without :name) merged into
# the :exception telemetry events.
@bad_key_error %{
  kind: :error,
  reason: {:badkey, :name, %{username: "foobar"}},
  stacktrace: [
    {MyStore.Users, :sort_by_name, 2, [file: 'lib/my_store/users.ex', line: 159]},
    {Enum, :"-to_sort_fun/1-fun-0-", 3, [file: 'lib/enum.ex', line: 2542]},
    {:lists, :sort, 2, [file: 'lists.erl', line: 969]}
  ]
}
test "records spans for the mount callback" do
:telemetry.execute(
[:phoenix, :live_view, :mount, :start],
%{system_time: System.system_time()},
@meta
)
:telemetry.execute(
[:phoenix, :live_view, :mount, :stop],
%{duration: System.convert_time_unit(42, :millisecond, :native)},
@meta
)
assert_receive {:span,
span(
name: "SomeWeb.SomeLive.mount",
kind: :internal,
attributes: attributes
)}
assert List.keysort(attributes, 0) == [
duration_ms: 42,
"liveview.callback": "mount",
"liveview.module": "SomeWeb.SomeLive"
]
end
test "records exceptions for the mount callback" do
:telemetry.execute(
[:phoenix, :live_view, :mount, :start],
%{system_time: System.system_time()},
@meta
)
:telemetry.execute(
[:phoenix, :live_view, :mount, :exception],
%{duration: System.convert_time_unit(42, :millisecond, :native)},
Map.merge(@meta, @bad_key_error)
)
attributes = assert_receive_bad_key_error_span("SomeWeb.SomeLive.mount")
assert List.keysort(attributes, 0) == [
{:duration_ms, 42},
{:"liveview.callback", "mount"},
{:"liveview.module", "SomeWeb.SomeLive"}
]
end
test "records spans for the handle_params callback" do
meta = Map.put(@meta, :uri, "https://foobar.com")
:telemetry.execute(
[:phoenix, :live_view, :handle_params, :start],
%{system_time: System.system_time()},
meta
)
:telemetry.execute(
[:phoenix, :live_view, :handle_params, :stop],
%{duration: System.convert_time_unit(42, :millisecond, :native)},
meta
)
assert_receive {:span,
span(
name: "SomeWeb.SomeLive.handle_params",
kind: :internal,
attributes: attributes
)}
assert List.keysort(attributes, 0) == [
duration_ms: 42,
"liveview.callback": "handle_params",
"liveview.module": "SomeWeb.SomeLive",
"liveview.uri": "https://foobar.com"
]
end
test "records exceptions for the handle_params callback" do
meta = Map.put(@meta, :uri, "https://foobar.com")
:telemetry.execute(
[:phoenix, :live_view, :handle_params, :start],
%{system_time: System.system_time()},
meta
)
:telemetry.execute(
[:phoenix, :live_view, :handle_params, :exception],
%{duration: System.convert_time_unit(42, :millisecond, :native)},
Map.merge(meta, @bad_key_error)
)
attributes = assert_receive_bad_key_error_span("SomeWeb.SomeLive.handle_params")
assert List.keysort(attributes, 0) == [
{:duration_ms, 42},
{:"liveview.callback", "handle_params"},
{:"liveview.module", "SomeWeb.SomeLive"},
"liveview.uri": "https://foobar.com"
]
end
test "records spans for the handle_event callback" do
meta = Map.put(@meta, :event, "some_event")
:telemetry.execute(
[:phoenix, :live_view, :handle_event, :start],
%{system_time: System.system_time()},
meta
)
:telemetry.execute(
[:phoenix, :live_view, :handle_event, :stop],
%{duration: System.convert_time_unit(42, :millisecond, :native)},
meta
)
assert_receive {:span,
span(
name: "SomeWeb.SomeLive.some_event",
kind: :internal,
attributes: attributes
)}
assert List.keysort(attributes, 0) == [
duration_ms: 42,
"liveview.callback": "handle_event",
"liveview.event": "some_event",
"liveview.module": "SomeWeb.SomeLive"
]
# for live_component
meta = %{socket: %{}, event: "some_event", component: SomeWeb.SomeComponent}
:telemetry.execute(
[:phoenix, :live_component, :handle_event, :start],
%{system_time: System.system_time()},
meta
)
:telemetry.execute(
[:phoenix, :live_component, :handle_event, :stop],
%{duration: System.convert_time_unit(42, :millisecond, :native)},
meta
)
assert_receive {:span,
span(
name: "SomeWeb.SomeComponent.some_event",
kind: :internal,
attributes: attributes
)}
assert List.keysort(attributes, 0) == [
duration_ms: 42,
"liveview.callback": "handle_event",
"liveview.event": "some_event",
"liveview.module": "SomeWeb.SomeComponent"
]
end
# Asserts that a span named `name` was exported with error status and the
# shared badkey exception event, then returns the span's attributes so the
# caller can make further assertions on them.
defp assert_receive_bad_key_error_span(name) do
  expected_status = OpenTelemetry.status(:error, "Erlang error: :badkey")

  assert_receive {:span,
                  span(
                    name: ^name,
                    attributes: attributes,
                    kind: :internal,
                    events: [
                      event(
                        name: "exception",
                        attributes: [
                          {"exception.type", "Elixir.ErlangError"},
                          {"exception.message", "Erlang error: :badkey"},
                          {"exception.stacktrace", _stacktrace},
                          {:key, :name},
                          {:map, %{username: "foobar"}}
                        ]
                      )
                    ],
                    status: ^expected_status
                  )}

  attributes
end
end
| 29.771186 | 100 | 0.550811 |
f7bc3bf51214ebec241a69cfeafae044e8b82c84 | 655 | ex | Elixir | enigma/lib/enigma/score.ex | vmopuri/otp_p1 | 6f52e84a1e2a25e5ad0d922278d491f77ade13c7 | [
"MIT"
] | null | null | null | enigma/lib/enigma/score.ex | vmopuri/otp_p1 | 6f52e84a1e2a25e5ad0d922278d491f77ade13c7 | [
"MIT"
] | null | null | null | enigma/lib/enigma/score.ex | vmopuri/otp_p1 | 6f52e84a1e2a25e5ad0d922278d491f77ade13c7 | [
"MIT"
] | 4 | 2021-06-17T19:06:13.000Z | 2021-06-21T14:57:54.000Z | defmodule Enigma.Score do
defstruct [:red, :white]
# Builds a score for a guess: `red` counts exact-position matches, `white`
# counts right-color/wrong-position pegs.
def new(answer, move) do
  # Compute the exact matches once; the original evaluated reds/2 twice.
  red = reds(answer, move)
  %__MODULE__{red: red, white: whites(Enum.count(answer), red, misses(answer, move))}
end
# Counts positions where the guess matches the answer exactly.
defp reds(answer, move) do
  hits = for {expected, guessed} <- Enum.zip(answer, move), expected == guessed, do: :hit
  length(hits)
end
# Counts pegs in the guess that do not appear anywhere in the answer.
defp misses(answer, move) do
  move |> Kernel.--(answer) |> length()
end
# Whatever is neither an exact match (red) nor a complete miss is white.
defp whites(answer_count, red, miss) do
  answer_count - (red + miss)
end
# Renders a score as space-separated pegs, reds first, e.g. "R R W".
def render_score(%{red: red_count, white: white_count} = _score) do
  red_pegs = List.duplicate("R", red_count)
  white_pegs = List.duplicate("W", white_count)
  Enum.join(red_pegs ++ white_pegs, " ")
end
end
| 21.833333 | 117 | 0.636641 |
f7bc6e90eb2af2b5a7319e10ad0c7507370f3488 | 1,415 | exs | Elixir | mix.exs | mnishiguchi/bit_flagger | 5bec0cef47870861e22554c42354ef30763b58a0 | [
"MIT"
] | 1 | 2021-09-03T23:40:16.000Z | 2021-09-03T23:40:16.000Z | mix.exs | mnishiguchi/bit_flagger | 5bec0cef47870861e22554c42354ef30763b58a0 | [
"MIT"
] | null | null | null | mix.exs | mnishiguchi/bit_flagger | 5bec0cef47870861e22554c42354ef30763b58a0 | [
"MIT"
] | 1 | 2021-09-03T23:43:50.000Z | 2021-09-03T23:43:50.000Z | defmodule BitFlagger.MixProject do
use Mix.Project
@version "0.1.1"
@source_url "https://github.com/mnishiguchi/bit_flagger"
def project do
[
app: :bit_flagger,
version: @version,
description: "Manipulate bit flags in Elixir",
elixir: "~> 1.11",
start_permanent: Mix.env() == :prod,
deps: deps(),
package: package(),
aliases: [],
dialyzer: dialyzer(),
docs: docs()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:credo, "~> 1.5", only: [:dev, :test], runtime: false},
{:dialyxir, "~> 1.1", only: [:dev, :test], runtime: false},
{:ex_doc, "~> 0.24", only: [:dev], runtime: false},
{:mix_test_watch, "~> 1.0", only: :dev, runtime: false}
]
end
defp dialyzer() do
[
flags: [:race_conditions, :unmatched_returns, :error_handling, :underspecs]
]
end
defp docs do
[
extras: ["README.md"],
source_ref: "v#{@version}",
source_url: @source_url
]
end
defp package do
%{
files: [
"lib",
"mix.exs",
"README.md",
"LICENSE*",
"CHANGELOG*"
],
licenses: ["MIT"],
links: %{
"GitHub" => @source_url
}
}
end
end
| 20.507246 | 81 | 0.539223 |
f7bc7ed7bfb85a6f736d4f760c4eea70c32188f4 | 1,816 | exs | Elixir | test/glimesh_web/live/user_live_components/report_button_test.exs | itsUnsmart/glimesh.tv | 22c532184bb5046f6c6d8232e8bd66ba534c01c1 | [
"MIT"
] | null | null | null | test/glimesh_web/live/user_live_components/report_button_test.exs | itsUnsmart/glimesh.tv | 22c532184bb5046f6c6d8232e8bd66ba534c01c1 | [
"MIT"
] | null | null | null | test/glimesh_web/live/user_live_components/report_button_test.exs | itsUnsmart/glimesh.tv | 22c532184bb5046f6c6d8232e8bd66ba534c01c1 | [
"MIT"
] | null | null | null | defmodule GlimeshWeb.UserLive.Components.ReportButtonTest do
use GlimeshWeb.ConnCase
use Bamboo.Test, shared: true
import Phoenix.LiveViewTest
import Glimesh.AccountsFixtures
@component GlimeshWeb.UserLive.Components.ReportButton
defp create_streamer(_) do
%{streamer: streamer_fixture()}
end
describe "report button unauthed user" do
setup :create_streamer
test "shows a report button that does nothing?", %{conn: conn, streamer: streamer} do
{:ok, _, html} =
live_isolated(conn, @component, session: %{"user" => nil, "streamer" => streamer})
assert String.contains?(html, "Report User") == false
end
end
describe "report button authed user" do
setup [:register_and_log_in_user, :create_streamer]
test "shows a report button for another user", %{
conn: conn,
user: user,
streamer: streamer
} do
{:ok, _, html} =
live_isolated(conn, @component, session: %{"user" => user, "streamer" => streamer})
assert html =~ "Report User"
assert html =~ "class=\"text-danger\""
end
test "can report another user", %{
conn: conn,
user: user,
streamer: streamer
} do
admin = admin_fixture()
{:ok, view, _} =
live_isolated(conn, @component, session: %{"user" => user, "streamer" => streamer})
button = view |> element("a", "Report User") |> render_click()
# Should render a modal at this point
assert button =~ "Submit Report"
assert render_submit(view, "save", %{"report_reason" => "other", "notes" => "Some notes"}) =~
"Report submitted, thank you!"
email =
GlimeshWeb.Emails.Email.user_report_alert(admin, user, streamer, "other", "Some notes")
assert_delivered_email(email)
end
end
end
| 28.375 | 99 | 0.638216 |
f7bc81897faf2743c7bfa63c63a006977f763183 | 4,835 | ex | Elixir | assets/node_modules/phoenix/lib/phoenix/router/scope.ex | xiongxin/web_chat | 4123887993083058a358358595c06970a5ac873f | [
"MIT"
] | null | null | null | assets/node_modules/phoenix/lib/phoenix/router/scope.ex | xiongxin/web_chat | 4123887993083058a358358595c06970a5ac873f | [
"MIT"
] | null | null | null | assets/node_modules/phoenix/lib/phoenix/router/scope.ex | xiongxin/web_chat | 4123887993083058a358358595c06970a5ac873f | [
"MIT"
] | null | null | null | defmodule Phoenix.Router.Scope do
alias Phoenix.Router.Scope
@moduledoc false
@stack :phoenix_router_scopes
@pipes :phoenix_pipeline_scopes
defstruct path: nil, alias: nil, as: nil, pipes: [], host: nil, private: %{}, assigns: %{}
@doc """
Initializes the scope.
"""
def init(module) do
  # Start with a single root scope on the stack and no pipelines defined.
  Module.put_attribute(module, @stack, [%Scope{}])
  Module.put_attribute(module, @pipes, MapSet.new)
end
@doc """
Builds a route based on the top of the stack.
"""
def route(line, module, kind, verb, path, plug, plug_opts, opts) do
  path = validate_path(path)
  private = Keyword.get(opts, :private, %{})
  assigns = Keyword.get(opts, :assigns, %{})
  # Default helper name comes from the plug module, e.g. UserController -> "user".
  as = Keyword.get(opts, :as, Phoenix.Naming.resource_name(plug, "Controller"))

  # Merge this route's own values with everything accumulated on the scope stack.
  {path, host, alias, as, pipes, private, assigns} =
    join(module, path, plug, as, private, assigns)

  Phoenix.Router.Route.build(line, kind, verb, path, host, alias, plug_opts, as, pipes, private, assigns)
end
@doc """
Validates a path is a string and contains a leading prefix.
"""
def validate_path("/" <> _ = path), do: path

def validate_path(path) when is_binary(path) do
  # Lenient path for strings without a leading slash: warn with the caller's
  # stacktrace, then prepend the slash.
  IO.write :stderr, """
  warning: router paths should begin with a forward slash, got: #{inspect path}
  #{Exception.format_stacktrace}
  """

  "/" <> path
end

def validate_path(path) do
  raise ArgumentError, "router paths must be strings, got: #{inspect path}"
end
@doc """
Defines the given pipeline.
"""
def pipeline(module, pipe) when is_atom(pipe) do
  # Pipelines are kept in a set, so registering the same name twice is a no-op here.
  update_pipes module, &MapSet.put(&1, pipe)
end
@doc """
Appends the given pipes to the current scope pipe through.
"""
def pipe_through(module, pipes) do
  # Accept a single pipe or a list of pipes.
  pipes = List.wrap(pipes)

  # Only the innermost (top-of-stack) scope is affected.
  update_stack(module, fn [scope|stack] ->
    scope = put_in scope.pipes, scope.pipes ++ pipes
    [scope|stack]
  end)
end
@doc """
Pushes a scope into the module stack.
"""
def push(module, path) when is_binary(path) do
  push(module, path: path)
end

def push(module, opts) when is_list(opts) do
  # Pre-split the scope path into segments; when :path is absent the `with`
  # short-circuits and the stored path stays nil.
  path = with path when not is_nil(path) <- Keyword.get(opts, :path),
              path <- validate_path(path),
              do: Plug.Router.Utils.split(path)

  alias = Keyword.get(opts, :alias)
  # Store the alias as a string so nested scope aliases can be concatenated later.
  alias = alias && Atom.to_string(alias)

  scope = %Scope{path: path,
                 alias: alias,
                 as: Keyword.get(opts, :as),
                 host: Keyword.get(opts, :host),
                 pipes: [],
                 private: Keyword.get(opts, :private, %{}),
                 assigns: Keyword.get(opts, :assigns, %{})}

  update_stack(module, fn stack -> [scope|stack] end)
end
@doc """
Pops a scope from the module stack.
"""
def pop(module) do
  update_stack(module, fn [_|stack] -> stack end)
end
@doc """
Returns true if the module's definition is currently within a scope block
"""
# The root scope always exists, so "inside a scope" means more than one entry.
def inside_scope?(module), do: length(get_stack(module)) > 1
# Combines the scope stack with a route's own path/alias/as/private/assigns,
# returning everything Route.build/11 needs.
defp join(module, path, alias, as, private, assigns) do
  stack = get_stack(module)

  {join_path(stack, path), find_host(stack), join_alias(stack, alias),
   join_as(stack, as), join_pipe_through(stack), join_private(stack, private),
   join_assigns(stack, assigns)}
end
# Prefixes the route path with every scope path, outermost scope first.
defp join_path(stack, path) do
  "/" <>
    ([Plug.Router.Utils.split(path)|extract(stack, :path)]
     |> Enum.reverse()
     |> Enum.concat()
     |> Enum.join("/"))
end
# Nests the controller alias under every scope alias (outermost first).
defp join_alias(stack, alias) when is_atom(alias) do
  [alias|extract(stack, :alias)]
  |> Enum.reverse()
  |> Module.concat()
end

# A nil `as` disables helper generation for the route.
defp join_as(_stack, nil), do: nil

# Joins the scopes' :as values and the route's own into a snake_case helper name.
defp join_as(stack, as) when is_atom(as) or is_binary(as) do
  [as|extract(stack, :as)]
  |> Enum.reverse()
  |> Enum.join("_")
end
# Merges scope-level private data over the route's, reducing from the
# innermost scope outwards.
defp join_private(stack, private) do
  Enum.reduce stack, private, &Map.merge(&1.private, &2)
end

defp join_assigns(stack, assigns) do
  Enum.reduce stack, assigns, &Map.merge(&1.assigns, &2)
end

# Flattens every scope's pipe_through list, outermost scope first.
defp join_pipe_through(stack) do
  for scope <- Enum.reverse(stack),
      item <- scope.pipes,
      do: item
end
# The innermost scope that sets a host wins.
defp find_host(stack) do
  Enum.find_value(stack, & &1.host)
end

# Collects the given attribute from every scope; `item = ...` also acts as a
# filter, skipping nil/false values.
defp extract(stack, attr) do
  for scope <- stack,
      item = Map.fetch!(scope, attr),
      do: item
end

defp get_stack(module) do
  get_attribute(module, @stack)
end

defp update_stack(module, fun) do
  update_attribute(module, @stack, fun)
end

defp update_pipes(module, fun) do
  update_attribute(module, @pipes, fun)
end

# Raises when init/1 was never called for this router module.
defp get_attribute(module, attr) do
  Module.get_attribute(module, attr) ||
    raise "Phoenix router scope was not initialized"
end

defp update_attribute(module, attr, fun) do
  Module.put_attribute(module, attr, fun.(get_attribute(module, attr)))
end
end
| 27.011173 | 107 | 0.633919 |
f7bc8545a7bcecdd942f73a8f88ecfc49d340b78 | 6,856 | ex | Elixir | clients/cloud_functions/lib/google_api/cloud_functions/v1/api/operations.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/cloud_functions/lib/google_api/cloud_functions/v1/api/operations.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/cloud_functions/lib/google_api/cloud_functions/v1/api/operations.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CloudFunctions.V1.Api.Operations do
  @moduledoc """
  API calls for all endpoints tagged `Operations`.
  """

  alias GoogleApi.CloudFunctions.V1.Connection
  alias GoogleApi.Gax.{Request, Response}

  # Query-string mapping for the standard parameters that every endpoint in
  # this API accepts (auth, response formatting, quota attribution, etc.).
  @common_query_params %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :upload_protocol => :query,
    :uploadType => :query
  }

  @doc """
  Gets the latest state of a long-running operation. Clients can use this
  method to poll the operation result at intervals as recommended by the API
  service.

  ## Parameters

  - connection (GoogleApi.CloudFunctions.V1.Connection): Connection to server
  - operations_id (String.t): Part of `name`. The name of the operation resource.
  - optional_params (KeywordList): [optional] Standard query parameters
    (`:alt`, `:fields`, `:key`, `:oauth_token`, `:quotaUser`, ...)

  ## Returns

  {:ok, %GoogleApi.CloudFunctions.V1.Model.Operation{}} on success
  {:error, info} on failure
  """
  @spec cloudfunctions_operations_get(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.CloudFunctions.V1.Model.Operation.t()} | {:error, Tesla.Env.t()}
  def cloudfunctions_operations_get(connection, operations_id, optional_params \\ [], opts \\ []) do
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/v1/operations/{operationsId}", %{
        "operationsId" => URI.encode_www_form(operations_id)
      })
      |> Request.add_optional_params(@common_query_params, optional_params)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.CloudFunctions.V1.Model.Operation{}])
  end

  @doc """
  Lists operations that match the specified filter in the request. If the
  server doesn't support this method, it returns `UNIMPLEMENTED`.

  NOTE: the `name` binding allows API services to override the binding to use
  different resource name schemes, such as `users/*/operations`.

  ## Parameters

  - connection (GoogleApi.CloudFunctions.V1.Connection): Connection to server
  - optional_params (KeywordList): [optional] Standard query parameters, plus:
    - :filter (String.t): The standard list filter.
    - :name (String.t): The name of the operation's parent resource.
    - :pageSize (integer()): The standard list page size.
    - :pageToken (String.t): The standard list page token.

  ## Returns

  {:ok, %GoogleApi.CloudFunctions.V1.Model.ListOperationsResponse{}} on success
  {:error, info} on failure
  """
  @spec cloudfunctions_operations_list(Tesla.Env.client(), keyword()) ::
          {:ok, GoogleApi.CloudFunctions.V1.Model.ListOperationsResponse.t()}
          | {:error, Tesla.Env.t()}
  def cloudfunctions_operations_list(connection, optional_params \\ [], opts \\ []) do
    # The list endpoint accepts the common params plus its own paging/filtering.
    optional_params_config =
      Map.merge(@common_query_params, %{
        :filter => :query,
        :name => :query,
        :pageSize => :query,
        :pageToken => :query
      })

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/v1/operations")
      |> Request.add_optional_params(optional_params_config, optional_params)

    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.CloudFunctions.V1.Model.ListOperationsResponse{}]
    )
  end
end
| 47.282759 | 669 | 0.690198 |
f7bc8901b830363eb8c2a5190c7eb2688361feb1 | 1,053 | ex | Elixir | create_fun_umbrella/apps/create_fun_cms/test/support/conn_case.ex | Vorzious/CreateFun | 5744c913ef706bc29062fa90a8ec5de12d267dab | [
"MIT"
] | null | null | null | create_fun_umbrella/apps/create_fun_cms/test/support/conn_case.ex | Vorzious/CreateFun | 5744c913ef706bc29062fa90a8ec5de12d267dab | [
"MIT"
] | 9 | 2018-06-17T09:54:03.000Z | 2018-06-17T09:55:20.000Z | create_fun_umbrella/apps/create_fun_cms/test/support/conn_case.ex | Vorzious/CreateFun | 5744c913ef706bc29062fa90a8ec5de12d267dab | [
"MIT"
] | 1 | 2018-06-05T18:38:01.000Z | 2018-06-05T18:38:01.000Z | defmodule CreateFunCms.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common datastructures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
import CreateFunCms.Router.Helpers
# The default endpoint for testing
@endpoint CreateFunEndpoint.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(CreateFun.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(CreateFun.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 27 | 71 | 0.723647 |
f7bc8b79b2d807244b7e499c5a5ead65d1c60910 | 1,482 | exs | Elixir | mix.exs | brettwise/recase | 1e5c6d466be999b923ce2344c54de3c87a157ff0 | [
"MIT"
] | null | null | null | mix.exs | brettwise/recase | 1e5c6d466be999b923ce2344c54de3c87a157ff0 | [
"MIT"
] | null | null | null | mix.exs | brettwise/recase | 1e5c6d466be999b923ce2344c54de3c87a157ff0 | [
"MIT"
] | null | null | null | defmodule Recase.Mixfile do
use Mix.Project
@version "0.6.0"
@url "https://github.com/sobolevn/recase"
def project do
[
app: :recase,
version: @version,
elixir: "~> 1.6",
deps: deps(),
# Hex:
docs: docs(),
description: description(),
package: package(),
source_url: @url,
homepage_url: @url,
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
# Test coverage:
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
coveralls: :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test
],
# Dialyzer:
dialyzer: [plt_add_deps: :apps_direct]
]
end
def application do
# Specify extra applications you'll use from Erlang/Elixir
[extra_applications: [:logger]]
end
defp deps do
[
{:excoveralls, "~> 0.5", only: :test},
{:credo, "~> 1.0", only: [:dev, :test], runtime: false},
{:dialyxir, "~> 1.0", only: [:dev], runtime: false},
# Documentation:
{:ex_doc, "~> 0.21.0", only: :dev, runtime: false}
]
end
defp description do
"Convert strings to any case you need."
end
defp docs do
[extras: ["README.md"], main: "readme"]
end
defp package do
[
maintainers: ["Nikita Sobolev"],
licenses: ["MIT"],
links: %{"GitHub" => @url},
files: ~w(mix.exs README.md lib)
]
end
end
| 21.171429 | 62 | 0.554656 |
f7bd20b0d21a8d381a67bf36745116fea96c6268 | 2,038 | exs | Elixir | apps/ewallet_db/priv/repo/migrations/20180704072742_update_transactions_owners.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_db/priv/repo/migrations/20180704072742_update_transactions_owners.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_db/priv/repo/migrations/20180704072742_update_transactions_owners.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWalletDB.Repo.Migrations.UpdateTransactionsOwners do
  use Ecto.Migration

  import Ecto.Query

  alias EWalletDB.Repo

  # Backfills from/to account and user UUIDs on transactions that are missing
  # owner information, by looking the owners up from the wallet rows matching
  # the transaction's from/to addresses. Rows are locked while being updated.
  def up do
    # Select only transactions missing either side's owner columns.
    transactions_query =
      from(t in "transaction",
        select: [t.uuid, t.from, t.to],
        where:
          (is_nil(t.from_account_uuid) and is_nil(t.from_user_uuid)) or
            (is_nil(t.to_account_uuid) and is_nil(t.to_user_uuid)),
        lock: "FOR UPDATE"
      )

    for [uuid, from, to] <- Repo.all(transactions_query) do
      [from_account_uuid, from_user_uuid] = wallet_owner(from)
      [to_account_uuid, to_user_uuid] = wallet_owner(to)

      # NB: renamed from `query` to avoid shadowing the outer query binding.
      update_query =
        from(t in "transaction",
          where: t.uuid == ^uuid,
          update: [
            set: [
              from_account_uuid: ^from_account_uuid,
              from_user_uuid: ^from_user_uuid,
              to_account_uuid: ^to_account_uuid,
              to_user_uuid: ^to_user_uuid
            ]
          ]
        )

      Repo.update_all(update_query, [])
    end
  end

  # Returns [account_uuid, user_uuid] for the wallet at the given address.
  # Crashes (MatchError at the call site) if no wallet row exists for the
  # address — consistent with the original inline lookups.
  defp wallet_owner(address) do
    Repo.one(
      from(t in "wallet",
        select: [t.account_uuid, t.user_uuid],
        where: t.address == ^address
      )
    )
  end

  def down do
    # Data backfill only; nothing to revert.
  end
end
| 36.392857 | 85 | 0.557409 |
f7bd6503f75b9e46047726e617ef786db854bce3 | 358 | exs | Elixir | config/dev.sample.exs | beamkenya/ex_jenga | 03a936a04d99614043d120d0e3ee787f1b8a5b8d | [
"AML",
"MIT"
] | 1 | 2021-09-14T09:50:22.000Z | 2021-09-14T09:50:22.000Z | config/dev.sample.exs | beamkenya/ex_jenga | 03a936a04d99614043d120d0e3ee787f1b8a5b8d | [
"AML",
"MIT"
] | 15 | 2021-04-23T11:28:49.000Z | 2021-06-23T04:42:35.000Z | config/dev.sample.exs | beamkenya/ex_jenga | 03a936a04d99614043d120d0e3ee787f1b8a5b8d | [
"AML",
"MIT"
] | null | null | null | use Mix.Config
config :tesla, adapter: Tesla.Adapter.Hackney
config :ex_jenga,
# When changed to "false" one will use the live endpoint url
sandbox: true,
jenga: [
api_key: "=======API KEY HERE ========",
username: "=====USERNAME HERE=====",
password: "=======PASSWORD HERE =======",
private_key: "=======PRIVATE KEY HERE ======="
]
| 27.538462 | 62 | 0.578212 |
f7bd68ad09a40e63dd6d624d9004ca820b850ffb | 2,353 | ex | Elixir | clients/bigtable_admin/lib/google_api/bigtable_admin/v2/model/list_instances_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/bigtable_admin/lib/google_api/bigtable_admin/v2/model/list_instances_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/bigtable_admin/lib/google_api/bigtable_admin/v2/model/list_instances_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigtableAdmin.V2.Model.ListInstancesResponse do
  @moduledoc """
  Response message for BigtableInstanceAdmin.ListInstances.

  ## Attributes

  *   `failedLocations` (*type:* `list(String.t)`, *default:* `nil`) - Locations from which Instance information could not be retrieved,
      due to an outage or some other transient condition.
      Instances whose Clusters are all in one of the failed locations
      may be missing from `instances`, and Instances with at least one
      Cluster in a failed location may only have partial information returned.
      Values are of the form `projects/<project>/locations/<zone_id>`
  *   `instances` (*type:* `list(GoogleApi.BigtableAdmin.V2.Model.Instance.t)`, *default:* `nil`) - The list of requested instances.
  *   `nextPageToken` (*type:* `String.t`, *default:* `nil`) - DEPRECATED: This field is unused and ignored.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          failedLocations: list(String.t()),
          instances: list(GoogleApi.BigtableAdmin.V2.Model.Instance.t()),
          nextPageToken: String.t()
        }

  field :failedLocations, type: :list
  field :instances, as: GoogleApi.BigtableAdmin.V2.Model.Instance, type: :list
  field :nextPageToken
end
defimpl Poison.Decoder, for: GoogleApi.BigtableAdmin.V2.Model.ListInstancesResponse do
  # Delegate decoding to the generated model's own decode/2.
  def decode(struct, options) do
    GoogleApi.BigtableAdmin.V2.Model.ListInstancesResponse.decode(struct, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.BigtableAdmin.V2.Model.ListInstancesResponse do
  # All generated models share the Gax base encoder.
  def encode(struct, options), do: GoogleApi.Gax.ModelBase.encode(struct, options)
end
| 40.568966 | 136 | 0.734807 |
f7bd8207a9f6350ead46c996d11115afb2eb73c0 | 824 | ex | Elixir | lib/amazon_product_advertising_client/item_search.ex | mikeover/elixir-amazon-product-advertising-client | 33bce0e0d9fe9c934eb036a4deb1198ca95aef67 | [
"MIT"
] | null | null | null | lib/amazon_product_advertising_client/item_search.ex | mikeover/elixir-amazon-product-advertising-client | 33bce0e0d9fe9c934eb036a4deb1198ca95aef67 | [
"MIT"
] | null | null | null | lib/amazon_product_advertising_client/item_search.ex | mikeover/elixir-amazon-product-advertising-client | 33bce0e0d9fe9c934eb036a4deb1198ca95aef67 | [
"MIT"
] | 1 | 2018-07-15T05:14:55.000Z | 2018-07-15T05:14:55.000Z | defmodule AmazonProductAdvertisingClient.ItemSearch do
@moduledoc """
The [ItemSearch](http://docs.aws.amazon.com/AWSECommerceService/latest/DG/ItemSearch.html) operation
"""
alias __MODULE__
alias AmazonProductAdvertisingClient.Config
defstruct "Availability": "Available",
"BrowseNode": nil,
"BrowseNodeId": nil,
"Condition": "New",
"ItemPage": nil,
"Keywords": nil,
"MaximumPrice": nil,
"MinimumPrice": nil,
"Operation": "ItemSearch",
"ResponseGroup": nil,
"SearchIndex": "All",
"Sort": nil,
"Title": nil,
"MerchantId": "Amazon",
@doc """
Execute an ItemSearch operation
"""
def execute(search_params \\ %ItemSearch{}, config \\ %Config{}) do
AmazonProductAdvertisingClient.call_api search_params, config
end
end
| 25.75 | 102 | 0.657767 |
f7bd9f5d42227af173f974d660f40645e63c5d47 | 1,085 | exs | Elixir | mix.exs | lbighetti/exop | 206381c493610d3161f8fddd4d00028477daa7e3 | [
"MIT"
] | null | null | null | mix.exs | lbighetti/exop | 206381c493610d3161f8fddd4d00028477daa7e3 | [
"MIT"
] | null | null | null | mix.exs | lbighetti/exop | 206381c493610d3161f8fddd4d00028477daa7e3 | [
"MIT"
] | null | null | null | defmodule Exop.Mixfile do
use Mix.Project
@description """
A library that provides a few macros which allow
you to encapsulate business logic and validate incoming
params over predefined contract.
"""
def project do
[
app: :exop,
version: "1.4.2",
elixir: ">= 1.6.0",
name: "Exop",
description: @description,
package: package(),
deps: deps(),
source_url: "https://github.com/madeinussr/exop",
docs: [extras: ["README.md"]],
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod
]
end
def application do
[
applications: [:logger]
]
end
defp deps do
[
{:mock, "~> 0.1", only: :test},
{:ex_doc, "~> 0.20", only: [:dev, :test, :docs]},
{:dialyxir, "~> 1.0.0-rc.4", only: [:dev], runtime: false}
]
end
defp package do
[
files: ["lib", "mix.exs", "README.md", "LICENSE"],
maintainers: ["Andrey Chernykh"],
licenses: ["MIT"],
links: %{"Github" => "https://github.com/madeinussr/exop"}
]
end
end
| 22.142857 | 64 | 0.556682 |
f7bdae862623f3d1b6ed923087cb6f01c3d5a432 | 3,842 | exs | Elixir | elixir/ocr-numbers/test/ocr_numbers_test.exs | paulfioravanti/exercism | b3bf15814b89b0ef50b9bf062b3c800e80e37e75 | [
"MIT"
] | 6 | 2019-06-19T15:43:20.000Z | 2020-07-17T19:46:09.000Z | elixir/ocr-numbers/test/ocr_numbers_test.exs | paulfioravanti/exercism | b3bf15814b89b0ef50b9bf062b3c800e80e37e75 | [
"MIT"
] | 10 | 2021-05-10T21:02:55.000Z | 2021-05-11T20:29:41.000Z | elixir/ocr-numbers/test/ocr_numbers_test.exs | paulfioravanti/exercism | b3bf15814b89b0ef50b9bf062b3c800e80e37e75 | [
"MIT"
] | 1 | 2019-06-25T10:42:14.000Z | 2019-06-25T10:42:14.000Z | defmodule OcrNumbersTest do
use ExUnit.Case
test "Recognizes 0" do
number =
OcrNumbers.convert([
" _ ",
"| |",
"|_|",
" "
])
assert number == {:ok, "0"}
end
test "Recognizes 1" do
number =
OcrNumbers.convert([
" ",
" |",
" |",
" "
])
assert number == {:ok, "1"}
end
test "Unreadable but correctly sized inputs return ?" do
number =
OcrNumbers.convert([
" ",
" _",
" |",
" "
])
assert number == {:ok, "?"}
end
test "Input with a number of lines that is not a multiple of four raises an error" do
number =
OcrNumbers.convert([
" _ ",
"| |",
" "
])
assert number == {:error, 'invalid line count'}
end
test "Input with a number of columns that is not a multiple of three raises an error" do
number =
OcrNumbers.convert([
" ",
" |",
" |",
" "
])
assert number == {:error, 'invalid column count'}
end
test "Recognizes 110101100" do
number =
OcrNumbers.convert([
" _ _ _ _ ",
" | || | || | | || || |",
" | ||_| ||_| | ||_||_|",
" "
])
assert number == {:ok, "110101100"}
end
test "Garbled numbers in a string are replaced with ?" do
number =
OcrNumbers.convert([
" _ _ _ ",
" | || | || | || || |",
" | | _| ||_| | ||_||_|",
" "
])
assert number == {:ok, "11?10?1?0"}
end
test "Recognizes 2" do
number =
OcrNumbers.convert([
" _ ",
" _|",
"|_ ",
" "
])
assert number == {:ok, "2"}
end
test "Recognizes 3" do
number =
OcrNumbers.convert([
" _ ",
" _|",
" _|",
" "
])
assert number == {:ok, "3"}
end
test "Recognizes 4" do
number =
OcrNumbers.convert([
" ",
"|_|",
" |",
" "
])
assert number == {:ok, "4"}
end
test "Recognizes 5" do
number =
OcrNumbers.convert([
" _ ",
"|_ ",
" _|",
" "
])
assert number == {:ok, "5"}
end
test "Recognizes 6" do
number =
OcrNumbers.convert([
" _ ",
"|_ ",
"|_|",
" "
])
assert number == {:ok, "6"}
end
test "Regonizes 7" do
number =
OcrNumbers.convert([
" _ ",
" |",
" |",
" "
])
assert number == {:ok, "7"}
end
test "Recognizes 8" do
number =
OcrNumbers.convert([
" _ ",
"|_|",
"|_|",
" "
])
assert number == {:ok, "8"}
end
test "Recognizes 9" do
number =
OcrNumbers.convert([
" _ ",
"|_|",
" _|",
" "
])
assert number == {:ok, "9"}
end
test "Recognizes string of decimal numbers" do
number =
OcrNumbers.convert([
" _ _ _ _ _ _ _ _ ",
" | _| _||_||_ |_ ||_||_|| |",
" ||_ _| | _||_| ||_| _||_|",
" "
])
assert number == {:ok, "1234567890"}
end
test "Numbers separated by empty lines are recognized. Lines are joined by commas." do
number =
OcrNumbers.convert([
" _ _ ",
" | _| _|",
" ||_ _|",
" ",
" _ _ ",
"|_||_ |_ ",
" | _||_|",
" ",
" _ _ _ ",
" ||_||_|",
" ||_| _|",
" "
])
assert number == {:ok, "123,456,789"}
end
end
| 17.869767 | 90 | 0.367257 |
f7bddb80d8158c2189bd3d0603a49ec7dd5bd5c4 | 49 | ex | Elixir | lib/novel_reader/format/gravitytales.ex | jinyeow/novel_reader | 090c1076110f441d82d99f27f82958b79bf73d63 | [
"MIT"
] | null | null | null | lib/novel_reader/format/gravitytales.ex | jinyeow/novel_reader | 090c1076110f441d82d99f27f82958b79bf73d63 | [
"MIT"
] | null | null | null | lib/novel_reader/format/gravitytales.ex | jinyeow/novel_reader | 090c1076110f441d82d99f27f82958b79bf73d63 | [
"MIT"
] | null | null | null | defmodule NovelReader.Format.Gravitytales do
end
| 16.333333 | 44 | 0.877551 |
f7bdde1c6b59bc1b8ef1b6a1371a1150b97f0b81 | 945 | ex | Elixir | debian/postrm.ex | michaelw/python-ssh-ldap-pubkey-deb | 34d50226d9aa3c47b10247b82d5cbf75984eff5b | [
"MIT"
] | null | null | null | debian/postrm.ex | michaelw/python-ssh-ldap-pubkey-deb | 34d50226d9aa3c47b10247b82d5cbf75984eff5b | [
"MIT"
] | null | null | null | debian/postrm.ex | michaelw/python-ssh-ldap-pubkey-deb | 34d50226d9aa3c47b10247b82d5cbf75984eff5b | [
"MIT"
] | null | null | null | #!/bin/sh
# postrm script for python-ssh-ldap-pubkey
#
# see: dh_installdeb(1)
set -e
# summary of how this script can be called:
# * <postrm> `remove'
# * <postrm> `purge'
# * <old-postrm> `upgrade' <new-version>
# * <new-postrm> `failed-upgrade' <old-version>
# * <new-postrm> `abort-install'
# * <new-postrm> `abort-install' <old-version>
# * <new-postrm> `abort-upgrade' <old-version>
# * <disappearer's-postrm> `disappear' <overwriter>
# <overwriter-version>
# for details, see http://www.debian.org/doc/debian-policy/ or
# the debian-policy package
case "$1" in
purge|remove|upgrade|failed-upgrade|abort-install|abort-upgrade|disappear)
;;
*)
echo "postrm called with unknown argument \`$1'" >&2
exit 1
;;
esac
# dh_installdeb will replace this with shell code automatically
# generated by other debhelper scripts.
#DEBHELPER#
exit 0
| 24.868421 | 78 | 0.630688 |
f7be6fcca15cd912ac3fa8877548f32f48e84563 | 611 | ex | Elixir | apps/artemis/lib/artemis/contexts/user_recognition/create_user_recognition.ex | artemis-platform/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | 2 | 2020-04-23T02:29:18.000Z | 2020-07-07T13:13:17.000Z | apps/artemis/lib/artemis/contexts/user_recognition/create_user_recognition.ex | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | 4 | 2020-04-26T20:35:36.000Z | 2020-11-10T22:13:19.000Z | apps/artemis/lib/artemis/contexts/user_recognition/create_user_recognition.ex | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | null | null | null | defmodule Artemis.CreateUserRecognition do
use Artemis.Context
alias Artemis.Repo
alias Artemis.UserRecognition
def call!(params, user) do
case call(params, user) do
{:error, _} -> raise(Artemis.Context.Error, "Error creating user recognition")
{:ok, result} -> result
end
end
def call(params, user) do
with_transaction(fn ->
params
|> insert_record
|> Event.broadcast("user-recognition:created", params, user)
end)
end
defp insert_record(params) do
%UserRecognition{}
|> UserRecognition.changeset(params)
|> Repo.insert()
end
end
| 21.821429 | 84 | 0.671031 |
f7be80fa2ca0996df0cdf7e826edafb58da83f72 | 35,852 | ex | Elixir | lib/chat_api/slack/helpers.ex | jalford14/papercups | fbe5e5ce4ff51702a0f898c39a9be47c33e9cb4a | [
"MIT"
] | null | null | null | lib/chat_api/slack/helpers.ex | jalford14/papercups | fbe5e5ce4ff51702a0f898c39a9be47c33e9cb4a | [
"MIT"
] | null | null | null | lib/chat_api/slack/helpers.ex | jalford14/papercups | fbe5e5ce4ff51702a0f898c39a9be47c33e9cb4a | [
"MIT"
] | null | null | null | defmodule ChatApi.Slack.Helpers do
@moduledoc """
Utility methods for interacting with Slack
"""
require Logger
alias ChatApi.{
Companies,
Conversations,
Customers,
Slack,
SlackAuthorizations,
SlackConversationThreads,
Users
}
alias ChatApi.Conversations.Conversation
alias ChatApi.Customers.Customer
alias ChatApi.Messages.Message
alias ChatApi.SlackAuthorizations.SlackAuthorization
alias ChatApi.SlackConversationThreads.SlackConversationThread
alias ChatApi.Users.User
@spec get_user_email(binary(), binary()) :: nil | binary()
def get_user_email(slack_user_id, access_token) do
case Slack.Client.retrieve_user_info(slack_user_id, access_token) do
{:ok, nil} ->
Logger.debug("Invalid Slack token - returning nil for user email")
nil
{:ok, response} ->
try do
Slack.Extractor.extract_slack_user_email!(response)
rescue
error ->
Logger.error("Unable to retrieve Slack user email: #{inspect(error)}")
nil
end
error ->
Logger.error("Unable to retrieve Slack user info: #{inspect(error)}")
nil
end
end
@spec get_slack_username(binary(), binary()) :: nil | binary()
def get_slack_username(slack_user_id, access_token) do
with {:ok, response} <- Slack.Client.retrieve_user_info(slack_user_id, access_token),
%{body: %{"ok" => true, "user" => %{"name" => username} = user}} <- response do
[
get_in(user, ["profile", "display_name"]),
get_in(user, ["profile", "real_name"]),
username
]
|> Enum.filter(fn value ->
case value do
nil -> false
"" -> false
value when not is_binary(value) -> false
_value -> true
end
end)
|> List.first()
else
error ->
Logger.error("Unable to retrieve Slack username: #{inspect(error)}")
nil
end
end
  @spec find_or_create_customer_from_slack_event(SlackAuthorization.t(), map()) ::
          {:ok, Customer.t()} | {:error, any()}
  # Resolves the author of a Slack event to a customer, creating one if
  # necessary. Human-authored events carry "channel"/"user" keys; bot-authored
  # events carry a "bot" key instead.
  def find_or_create_customer_from_slack_event(authorization, %{
        "channel" => slack_channel_id,
        "user" => slack_user_id
      })
      when not is_nil(slack_user_id) and not is_nil(slack_channel_id) do
    find_or_create_customer_from_slack_user_id(authorization, slack_user_id, slack_channel_id)
  end

  def find_or_create_customer_from_slack_event(authorization, %{"bot" => slack_bot_id})
      when not is_nil(slack_bot_id) do
    find_or_create_customer_from_slack_bot_id(authorization, slack_bot_id)
  end
  @spec find_or_create_customer_from_slack_bot_id(any(), binary()) ::
          {:ok, Customer.t()} | {:error, any()}
  # Looks up a Slack bot via the Slack API and finds (or creates) a customer
  # keyed by the bot's Slack ID as `external_id` on the authorization's account.
  def find_or_create_customer_from_slack_bot_id(authorization, slack_bot_id) do
    with %{access_token: access_token, account_id: account_id} <- authorization,
         {:ok, %{body: %{"ok" => true, "bot" => bot}}} <-
           Slack.Client.retrieve_bot_info(slack_bot_id, access_token) do
      attrs = customer_params_for_slack_bot(bot)

      Customers.find_or_create_by_external_id(slack_bot_id, account_id, attrs)
    else
      # NB: This may occur in test mode, or when the Slack.Client is disabled
      {:ok, error} ->
        Logger.error("Error creating customer from Slack bot user: #{inspect(error)}")

        error

      error ->
        Logger.error("Error creating customer from Slack bot user: #{inspect(error)}")

        error
    end
  end
  @spec find_or_create_customer_from_slack_user_id(any(), binary(), binary()) ::
          {:ok, Customer.t()} | {:error, any()}
  # Resolves a Slack user to a customer by email, creating the customer if the
  # email is not yet known. When the Slack channel is linked to a company, the
  # customer is associated with that company.
  def find_or_create_customer_from_slack_user_id(authorization, slack_user_id, slack_channel_id) do
    with %{access_token: access_token, account_id: account_id} <- authorization,
         {:ok, %{body: %{"ok" => true, "user" => user}}} <-
           Slack.Client.retrieve_user_info(slack_user_id, access_token),
         %{"profile" => %{"email" => email}} <- user do
      # Attach a company when the Slack channel maps to one; otherwise no-op.
      company_attrs =
        case Companies.find_by_slack_channel(account_id, slack_channel_id) do
          %{id: company_id} -> %{company_id: company_id}
          _ -> %{}
        end

      attrs = customer_params_for_slack_user(user, company_attrs)

      Customers.find_or_create_by_email(email, account_id, attrs)
    else
      # NB: This may occur in test mode, or when the Slack.Client is disabled
      {:ok, error} ->
        Logger.error("Error creating customer from Slack user: #{inspect(error)}")

        error

      error ->
        Logger.error("Error creating customer from Slack user: #{inspect(error)}")

        error
    end
  end
  @spec create_or_update_customer_from_slack_event(SlackAuthorization.t(), map()) ::
          {:ok, Customer.t()} | {:error, any()}
  # Upserts the customer behind a Slack event. Human-authored events carry
  # "channel"/"user" keys; bot-authored events carry a "bot" key instead.
  def create_or_update_customer_from_slack_event(authorization, %{
        "channel" => slack_channel_id,
        "user" => slack_user_id
      })
      when not is_nil(slack_user_id) and not is_nil(slack_channel_id) do
    create_or_update_customer_from_slack_user_id(authorization, slack_user_id, slack_channel_id)
  end

  def create_or_update_customer_from_slack_event(authorization, %{"bot" => slack_bot_id})
      when not is_nil(slack_bot_id) do
    create_or_update_customer_from_slack_bot_id(authorization, slack_bot_id)
  end
  @spec create_or_update_customer_from_slack_bot_id(any(), binary()) ::
          {:ok, Customer.t()} | {:error, any()}
  # Looks up a Slack bot via the Slack API and upserts the matching customer
  # (keyed by the bot's Slack ID as `external_id`).
  def create_or_update_customer_from_slack_bot_id(authorization, slack_bot_id) do
    with %{access_token: access_token, account_id: account_id} <- authorization,
         {:ok, %{body: %{"ok" => true, "bot" => bot}}} <-
           Slack.Client.retrieve_bot_info(slack_bot_id, access_token) do
      create_or_update_customer_from_slack_bot(bot, account_id)
    else
      # NB: This may occur in test mode, or when the Slack.Client is disabled
      {:ok, error} ->
        Logger.error("Error creating customer from Slack bot user: #{inspect(error)}")

        error

      error ->
        Logger.error("Error creating customer from Slack bot user: #{inspect(error)}")

        error
    end
  end
  # NB: this is basically the same as `find_or_create_customer_from_slack_user_id` above,
  # but keeping both with duplicate code for now since we may get rid of one in the near future
  @spec create_or_update_customer_from_slack_user_id(any(), binary(), binary()) ::
          {:ok, Customer.t()} | {:error, any()}
  # Upserts the customer for a Slack user, associating a company when the
  # Slack channel maps to one on this account.
  def create_or_update_customer_from_slack_user_id(authorization, slack_user_id, slack_channel_id) do
    with %{access_token: access_token, account_id: account_id} <- authorization,
         {:ok, %{body: %{"ok" => true, "user" => user}}} <-
           Slack.Client.retrieve_user_info(slack_user_id, access_token) do
      case Companies.find_by_slack_channel(account_id, slack_channel_id) do
        %{id: company_id} ->
          create_or_update_customer_from_slack_user(user, account_id, %{company_id: company_id})

        _ ->
          create_or_update_customer_from_slack_user(user, account_id)
      end
    else
      # NB: This may occur in test mode, or when the Slack.Client is disabled
      {:ok, error} ->
        Logger.error("Error creating customer from Slack user: #{inspect(error)}")

        error

      error ->
        Logger.error("Error creating customer from Slack user: #{inspect(error)}")

        error
    end
  end
  @spec create_or_update_customer_from_slack_user_id(any(), binary()) ::
          {:ok, Customer.t()} | {:error, any()}
  # Channel-less variant of create_or_update_customer_from_slack_user_id/3 —
  # upserts the customer without attempting any company association.
  def create_or_update_customer_from_slack_user_id(authorization, slack_user_id) do
    with %{access_token: access_token, account_id: account_id} <- authorization,
         {:ok, %{body: %{"ok" => true, "user" => user}}} <-
           Slack.Client.retrieve_user_info(slack_user_id, access_token) do
      create_or_update_customer_from_slack_user(user, account_id)
    else
      # NB: This may occur in test mode, or when the Slack.Client is disabled
      {:ok, error} ->
        Logger.error("Error creating customer from Slack user: #{inspect(error)}")

        error

      error ->
        Logger.error("Error creating customer from Slack user: #{inspect(error)}")

        error
    end
  end
@spec customer_params_for_slack_user(map(), map()) :: map()
def customer_params_for_slack_user(slack_user, attrs \\ %{})
def customer_params_for_slack_user(%{"profile" => profile} = slack_user, attrs) do
%{
name: Map.get(profile, "real_name"),
time_zone: Map.get(slack_user, "tz"),
profile_photo_url: Map.get(profile, "image_original")
}
|> Enum.reject(fn {_k, v} -> is_nil(v) end)
|> Map.new()
|> Map.merge(attrs)
end
def customer_params_for_slack_user(slack_user, _attrs) do
Logger.error("Unexpected Slack user: #{inspect(slack_user)}")
%{}
end
# Upserts a Customer (keyed by email) from a Slack user payload. `attrs`
# (e.g. company_id) takes precedence over the params derived from the profile.
@spec create_or_update_customer_from_slack_user(map(), binary(), map()) ::
{:ok, Customer.t()} | {:error, any()}
def create_or_update_customer_from_slack_user(slack_user, account_id, attrs \\ %{})
def create_or_update_customer_from_slack_user(
%{"profile" => %{"email" => email}} = slack_user,
account_id,
attrs
) do
params = customer_params_for_slack_user(slack_user, attrs)
Customers.create_or_update_by_email(email, account_id, params)
end
# Without an email there is nothing to key the upsert on, so reject the payload.
def create_or_update_customer_from_slack_user(slack_user, _account_id, _attrs) do
{:error, "Invalid Slack user: #{inspect(slack_user)}"}
end
# Builds customer attributes for a Slack bot payload, keeping only the fields
# that are actually present.
@spec customer_params_for_slack_bot(map()) :: map()
def customer_params_for_slack_bot(slack_bot) do
  candidates = [
    name: Map.get(slack_bot, "name"),
    profile_photo_url: get_in(slack_bot, ["icons", "image_72"])
  ]

  for {field, value} <- candidates, not is_nil(value), into: %{} do
    {field, value}
  end
end
# Upserts a Customer for a Slack bot, keyed by the bot's Slack ID (bots have
# no email address, unlike human Slack users).
@spec create_or_update_customer_from_slack_bot(map(), binary()) ::
{:ok, Customer.t()} | {:error, any()}
def create_or_update_customer_from_slack_bot(slack_bot, account_id)
def create_or_update_customer_from_slack_bot(
%{"id" => slack_bot_id} = slack_bot,
account_id
) do
params = customer_params_for_slack_bot(slack_bot)
Customers.create_or_update_by_external_id(slack_bot_id, account_id, params)
end
# Payloads without an "id" cannot be keyed; reject them.
def create_or_update_customer_from_slack_bot(slack_bot, _account_id) do
{:error, "Invalid Slack bot: #{inspect(slack_bot)}"}
end
# Finds the Customer whose email matches the given Slack user, or nil.
@spec find_matching_customer(SlackAuthorization.t() | nil, binary()) :: Customer.t() | nil
def find_matching_customer(
%SlackAuthorization{access_token: access_token, account_id: account_id},
slack_user_id
) do
slack_user_id
|> get_user_email(access_token)
|> Customers.find_by_email(account_id)
end
def find_matching_customer(_authorization, _slack_user_id), do: nil
# Finds the internal (agent) User whose email matches the given Slack user,
# or nil.
@spec find_matching_user(SlackAuthorization.t(), binary()) :: User.t() | nil
def find_matching_user(
%SlackAuthorization{access_token: access_token, account_id: account_id},
slack_user_id
) do
slack_user_id
|> get_user_email(access_token)
|> Users.find_user_by_email(account_id)
end
def find_matching_user(_authorization, _slack_user_id), do: nil
# Bots are matched by external ID rather than email (bots have no email).
@spec find_matching_bot_customer(any(), binary()) :: Customer.t() | nil
def find_matching_bot_customer(%SlackAuthorization{account_id: account_id}, slack_bot_id) do
Customers.find_by_external_id(slack_bot_id, account_id)
end
def find_matching_bot_customer(_authorization, _slack_bot_id), do: nil
# Resolves the internal user ID for the Slack sender, or returns `fallback`
# when no matching agent account exists.
@spec get_admin_sender_id(any(), binary(), binary()) :: binary()
def get_admin_sender_id(authorization, slack_user_id, fallback) do
  authorization
  |> find_matching_user(slack_user_id)
  |> case do
    %{id: id} -> id
    _no_match -> fallback
  end
end
@doc """
Checks for a matching `User` for the Slack message event if the accumulator is `nil`.
If a matching `User` or `Customer` has already been found, just return it.
"""
@spec maybe_find_user(User.t() | Customer.t() | nil, SlackAuthorization.t(), map()) ::
User.t() | Customer.t() | nil
def maybe_find_user(nil, authorization, %{"user" => slack_user_id}) do
find_matching_user(authorization, slack_user_id)
end
# Accumulator already resolved: pass it through untouched.
def maybe_find_user(%User{} = user, _, _), do: user
def maybe_find_user(%Customer{} = customer, _, _), do: customer
# Event without a "user" key: nothing to look up.
def maybe_find_user(nil, _, _), do: nil
@doc """
Checks for a matching `Customer` for the Slack message event if the accumulator is `nil`.
If a matching `User` or `Customer` has already been found, just return it.
"""
@spec maybe_find_customer(User.t() | Customer.t() | nil, SlackAuthorization.t(), map()) ::
User.t() | Customer.t() | nil
# Bot events are matched by bot_id; human events by user id (clause order
# matters: a payload could in principle contain both keys).
def maybe_find_customer(nil, authorization, %{"bot_id" => slack_bot_id}) do
find_matching_bot_customer(authorization, slack_bot_id)
end
def maybe_find_customer(nil, authorization, %{"user" => slack_user_id}) do
find_matching_customer(authorization, slack_user_id)
end
def maybe_find_customer(%Customer{} = customer, _, _), do: customer
def maybe_find_customer(%User{} = user, _, _), do: user
def maybe_find_customer(nil, _, _), do: nil
@doc """
Fetches the matching `User` or `Customer` for the Slack message event.

Agents (`User`) take precedence over customers; anything unexpected is
normalized down to `nil`.
"""
@spec get_sender_info(SlackAuthorization.t(), map()) :: User.t() | Customer.t() | nil
def get_sender_info(authorization, slack_message) do
  sender =
    nil
    |> maybe_find_user(authorization, slack_message)
    |> maybe_find_customer(authorization, slack_message)

  case sender do
    %User{} = user -> user
    %Customer{} = customer -> customer
    _ -> nil
  end
end
@doc """
Updates the params with a "user_id" field if a "customer_id" has not already been set.
"""
@spec maybe_set_user_id(map(), SlackAuthorization.t(), map()) :: map()
def maybe_set_user_id(%{"customer_id" => customer_id} = params, _authorization, _event)
when not is_nil(customer_id),
do: params
def maybe_set_user_id(params, authorization, %{"user" => slack_user_id}) do
case find_matching_user(authorization, slack_user_id) do
%User{id: user_id} ->
Map.merge(params, %{"user_id" => user_id})
_ ->
params
end
end
def maybe_set_user_id(params, _authorization, _event), do: params
@doc """
Updates the params with a "customer_id" field if a "user_id" has not already been set.
"""
@spec maybe_set_customer_id(map(), SlackAuthorization.t(), map()) :: map()
def maybe_set_customer_id(%{"user_id" => user_id} = params, _authorization, _event)
when not is_nil(user_id),
do: params
# Upserts a customer from the event if needed; on failure, params are
# returned unchanged (the caller decides how to handle a missing sender).
def maybe_set_customer_id(params, authorization, event) do
case create_or_update_customer_from_slack_event(authorization, event) do
{:ok, %Customer{id: customer_id}} ->
Map.merge(params, %{"customer_id" => customer_id})
_ ->
params
end
end
# Resolves the Slack event's sender to exactly one of "user_id" or
# "customer_id"; raises when neither could be determined. The two
# maybe_set_* steps are mutually exclusive, so success always yields a
# single-key map (hence the map_size check).
@spec format_sender_id_v2!(SlackAuthorization.t(), map()) :: map()
def format_sender_id_v2!(authorization, event) do
%{}
|> maybe_set_user_id(authorization, event)
|> maybe_set_customer_id(authorization, event)
|> case do
params when map_size(params) == 1 ->
params
_invalid ->
raise "Unable to find matching user or customer ID for Slack event #{inspect(event)} on account authorization #{
inspect(authorization)
}"
end
end
# Legacy variant of format_sender_id_v2!/2: resolves the Slack sender to a
# "user_id" or "customer_id" param, creating the customer on the fly as a
# last resort (the /3 customer-creation helper is defined earlier in this
# module and also associates the channel's company when one is found).
@spec format_sender_id!(any(), binary(), binary()) :: map()
def format_sender_id!(authorization, slack_user_id, slack_channel_id) do
# TODO: what's the best way to handle these nested `case` statements?
# TODO: handle updating the customer's company_id if it's not set yet?
# TODO: should we check if the slack_user_id is a workspace admin, or something like that?
case find_matching_user(authorization, slack_user_id) do
%{id: user_id} ->
%{"user_id" => user_id}
_ ->
case find_matching_customer(authorization, slack_user_id) do
%{id: customer_id} ->
%{"customer_id" => customer_id}
_ ->
case create_or_update_customer_from_slack_user_id(
authorization,
slack_user_id,
slack_channel_id
) do
{:ok, customer} ->
%{"customer_id" => customer.id}
_ ->
raise "Unable to find matching user or customer ID for Slack user #{
inspect(slack_user_id)
} on account authorization #{inspect(authorization)}"
end
end
end
end
# True when the given Slack channel is the authorization's primary channel,
# matching either the channel name or the channel ID. An authorization
# without both fields (or nil) is never a match.
@spec is_primary_channel?(any(), binary()) :: boolean()
def is_primary_channel?(authorization, slack_channel_id) do
  case authorization do
    %{channel: channel, channel_id: channel_id} ->
      slack_channel_id in [channel, channel_id]

    _ ->
      false
  end
end
# Slack encodes channel visibility in the ID prefix: "G" means a private
# group, "C" a public channel. Anything else is treated as not private.
@spec is_private_slack_channel?(binary()) :: boolean()
def is_private_slack_channel?(channel_id) when is_binary(channel_id),
  do: String.starts_with?(channel_id, "G")

def is_private_slack_channel?(_), do: false
# Formats a human-readable label for a customer from whatever combination of
# name and email is available, falling back to "Anonymous User".
@spec identify_customer(Customer.t()) :: binary()
def identify_customer(%Customer{email: nil, name: nil}), do: "Anonymous User"
def identify_customer(%Customer{email: nil, name: name}), do: name
def identify_customer(%Customer{email: email, name: nil}), do: email
def identify_customer(%Customer{email: email, name: name}), do: "#{name} (#{email})"
# Records the Slack thread created for a conversation (from the Slack API
# `response`), assigning the conversation to its primary user first.
@spec create_new_slack_conversation_thread(binary(), map()) ::
{:ok, SlackConversationThread.t()} | {:error, Ecto.Changeset.t()}
def create_new_slack_conversation_thread(conversation_id, response) do
with conversation <- Conversations.get_conversation_with!(conversation_id, account: :users),
primary_user_id <- get_conversation_primary_user_id(conversation) do
# TODO: This is just a temporary workaround to handle having a user_id
# in the message when an agent responds on Slack. At the moment, if anyone
# responds to a thread on Slack, we just assume it's the assignee.
assign_and_broadcast_conversation_updated(conversation, primary_user_id)
response
|> Slack.Extractor.extract_slack_conversation_thread_info!()
|> Map.merge(%{
conversation_id: conversation_id,
account_id: conversation.account_id
})
|> SlackConversationThreads.create_slack_conversation_thread()
end
end
# Assigns the conversation to `primary_user_id` and notifies both the admin
# dashboard and any registered webhooks about the update.
@spec assign_and_broadcast_conversation_updated(Conversation.t(), binary()) :: Conversation.t()
def assign_and_broadcast_conversation_updated(conversation, primary_user_id) do
# TODO: how should we handle errors here?
{:ok, conversation} =
Conversations.update_conversation(conversation, %{assignee_id: primary_user_id})
conversation
|> Conversations.Notification.broadcast_conversation_update_to_admin!()
|> Conversations.Notification.notify(:webhooks, event: "conversation:updated")
end
# Picks the user who should act as the conversation's primary agent, drawn
# from the conversation's preloaded account users.
# TODO: do a round robin here instead of just getting the first user every time?
@spec get_conversation_primary_user_id(Conversation.t()) :: binary()
def get_conversation_primary_user_id(conversation) do
  account_users =
    conversation
    |> Map.get(:account)
    |> Map.get(:users)

  fetch_valid_user(account_users)
end
# Returns the ID of the oldest (by inserted_at) enabled user in `users`.
#
# Raises with a descriptive message when the list is empty or when every
# user is disabled. Previously, an all-disabled list crashed with a cryptic
# `BadMapError`: `Enum.reject/2` produced `[]`, `List.first/1` returned nil,
# and `Map.get(nil, :id)` blew up instead of explaining the problem.
@spec fetch_valid_user(list()) :: binary()
def fetch_valid_user([]),
  do: raise("No users associated with the conversation's account")

def fetch_valid_user(users) do
  users
  |> Enum.reject(& &1.disabled_at)
  |> Enum.sort_by(& &1.inserted_at)
  |> case do
    [] -> raise("No enabled users associated with the conversation's account")
    [user | _rest] -> user.id
  end
end
# Classifies the sender of a message: no customer_id means it was sent by an
# agent; no user_id means it was sent by a customer.
# NOTE(review): a message with BOTH ids nil matches the first clause and is
# reported as :agent — confirm that is the intended behavior.
@spec get_message_type(Message.t()) :: atom()
def get_message_type(%Message{customer_id: nil}), do: :agent
def get_message_type(%Message{user_id: nil}), do: :customer
def get_message_type(_message), do: :unknown
# A Slack event is a bot message when it carries a non-nil "bot_id".
@spec is_bot_message?(map()) :: boolean()
def is_bot_message?(%{"bot_id" => bot_id}), do: not is_nil(bot_id)
def is_bot_message?(_), do: false
# True when the Slack event's "user" resolves to an internal (agent) User.
@spec is_agent_message?(SlackAuthorization.t(), map()) :: boolean()
def is_agent_message?(authorization, %{"user" => slack_user_id})
when not is_nil(slack_user_id) do
case find_matching_user(authorization, slack_user_id) do
%User{} -> true
_ -> false
end
end
def is_agent_message?(_authorization, _), do: false
# A customer message is anything that is neither a bot nor an agent message.
@spec is_customer_message?(SlackAuthorization.t(), map()) :: boolean()
def is_customer_message?(authorization, slack_message) do
!is_bot_message?(slack_message) && !is_agent_message?(authorization, slack_message)
end
# Normalizes raw Slack message text for storage/display: resolves <@U…>
# mentions to usernames, converts <http…> and <mailto…> tokens to markdown,
# and strips any leading private-note marker.
@spec sanitize_slack_message(binary(), SlackAuthorization.t()) :: binary()
def sanitize_slack_message(text, %SlackAuthorization{
access_token: access_token
}) do
text
|> sanitize_slack_user_ids(access_token)
|> sanitize_slack_links()
|> sanitize_slack_mailto_links()
|> sanitize_private_note()
end
# Extracts mention/link/mailto metadata from Slack message text. Empty or
# missing extractions are dropped; returns nil when nothing was found.
@spec get_slack_message_metadata(binary()) :: map() | nil
def get_slack_message_metadata(text) do
  extracted =
    %{
      mentions: Slack.Helpers.find_slack_user_mentions(text),
      links: Slack.Helpers.find_slack_links(text),
      mailto_links: Slack.Helpers.find_slack_mailto_links(text)
    }
    |> Enum.reject(fn {_key, value} -> value in [nil, [], ""] end)

  case extracted do
    [] -> nil
    present -> Map.new(present)
  end
end
# Slack wraps entities in angle brackets: user mentions as <@U…>, links as
# <http…|label>, and mailto links as <mailto:…|label>.
@slack_user_id_regex ~r/<@U(.*?)>/
@slack_link_regex ~r/<http(.*?)>/
@slack_mailto_regex ~r/<mailto(.*?)>/
# Returns every full "<@U…>" mention token found in `text`.
@spec find_slack_user_mentions(binary()) :: [binary()]
def find_slack_user_mentions(text) do
# Each scan result is [full_match, captured_id]; we keep the full token.
@slack_user_id_regex
|> Regex.scan(text)
|> Enum.map(fn [match, _id] -> match end)
end
# Replaces each "<@U…>" mention in `text` with "@username", resolving the
# username via the Slack API. Mentions that cannot be resolved are left
# untouched.
@spec sanitize_slack_user_ids(binary(), binary()) :: binary()
def sanitize_slack_user_ids(text, access_token) do
case Regex.scan(@slack_user_id_regex, text) do
[] ->
text
results ->
Enum.reduce(results, text, fn [match, id], acc ->
# TODO: figure out best way to handle unrecognized user IDs
# The regex captures everything after "U", so re-prefix it.
slack_user_id = "U#{id}"
case get_slack_username(slack_user_id, access_token) do
nil -> acc
username -> String.replace(acc, match, "@#{username}")
end
end)
end
end
# The four functions below previously duplicated the same scan/reduce logic
# twice each (the link and mailto variants were copy-pasted, differing only
# in the regex). They now share two private helpers, so a future fix to the
# replacement logic only needs to happen in one place.

# Returns every full "<http…>" link token found in `text`.
@spec find_slack_links(binary()) :: [binary()]
def find_slack_links(text), do: scan_full_matches(@slack_link_regex, text)

# Rewrites every "<http…>" Slack link in `text` into markdown form.
@spec sanitize_slack_links(binary()) :: binary()
def sanitize_slack_links(text), do: replace_links_with_markdown(@slack_link_regex, text)

# Returns every full "<mailto…>" token found in `text`.
@spec find_slack_mailto_links(binary()) :: [binary()]
def find_slack_mailto_links(text), do: scan_full_matches(@slack_mailto_regex, text)

# Rewrites every "<mailto…>" Slack link in `text` into markdown form.
@spec sanitize_slack_mailto_links(binary()) :: binary()
def sanitize_slack_mailto_links(text), do: replace_links_with_markdown(@slack_mailto_regex, text)

# Returns the full "<…>" token for every match of `regex` in `text`.
defp scan_full_matches(regex, text) do
  regex
  |> Regex.scan(text)
  |> Enum.map(fn [match, _] -> match end)
end

# Replaces each "<…>" token matched by `regex` with its markdown equivalent.
# Reducing over an empty match list simply returns `text` unchanged.
defp replace_links_with_markdown(regex, text) do
  regex
  |> Regex.scan(text)
  |> Enum.reduce(text, fn [match, _], acc ->
    String.replace(acc, match, slack_link_to_markdown(match))
  end)
end
@private_note_prefix_v1 ~S(\\)
@private_note_prefix_v2 ~S(;;)
@private_note_prefix_regex_v1 ~r/^\\\\/
@private_note_prefix_regex_v2 ~r/^;;/

# Strips the leading private-note marker ("\\" or ";;") and any surrounding
# whitespace from `text`.
@spec sanitize_private_note(binary()) :: binary()
def sanitize_private_note(text) do
  [@private_note_prefix_regex_v1, @private_note_prefix_regex_v2]
  |> Enum.reduce(text, fn prefix_regex, acc -> String.replace(acc, prefix_regex, "") end)
  |> String.trim()
end
# Detects the private-note prefix ("\\" or ";;") at the start of a message
# and returns the corresponding message params; anything else yields no
# extra params.
@spec parse_message_type_params(binary()) :: map()
def parse_message_type_params(@private_note_prefix_v1 <> _note),
  do: %{"private" => true, "type" => "note"}

def parse_message_type_params(@private_note_prefix_v2 <> _note),
  do: %{"private" => true, "type" => "note"}

def parse_message_type_params(_text), do: %{}
# Converts a Slack link token ("<url>" or "<url|label>") into markdown.
# Tokens that do not split into one or two parts are returned unchanged.
@spec slack_link_to_markdown(binary()) :: binary()
def slack_link_to_markdown(text) do
  stripped = String.replace(text, ["<", ">"], "")

  case String.split(stripped, "|") do
    [link] -> "[#{link}](#{link})"
    [link, display] -> "[#{display}](#{link})"
    _ -> text
  end
end
# Converts a Slack "ts" string (fractional Unix seconds, e.g. "1577836800.5")
# into a UTC DateTime. Unparseable or missing timestamps fall back to now.
@spec slack_ts_to_utc(binary() | nil) :: DateTime.t()
def slack_ts_to_utc(nil), do: DateTime.utc_now()

def slack_ts_to_utc(ts) do
  case Float.parse(ts) do
    {unix_seconds, _rest} ->
      case DateTime.from_unix(round(unix_seconds * 1_000_000), :microsecond) do
        {:ok, datetime} -> datetime
        _ -> DateTime.utc_now()
      end

    :error ->
      DateTime.utc_now()
  end
end
#####################
# Formatters
#####################
# Builds the dashboard URL for a conversation. In dev this points at the
# local frontend; otherwise the host comes from the BACKEND_URL env var.
# NOTE(review): if BACKEND_URL is unset outside dev this yields "https://" +
# "" — presumably the env var is always set in production; confirm.
@spec get_dashboard_conversation_url(binary()) :: binary()
def get_dashboard_conversation_url(conversation_id) do
url = System.get_env("BACKEND_URL") || ""
base =
if Application.get_env(:chat_api, :environment) == :dev do
"http://localhost:3000"
else
"https://" <> url
end
"#{base}/conversations/all?cid=#{conversation_id}"
end
# Formats a message body for display in Slack.
#
# Clause order matters: the private-note clauses must precede the generic
# nil-body clause. Previously `%Message{body: nil}` was listed first, which
# made the `private: true, type: "note", body: nil` clause unreachable — a
# nil-body private note rendered as "" instead of the intended "\\ _Note_"
# placeholder.
@spec format_message_body(Message.t()) :: binary()
def format_message_body(%Message{private: true, type: "note", body: nil}), do: "\\\\ _Note_"
def format_message_body(%Message{private: true, type: "note", body: body}), do: "\\\\ _#{body}_"
def format_message_body(%Message{body: nil}), do: ""
# TODO: handle messages that are too long better (rather than just slicing them)
def format_message_body(%Message{body: body}) do
  case String.length(body) do
    # NOTE: String.slice(body, 0..2500) keeps 2501 graphemes (inclusive range).
    n when n > 2500 -> String.slice(body, 0..2500) <> "..."
    _ -> body
  end
end
# Prefixes `text` with an emoji + sender name derived from the message's
# preloaded user or customer association.
@spec prepend_sender_prefix(binary(), Message.t()) :: binary()
def prepend_sender_prefix(text, %Message{} = message) do
case message do
%Message{user: %User{} = user} ->
"*:female-technologist: #{Slack.Notification.format_user_name(user)}*: #{text}"
%Message{customer: %Customer{} = customer} ->
"*:wave: #{identify_customer(customer)}*: #{text}"
# Association not preloaded, but the id tells us it was an agent.
%Message{customer_id: nil, user_id: user_id} when not is_nil(user_id) ->
"*:female-technologist: Agent*: #{text}"
_ ->
Logger.error("Unrecognized message format: #{inspect(message)}")
text
end
end
# Same as prepend_sender_prefix/2 but can additionally fall back to the
# conversation's customer when only the message's customer_id is set.
@spec prepend_sender_prefix(binary(), Message.t(), Conversation.t()) :: binary()
def prepend_sender_prefix(text, %Message{} = message, %Conversation{} = conversation) do
case message do
%Message{user: %User{} = user} ->
"*:female-technologist: #{Slack.Notification.format_user_name(user)}*: #{text}"
%Message{customer: %Customer{} = customer} ->
"*:wave: #{identify_customer(customer)}*: #{text}"
%Message{customer_id: nil, user_id: user_id} when not is_nil(user_id) ->
"*:female-technologist: Agent*: #{text}"
%Message{customer_id: customer_id, user_id: nil} when not is_nil(customer_id) ->
"*:wave: #{identify_customer(conversation.customer)}*: #{text}"
_ ->
Logger.error("Unrecognized message format: #{inspect(message)}")
text
end
end
# Appends a quoted Slack-formatted list of the message's file attachments to
# `text`; messages without attachments pass through unchanged.
@spec append_attachments_text(binary() | nil, Message.t()) :: binary()
def append_attachments_text(text, %Message{attachments: [_ | _] = attachments}) do
  attachment_lines =
    Enum.map_join(attachments, "\n", fn file -> "> <#{file.file_url}|#{file.filename}>" end)

  text <> "\n\n" <> attachment_lines
end

def append_attachments_text(text, _message), do: text
# Builds the Slack message text. With no existing thread (first message of a
# conversation) we append usage hints and a dashboard link.
@spec get_message_text(map()) :: binary()
def get_message_text(%{
conversation: %Conversation{customer: %Customer{}} = conversation,
message: %Message{} = message,
authorization: _authorization,
thread: nil
}) do
dashboard_link = "<#{get_dashboard_conversation_url(conversation.id)}|dashboard>"
formatted_text =
message
|> format_message_body()
|> prepend_sender_prefix(message, conversation)
|> append_attachments_text(message)
[
formatted_text,
"Reply to this thread to start chatting, or view in the #{dashboard_link} :rocket:",
"(Start a message with `;;` or `\\\\` to send an <https://github.com/papercups-io/papercups/pull/562|internal note>.)"
]
|> Enum.reject(&is_nil/1)
|> Enum.join("\n\n")
end
@slack_chat_write_customize_scope "chat:write.customize"
# Replies within an existing thread: when the authorization has the
# chat:write.customize scope, Slack renders the sender via a custom
# username/icon (see get_message_payload), so no inline prefix is needed.
def get_message_text(%{
conversation: %Conversation{} = conversation,
message: %Message{} = message,
authorization: %SlackAuthorization{} = authorization,
thread: %SlackConversationThread{}
}) do
if SlackAuthorizations.has_authorization_scope?(
authorization,
@slack_chat_write_customize_scope
) do
message
|> format_message_body()
|> append_attachments_text(message)
else
message
|> format_message_body()
|> prepend_sender_prefix(message, conversation)
|> append_attachments_text(message)
end
end
# Builds the Slack chat.postMessage payload. The thread-less clause posts a
# rich "new conversation" card with customer details and a resolve button.
@spec get_message_payload(binary(), map()) :: map()
def get_message_payload(text, %{
channel: channel,
conversation: conversation,
customer: %Customer{
name: name,
email: email,
current_url: current_url,
browser: browser,
os: os,
time_zone: time_zone
},
thread: nil
}) do
%{
"channel" => channel,
"unfurl_links" => false,
"blocks" => [
%{
"type" => "section",
"text" => %{
"type" => "mrkdwn",
"text" => text
}
},
# Customer metadata fields; nil values render as "N/A".
%{
"type" => "section",
"fields" => [
%{
"type" => "mrkdwn",
"text" => "*Name:*\n#{name || "Anonymous User"}"
},
%{
"type" => "mrkdwn",
"text" => "*Email:*\n#{email || "N/A"}"
},
%{
"type" => "mrkdwn",
"text" => "*URL:*\n#{current_url || "N/A"}"
},
%{
"type" => "mrkdwn",
"text" => "*Browser:*\n#{browser || "N/A"}"
},
%{
"type" => "mrkdwn",
"text" => "*OS:*\n#{os || "N/A"}"
},
%{
"type" => "mrkdwn",
"text" => "*Timezone:*\n#{time_zone || "N/A"}"
},
%{
"type" => "mrkdwn",
"text" => "*Status:*\n#{get_slack_conversation_status(conversation)}"
}
]
},
%{
"type" => "divider"
},
# Interactive "Mark as resolved" button, handled by the close_conversation
# action elsewhere.
%{
"type" => "actions",
"elements" => [
%{
"type" => "button",
"text" => %{
"type" => "plain_text",
"text" => "Mark as resolved"
},
"value" => conversation.id,
"action_id" => "close_conversation",
"style" => "primary"
}
]
}
]
}
end
# Threaded reply from an agent: post under the existing thread, impersonating
# the agent via custom username/icon (requires chat:write.customize).
def get_message_payload(text, %{
channel: channel,
customer: _customer,
message: %Message{user: %User{} = user} = message,
thread: %SlackConversationThread{slack_thread_ts: slack_thread_ts}
}) do
%{
"channel" => channel,
"text" => text,
"thread_ts" => slack_thread_ts,
# TODO: figure out where these methods should live
"username" => Slack.Notification.format_user_name(user),
"icon_url" => Slack.Notification.slack_icon_url(user),
"reply_broadcast" => reply_broadcast_enabled?(message)
}
end
# Threaded reply from a customer: same shape, but labelled with the
# customer's identity and a wave emoji.
def get_message_payload(text, %{
channel: channel,
customer: _customer,
message: %Message{customer: %Customer{} = customer} = message,
thread: %SlackConversationThread{slack_thread_ts: slack_thread_ts}
}) do
%{
"channel" => channel,
"text" => text,
"thread_ts" => slack_thread_ts,
"username" => identify_customer(customer),
"icon_emoji" => ":wave:",
"reply_broadcast" => reply_broadcast_enabled?(message)
}
end
# Any other shape is a programming error upstream; fail loudly.
def get_message_payload(text, params) do
raise "Unrecognized params for Slack payload: #{text} #{inspect(params)}"
end
# Refreshes (or appends) the "*Status:*" field inside a Slack block's field
# list so it reflects the conversation's current status.
@spec update_fields_with_conversation_status([map()], Conversation.t()) :: [map()]
def update_fields_with_conversation_status(fields, conversation) do
  status_field = %{
    "type" => "mrkdwn",
    "text" => "*Status:*\n#{get_slack_conversation_status(conversation)}"
  }

  if Enum.any?(fields, &is_slack_conversation_status_field?/1) do
    # Overwrite every existing status field in place.
    Enum.map(fields, fn field ->
      if is_slack_conversation_status_field?(field) do
        Map.merge(field, status_field)
      else
        field
      end
    end)
  else
    # No status field yet — append one at the end.
    fields ++ [status_field]
  end
end
# Returns the Slack "actions" elements matching the conversation's status:
# open conversations get a "Mark as resolved" button, closed ones a
# "Reopen conversation" button.
# NOTE(review): statuses other than "open"/"closed" raise CaseClauseError —
# presumably those are the only possible values; confirm.
@spec update_action_elements_with_conversation_status(Conversation.t()) :: [map()]
def update_action_elements_with_conversation_status(%Conversation{id: id, status: status}) do
case status do
"open" ->
[
%{
"type" => "button",
"text" => %{
"type" => "plain_text",
"text" => "Mark as resolved"
},
"value" => id,
"action_id" => "close_conversation",
"style" => "primary"
}
]
"closed" ->
[
%{
"type" => "button",
"text" => %{
"type" => "plain_text",
"text" => "Reopen conversation"
},
"value" => id,
"action_id" => "open_conversation"
}
]
end
end
# Human-readable status label used in Slack blocks.
# NOTE(review): the case is not exhaustive — an open conversation with a nil
# closed_at and a non-nil first_replied_at is covered, but unexpected shapes
# raise CaseClauseError.
@spec get_slack_conversation_status(Conversation.t()) :: binary()
def get_slack_conversation_status(conversation) do
case conversation do
%{status: "closed"} ->
":white_check_mark: Closed"
%{closed_at: closed_at} when not is_nil(closed_at) ->
":white_check_mark: Closed"
%{status: "open", first_replied_at: nil} ->
":wave: Unhandled"
%{status: "open", first_replied_at: first_replied_at} when not is_nil(first_replied_at) ->
":speech_balloon: In progress"
end
end
# True when a Slack block field is one of the status fields we render
# (several historical label spellings are recognized).
@spec is_slack_conversation_status_field?(map()) :: boolean()
def is_slack_conversation_status_field?(%{"text" => text} = _field) do
  Enum.any?(
    ["*Status:*", "*Conversation status:*", "*Conversation Status:*"],
    &(text =~ &1)
  )
end

def is_slack_conversation_status_field?(_field), do: false
# Logs `message` and forwards it to the internal Slack notification channel.
@spec send_internal_notification(binary()) :: any()
def send_internal_notification(message) do
Logger.info(message)
# Putting in an async Task for now, since we don't care if this succeeds
# or fails (and we also don't want it to block anything)
Task.start(fn -> Slack.Notification.log(message) end)
end
@spec reply_broadcast_enabled?(Message.t()) :: boolean()
# Reply broadcasting is only considered for customer messages, and only for
# accounts listed in the feature-flag env var.
defp reply_broadcast_enabled?(%Message{
       account_id: account_id,
       customer: %Customer{} = _customer
     }) do
  # TODO: figure out a better way to enable feature flags for certain accounts,
  # or just make this configurable in account settings (or something like that)
  case System.get_env("PAPERCUPS_FEATURE_FLAGGED_ACCOUNTS") do
    flagged when is_binary(flagged) -> account_id in String.split(flagged, " ")
    _ -> false
  end
end

defp reply_broadcast_enabled?(_message), do: false
end
| 33.196296 | 124 | 0.633688 |
f7bea02c8c47fe417efd6c8c41c0b69893be11ad | 4,716 | exs | Elixir | test/grapevine/featured/implementation_test.exs | sb8244/grapevine | effaaa01294d30114090c20f9cc40b8665d834f2 | [
"MIT"
] | null | null | null | test/grapevine/featured/implementation_test.exs | sb8244/grapevine | effaaa01294d30114090c20f9cc40b8665d834f2 | [
"MIT"
] | null | null | null | test/grapevine/featured/implementation_test.exs | sb8244/grapevine | effaaa01294d30114090c20f9cc40b8665d834f2 | [
"MIT"
] | null | null | null | defmodule Grapevine.Featured.ImplementationTest do
use Grapevine.DataCase
alias Grapevine.Featured.Implementation
alias GrapevineData.Games
alias GrapevineData.Statistics
describe "determining the amount of milliseconds to delay" do
# From 20:00 the next cycle is 10 hours away (36_000_000 ms).
test "for the next cycle" do
now =
Timex.now()
|> Timex.set([hour: 20, minute: 0, second: 0])
|> DateTime.truncate(:second)
delay = Implementation.calculate_next_cycle_delay(now)
assert delay == 36000000
end
# From 04:00 the same day, the upcoming cycle is only 2 hours away.
test "process is rebooted same day but before cycle runs" do
now =
Timex.now()
|> Timex.set([hour: 4, minute: 0, second: 0])
|> DateTime.truncate(:second)
delay = Implementation.calculate_next_cycle_delay(now)
assert delay == 3600 * 2 * 1000
end
end
describe "selecting games to feature" do
# select_featured/0 should stamp a featured_order on every eligible game.
test "updates the sort order for all games" do
user = create_user()
game1 = create_game(user, %{name: "Game 1", short_name: "Game1"})
game2 = create_game(user, %{name: "Game 2", short_name: "Game2"})
game3 = create_game(user, %{name: "Game 3", short_name: "Game3"})
Games.seen_on_telnet(game1)
{:ok, _stats} = Statistics.record_mssp_players(game1, 2, Timex.now())
Games.seen_on_socket(game2)
Games.seen_on_telnet(game3)
Implementation.select_featured()
Enum.each([game1, game2, game3], fn game ->
{:ok, game} = Games.get(game.id)
assert game.featured_order
end)
end
# featured_games/0 draws from all three pools: player count, socket
# (Grapevine) users, and random telnet games.
test "selects from all three" do
user = create_user()
game1 = create_game(user, %{name: "Game 1", short_name: "Game1"})
game2 = create_game(user, %{name: "Game 2", short_name: "Game2"})
game3 = create_game(user, %{name: "Game 3", short_name: "Game3"})
Games.seen_on_telnet(game1)
{:ok, _stats} = Statistics.record_mssp_players(game1, 2, Timex.now())
Games.seen_on_telnet(game2)
Games.seen_on_socket(game3)
games = Implementation.featured_games()
game_ids =
games
|> Enum.map(& &1.id)
|> Enum.sort()
assert game_ids == [game1.id, game2.id, game3.id]
end
# Only games with recorded MSSP player stats qualify for the top pool.
test "top games based on player count" do
user = create_user()
game1 = create_game(user, %{name: "Game 1", short_name: "Game1"})
game2 = create_game(user, %{name: "Game 2", short_name: "Game2"})
_game3 = create_game(user, %{name: "Game 3", short_name: "Game3"})
Games.seen_on_telnet(game1)
{:ok, _stats} = Statistics.record_mssp_players(game1, 2, Timex.now())
Games.seen_on_telnet(game2)
{:ok, _stats} = Statistics.record_mssp_players(game2, 3, Timex.now())
games = Implementation.top_games_player_count(select: 2)
game_ids =
games
|> Enum.map(& &1.id)
|> Enum.sort()
assert game_ids == [game1.id, game2.id]
end
# Only games seen on the socket (chat network) are eligible here.
test "random games connected to the chat network" do
user = create_user()
game1 = create_game(user, %{name: "Game 1", short_name: "Game1"})
game2 = create_game(user, %{name: "Game 2", short_name: "Game2"})
_game3 = create_game(user, %{name: "Game 3", short_name: "Game3"})
Games.seen_on_socket(game1)
Games.seen_on_socket(game2)
games = Implementation.random_games_using_grapevine(select: 2)
game_ids =
games
|> Enum.map(& &1.id)
|> Enum.sort()
assert game_ids == [game1.id, game2.id]
end
# Already-picked games must be excluded from the selection pool.
test "random games not already picked using client or chat" do
user = create_user()
game1 = create_game(user, %{name: "Game 1", short_name: "Game1"})
game2 = create_game(user, %{name: "Game 2", short_name: "Game2"})
_game3 = create_game(user, %{name: "Game 3", short_name: "Game3"})
Games.seen_on_socket(game1)
Games.seen_on_socket(game2)
games = Implementation.random_games_using_grapevine(select: 2, already_picked: [game1.id])
game_ids =
games
|> Enum.map(& &1.id)
|> Enum.sort()
assert game_ids == [game2.id]
end
# random_games/1 also honors the already_picked exclusion list.
test "random selection of games that have not been picked" do
user = create_user()
game1 = create_game(user, %{name: "Game 1", short_name: "Game1"})
game2 = create_game(user, %{name: "Game 2", short_name: "Game2"})
game3 = create_game(user, %{name: "Game 3", short_name: "Game3"})
Games.seen_on_telnet(game1)
Games.seen_on_telnet(game2)
Games.seen_on_telnet(game3)
games = Implementation.random_games(select: 2, already_picked: [game1.id])
game_ids =
games
|> Enum.map(& &1.id)
|> Enum.sort()
assert game_ids == [game2.id, game3.id]
end
end
end
| 30.823529 | 96 | 0.624894 |
f7beb83a800249317457ba6930e1bc485f2d4c4c | 3,452 | ex | Elixir | clients/testing/lib/google_api/testing/v1/model/test_specification.ex | MechimCook/elixir-google-api | 0240ede69ec77115076724d223f9d1f849ff1d6b | [
"Apache-2.0"
] | null | null | null | clients/testing/lib/google_api/testing/v1/model/test_specification.ex | MechimCook/elixir-google-api | 0240ede69ec77115076724d223f9d1f849ff1d6b | [
"Apache-2.0"
] | null | null | null | clients/testing/lib/google_api/testing/v1/model/test_specification.ex | MechimCook/elixir-google-api | 0240ede69ec77115076724d223f9d1f849ff1d6b | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Testing.V1.Model.TestSpecification do
@moduledoc """
A description of how to run the test.
## Attributes
- androidInstrumentationTest (AndroidInstrumentationTest): An Android instrumentation test. Defaults to: `null`.
- androidRoboTest (AndroidRoboTest): An Android robo test. Defaults to: `null`.
- androidTestLoop (AndroidTestLoop): An Android Application with a Test Loop. Defaults to: `null`.
- disablePerformanceMetrics (boolean()): Disables performance metrics recording; may reduce test latency. Defaults to: `null`.
- disableVideoRecording (boolean()): Disables video recording; may reduce test latency. Defaults to: `null`.
- iosTestSetup (IosTestSetup): Test setup requirements for iOS. Defaults to: `null`.
- iosXcTest (IosXcTest): An iOS XCTest, via an .xctestrun file. Defaults to: `null`.
- testSetup (TestSetup): Test setup requirements for Android e.g. files to install, bootstrap scripts. Defaults to: `null`.
- testTimeout (String.t): Max time a test execution is allowed to run before it is automatically cancelled. The default value is 5 min. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
# Generated model: the typespec below mirrors the field/1-2 declarations
# one-to-one. Regenerate via swagger-codegen rather than editing by hand.
@type t :: %__MODULE__{
:androidInstrumentationTest =>
GoogleApi.Testing.V1.Model.AndroidInstrumentationTest.t(),
:androidRoboTest => GoogleApi.Testing.V1.Model.AndroidRoboTest.t(),
:androidTestLoop => GoogleApi.Testing.V1.Model.AndroidTestLoop.t(),
:disablePerformanceMetrics => any(),
:disableVideoRecording => any(),
:iosTestSetup => GoogleApi.Testing.V1.Model.IosTestSetup.t(),
:iosXcTest => GoogleApi.Testing.V1.Model.IosXcTest.t(),
:testSetup => GoogleApi.Testing.V1.Model.TestSetup.t(),
:testTimeout => any()
}
field(:androidInstrumentationTest, as: GoogleApi.Testing.V1.Model.AndroidInstrumentationTest)
field(:androidRoboTest, as: GoogleApi.Testing.V1.Model.AndroidRoboTest)
field(:androidTestLoop, as: GoogleApi.Testing.V1.Model.AndroidTestLoop)
field(:disablePerformanceMetrics)
field(:disableVideoRecording)
field(:iosTestSetup, as: GoogleApi.Testing.V1.Model.IosTestSetup)
field(:iosXcTest, as: GoogleApi.Testing.V1.Model.IosXcTest)
field(:testSetup, as: GoogleApi.Testing.V1.Model.TestSetup)
field(:testTimeout)
end
# Generated: delegates JSON decoding to the model's ModelBase-derived decode/2.
defimpl Poison.Decoder, for: GoogleApi.Testing.V1.Model.TestSpecification do
def decode(value, options) do
GoogleApi.Testing.V1.Model.TestSpecification.decode(value, options)
end
end
# Generated: delegates JSON encoding to the shared Gax ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Testing.V1.Model.TestSpecification do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 47.287671 | 158 | 0.744496 |
f7beeb841e95570f96f0216cee030b8154baea89 | 13,548 | exs | Elixir | test/ecto/adapters/postgres/sql_test.exs | yrashk/ecto | 1462d5ad4cbb7bf74c292ec405852bc196808daf | [
"Apache-2.0"
] | 1 | 2016-08-15T21:23:28.000Z | 2016-08-15T21:23:28.000Z | test/ecto/adapters/postgres/sql_test.exs | yrashk/ecto | 1462d5ad4cbb7bf74c292ec405852bc196808daf | [
"Apache-2.0"
] | null | null | null | test/ecto/adapters/postgres/sql_test.exs | yrashk/ecto | 1462d5ad4cbb7bf74c292ec405852bc196808daf | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Adapters.Postgres.SQLTest do
use ExUnit.Case, async: true
import Ecto.Query
alias Ecto.Adapters.Postgres.SQL
alias Ecto.Queryable
alias Ecto.Query.Planner
# Fixture schema used by the SQL generation tests below; includes a has_many
# and a has_one association with non-default reference/foreign keys.
defmodule Model do
use Ecto.Model
schema "model" do
field :x, :integer
field :y, :integer
has_many :comments, Ecto.Adapters.Postgres.SQLTest.Model2,
references: :x,
foreign_key: :z
has_one :permalink, Ecto.Adapters.Postgres.SQLTest.Model3,
references: :y,
foreign_key: :id
end
end
# Fixture schema: the belongs_to side of Model's has_many association.
defmodule Model2 do
use Ecto.Model
schema "model2" do
belongs_to :post, Ecto.Adapters.Postgres.SQLTest.Model,
references: :x,
foreign_key: :z
end
end
defmodule Model3 do
use Ecto.Model
schema "model3" do
field :list1, {:array, :string}
field :list2, {:array, :integer}
field :binary, :binary
end
end
  # Runs the planner's prepare + normalize passes so queries reach SQL
  # generation in the same shape the adapter sees at runtime (params dropped).
  defp normalize(query) do
    {query, _params} = Planner.prepare(query, %{})
    Planner.normalize(query, %{}, [])
  end
  # FROM / SELECT / DISTINCT / WHERE / ORDER BY clause generation.
  test "from" do
    query = Model |> select([r], r.x) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x" FROM "model" AS m0}
  end

  test "from without model" do
    # A bare string source aliases from its first letter ("posts" -> p0).
    query = "posts" |> select([r], r.x) |> normalize
    assert SQL.all(query) == ~s{SELECT p0."x" FROM "posts" AS p0}
  end

  test "select" do
    query = Model |> select([r], {r.x, r.y}) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x", m0."y" FROM "model" AS m0}

    query = Model |> select([r], [r.x, r.y]) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x", m0."y" FROM "model" AS m0}
  end

  test "distinct" do
    query = Model |> distinct([r], r.x) |> select([r], {r.x, r.y}) |> normalize
    assert SQL.all(query) == ~s{SELECT DISTINCT ON (m0."x") m0."x", m0."y" FROM "model" AS m0}

    query = Model |> distinct([r], 2) |> select([r], r.x) |> normalize
    assert SQL.all(query) == ~s{SELECT DISTINCT ON (2) m0."x" FROM "model" AS m0}

    query = Model |> distinct([r], [r.x, r.y]) |> select([r], {r.x, r.y}) |> normalize
    assert SQL.all(query) == ~s{SELECT DISTINCT ON (m0."x", m0."y") m0."x", m0."y" FROM "model" AS m0}
  end

  test "where" do
    # Multiple where/2 calls are ANDed together.
    query = Model |> where([r], r.x == 42) |> where([r], r.y != 43) |> select([r], r.x) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x" FROM "model" AS m0 WHERE (m0."x" = 42) AND (m0."y" != 43)}
  end

  test "order by" do
    query = Model |> order_by([r], r.x) |> select([r], r.x) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x" FROM "model" AS m0 ORDER BY m0."x"}

    query = Model |> order_by([r], [r.x, r.y]) |> select([r], r.x) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x" FROM "model" AS m0 ORDER BY m0."x", m0."y"}

    query = Model |> order_by([r], [asc: r.x, desc: r.y]) |> select([r], r.x) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x" FROM "model" AS m0 ORDER BY m0."x", m0."y" DESC}
  end
  # LIMIT/OFFSET, row locking, string escaping and operator rendering.
  test "limit and offset" do
    query = Model |> limit([r], 3) |> select([], 0) |> normalize
    assert SQL.all(query) == ~s{SELECT 0 FROM "model" AS m0 LIMIT 3}

    query = Model |> offset([r], 5) |> select([], 0) |> normalize
    assert SQL.all(query) == ~s{SELECT 0 FROM "model" AS m0 OFFSET 5}

    query = Model |> offset([r], 5) |> limit([r], 3) |> select([], 0) |> normalize
    assert SQL.all(query) == ~s{SELECT 0 FROM "model" AS m0 LIMIT 3 OFFSET 5}
  end

  test "lock" do
    # lock(true) means FOR UPDATE; a string is emitted verbatim.
    query = Model |> lock(true) |> select([], 0) |> normalize
    assert SQL.all(query) == ~s{SELECT 0 FROM "model" AS m0 FOR UPDATE}

    query = Model |> lock("FOR SHARE NOWAIT") |> select([], 0) |> normalize
    assert SQL.all(query) == ~s{SELECT 0 FROM "model" AS m0 FOR SHARE NOWAIT}
  end

  test "string escape" do
    # Single quotes are doubled for SQL literals.
    query = Model |> select([], "'\\  ") |> normalize
    assert SQL.all(query) == ~s{SELECT '''\\  ' FROM "model" AS m0}

    query = Model |> select([], "'") |> normalize
    assert SQL.all(query) == ~s{SELECT '''' FROM "model" AS m0}
  end

  test "binary ops" do
    query = Model |> select([r], r.x == 2) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x" = 2 FROM "model" AS m0}

    query = Model |> select([r], r.x != 2) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x" != 2 FROM "model" AS m0}

    query = Model |> select([r], r.x <= 2) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x" <= 2 FROM "model" AS m0}

    query = Model |> select([r], r.x >= 2) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x" >= 2 FROM "model" AS m0}

    query = Model |> select([r], r.x < 2) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x" < 2 FROM "model" AS m0}

    query = Model |> select([r], r.x > 2) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x" > 2 FROM "model" AS m0}
  end

  test "is_nil" do
    query = Model |> select([r], is_nil(r.x)) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x" IS NULL FROM "model" AS m0}

    query = Model |> select([r], not is_nil(r.x)) |> normalize
    assert SQL.all(query) == ~s{SELECT NOT (m0."x" IS NULL) FROM "model" AS m0}
  end
  # Raw SQL fragments and literal rendering (nil/bool/strings/binaries/uuids/numbers).
  test "fragments" do
    query = Model |> select([r], fragment("downcase(?)", r.x)) |> normalize
    assert SQL.all(query) == ~s{SELECT downcase(m0."x") FROM "model" AS m0}

    value = 13
    # Interpolated fragment arguments become positional parameters ($1).
    query = Model |> select([r], fragment("downcase(?, ?)", r.x, ^value)) |> normalize
    assert SQL.all(query) == ~s{SELECT downcase(m0."x", $1) FROM "model" AS m0}
  end

  test "literals" do
    query = Model |> select([], nil) |> normalize
    assert SQL.all(query) == ~s{SELECT NULL FROM "model" AS m0}

    query = Model |> select([], true) |> normalize
    assert SQL.all(query) == ~s{SELECT TRUE FROM "model" AS m0}

    query = Model |> select([], false) |> normalize
    assert SQL.all(query) == ~s{SELECT FALSE FROM "model" AS m0}

    query = Model |> select([], "abc") |> normalize
    assert SQL.all(query) == ~s{SELECT 'abc' FROM "model" AS m0}

    # Raw binaries are rendered as Postgres hex bytea literals.
    query = Model |> select([], <<0, ?a,?b,?c>>) |> normalize
    assert SQL.all(query) == ~s{SELECT '\\x00616263' FROM "model" AS m0}

    query = Model |> select([], uuid(<<0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15>>)) |> normalize
    assert SQL.all(query) == ~s{SELECT '000102030405060708090A0B0C0D0E0F' FROM "model" AS m0}

    query = Model |> select([], uuid("\0\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0E\x0F")) |> normalize
    assert SQL.all(query) == ~s{SELECT '000102030405060708090A0B0C0D0E0F' FROM "model" AS m0}

    query = Model |> select([], 123) |> normalize
    assert SQL.all(query) == ~s{SELECT 123 FROM "model" AS m0}

    # Floats are cast explicitly so Postgres does not treat them as numeric.
    query = Model |> select([], 123.0) |> normalize
    assert SQL.all(query) == ~s{SELECT 123.0::float FROM "model" AS m0}
  end

  test "nested expressions" do
    z = 123
    query = from(r in Model, []) |> select([r], r.x > 0 and (r.y > ^(-z)) or true) |> normalize
    assert SQL.all(query) == ~s{SELECT ((m0."x" > 0) AND (m0."y" > $1)) OR TRUE FROM "model" AS m0}
  end
  # IN expressions, HAVING, GROUP BY and sigil handling.
  test "in expression" do
    # `in` is compiled to `= ANY (ARRAY[...])`.
    query = Model |> select([e], 1 in []) |> normalize
    assert SQL.all(query) == ~s{SELECT 1 = ANY (ARRAY[]) FROM "model" AS m0}

    query = Model |> select([e], 1 in [1,e.x,3]) |> normalize
    assert SQL.all(query) == ~s{SELECT 1 = ANY (ARRAY[1, m0."x", 3]) FROM "model" AS m0}
  end

  test "having" do
    query = Model |> having([p], p.x == p.x) |> select([], 0) |> normalize
    assert SQL.all(query) == ~s{SELECT 0 FROM "model" AS m0 HAVING (m0."x" = m0."x")}

    query = Model |> having([p], p.x == p.x) |> having([p], p.y == p.y) |> select([], 0) |> normalize
    assert SQL.all(query) == ~s{SELECT 0 FROM "model" AS m0 HAVING (m0."x" = m0."x") AND (m0."y" = m0."y")}
  end

  test "group by" do
    query = Model |> group_by([r], r.x) |> select([r], r.x) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x" FROM "model" AS m0 GROUP BY m0."x"}

    query = Model |> group_by([r], 2) |> select([r], r.x) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x" FROM "model" AS m0 GROUP BY 2}

    query = Model |> group_by([r], [r.x, r.y]) |> select([r], r.x) |> normalize
    assert SQL.all(query) == ~s{SELECT m0."x" FROM "model" AS m0 GROUP BY m0."x", m0."y"}
  end

  test "sigils" do
    query = Model |> select([], ~s"abc" in ~w(abc def)) |> normalize
    assert SQL.all(query) == ~s{SELECT 'abc' = ANY (ARRAY['abc', 'def']) FROM "model" AS m0}
  end
  # Interpolated (^) values must be numbered $1..$13 in clause order.
  test "interpolated values" do
    query = Model
            |> select([], ^0)
            |> join(:inner, [], Model2, ^true)
            |> join(:inner, [], Model2, ^false)
            |> where([], ^true)
            |> where([], ^false)
            |> group_by([], ^1)
            |> group_by([], ^2)
            |> having([], ^true)
            |> having([], ^false)
            |> order_by([], ^3)
            |> order_by([], ^4)
            |> limit([], ^5)
            |> offset([], ^6)
            |> normalize

    result =
      "SELECT $1 FROM \"model\" AS m0 INNER JOIN \"model2\" AS m1 ON $2 " <>
      "INNER JOIN \"model2\" AS m2 ON $3 WHERE ($4) AND ($5) " <>
      "GROUP BY $6, $7 HAVING ($8) AND ($9) " <>
      "ORDER BY $10, $11 LIMIT $12 OFFSET $13"

    assert SQL.all(query) == String.rstrip(result)
  end
  ## *_all
  # Bulk UPDATE/DELETE statement generation.

  test "update all" do
    query = Model |> Queryable.to_query |> normalize
    assert SQL.update_all(query, [x: 0]) ==
           ~s{UPDATE "model" AS m0 SET "x" = 0}

    query = from(e in Model, where: e.x == 123) |> normalize
    assert SQL.update_all(query, [x: 0]) ==
           ~s{UPDATE "model" AS m0 SET "x" = 0 WHERE (m0."x" = 123)}

    query = Model |> Queryable.to_query |> normalize
    assert SQL.update_all(query, [x: 0, y: "123"]) ==
           ~s{UPDATE "model" AS m0 SET "x" = 0, "y" = '123'}

    query = Model |> Queryable.to_query |> normalize
    # A quoted ^0 expression becomes a positional parameter.
    assert SQL.update_all(query, [x: quote do: ^0]) ==
           ~s{UPDATE "model" AS m0 SET "x" = $1}
  end

  test "delete all" do
    query = Model |> Queryable.to_query |> normalize
    assert SQL.delete_all(query) == ~s{DELETE FROM "model" AS m0}

    query = from(e in Model, where: e.x == 123) |> normalize
    assert SQL.delete_all(query) ==
           ~s{DELETE FROM "model" AS m0 WHERE (m0."x" = 123)}
  end
  ## Joins
  # Explicit joins: binding aliases m0/m1/m2 follow source order.

  test "join" do
    query = Model |> join(:inner, [p], q in Model2, p.x == q.z) |> select([], 0) |> normalize
    assert SQL.all(query) ==
           ~s{SELECT 0 FROM "model" AS m0 INNER JOIN "model2" AS m1 ON m0."x" = m1."z"}

    query = Model |> join(:inner, [p], q in Model2, p.x == q.z)
                  |> join(:inner, [], Model, true) |> select([], 0) |> normalize
    assert SQL.all(query) ==
           ~s{SELECT 0 FROM "model" AS m0 INNER JOIN "model2" AS m1 ON m0."x" = m1."z" } <>
           ~s{INNER JOIN "model" AS m2 ON TRUE}
  end

  test "join with nothing bound" do
    query = Model |> join(:inner, [], q in Model2, q.z == q.z) |> select([], 0) |> normalize
    assert SQL.all(query) ==
           ~s{SELECT 0 FROM "model" AS m0 INNER JOIN "model2" AS m1 ON m1."z" = m1."z"}
  end

  test "join without model" do
    query = "posts" |> join(:inner, [p], q in "comments", p.x == q.z) |> select([], 0) |> normalize
    assert SQL.all(query) ==
           ~s{SELECT 0 FROM "posts" AS p0 INNER JOIN "comments" AS c0 ON p0."x" = c0."z"}
  end
  ## Associations
  # Joins expressed through the associations declared on the fixture schemas;
  # the ON clause must use the configured references/foreign_key columns.

  test "association join belongs_to" do
    query = Model2 |> join(:inner, [c], p in c.post) |> select([], 0) |> normalize
    assert SQL.all(query) ==
           "SELECT 0 FROM \"model2\" AS m0 INNER JOIN \"model\" AS m1 ON m1.\"x\" = m0.\"z\""
  end

  test "association join has_many" do
    query = Model |> join(:inner, [p], c in p.comments) |> select([], 0) |> normalize
    assert SQL.all(query) ==
           "SELECT 0 FROM \"model\" AS m0 INNER JOIN \"model2\" AS m1 ON m1.\"z\" = m0.\"x\""
  end

  test "association join has_one" do
    query = Model |> join(:inner, [p], pp in p.permalink) |> select([], 0) |> normalize
    assert SQL.all(query) ==
           "SELECT 0 FROM \"model\" AS m0 INNER JOIN \"model3\" AS m1 ON m1.\"id\" = m0.\"y\""
  end

  test "association join with on" do
    # A user-supplied ON condition is ANDed with the association condition.
    query = Model |> join(:inner, [p], c in p.comments, 1 == 2) |> select([], 0) |> normalize
    assert SQL.all(query) ==
           "SELECT 0 FROM \"model\" AS m0 INNER JOIN \"model2\" AS m1 ON (1 = 2) AND (m1.\"z\" = m0.\"x\")"
  end

  test "join produces correct bindings" do
    query = from(p in Model, join: c in Model2, on: true)
    query = from(p in query, join: c in Model2, on: true, select: {p.id, c.id})
    query = normalize(query)
    assert SQL.all(query) ==
           "SELECT m0.\"id\", m2.\"id\" FROM \"model\" AS m0 INNER JOIN \"model2\" AS m1 ON TRUE INNER JOIN \"model2\" AS m2 ON TRUE"
  end
  # Model based
  # Single-row INSERT/UPDATE/DELETE statements built directly from column lists.

  test "insert" do
    query = SQL.insert("model", [:x, :y], [:id])
    assert query == ~s{INSERT INTO "model" ("x", "y") VALUES ($1, $2) RETURNING "id"}

    query = SQL.insert("model", [], [:id])
    assert query == ~s{INSERT INTO "model" DEFAULT VALUES RETURNING "id"}

    query = SQL.insert("model", [], [])
    assert query == ~s{INSERT INTO "model" DEFAULT VALUES}
  end

  test "update" do
    # Filter columns take the leading parameters ($1), then SET values.
    query = SQL.update("model", [:id], [:x, :y], [:z])
    assert query == ~s{UPDATE "model" SET "x" = $2, "y" = $3 WHERE "id" = $1 RETURNING "z"}

    query = SQL.update("model", [:id], [:x, :y], [])
    assert query == ~s{UPDATE "model" SET "x" = $2, "y" = $3 WHERE "id" = $1}
  end

  test "delete" do
    query = SQL.delete("model", [:x, :y])
    assert query == ~s{DELETE FROM "model" WHERE "x" = $1 AND "y" = $2}
  end
end
| 36.815217 | 133 | 0.561559 |
f7bf1b02e857e24de9640ba801ff5440cdbe6232 | 1,627 | ex | Elixir | lib/event_serializer/config.ex | quiqupltd/event_serializer | 988a203ddc078e18fbff8183e964fb10e6b0000d | [
"MIT"
] | null | null | null | lib/event_serializer/config.ex | quiqupltd/event_serializer | 988a203ddc078e18fbff8183e964fb10e6b0000d | [
"MIT"
] | 5 | 2018-10-16T17:21:13.000Z | 2019-01-21T15:16:41.000Z | lib/event_serializer/config.ex | quiqupltd/event_serializer | 988a203ddc078e18fbff8183e964fb10e6b0000d | [
"MIT"
] | 2 | 2018-10-12T11:39:56.000Z | 2018-10-16T17:06:35.000Z | defmodule EventSerializer.Config do
  @moduledoc """
  Helpers for getting config
  """

  # Resolves the configured topic names. The raw config value may be nil, a
  # list, a comma-separated string, an MFA tuple, a zero-arity function, or a
  # {:system, var} tuple -- see the topic_names/1 clauses below.
  def topic_names do
    :event_serializer
    |> EnvConfig.get(:topic_names)
    |> topic_names()
  end

  defp topic_names(nil), do: nil
  defp topic_names(list) when is_list(list), do: parse(list)

  # "a, b ,c" -> ["a", "b", "c"] (empty segments dropped, whitespace trimmed).
  defp topic_names(csv) when is_binary(csv) do
    csv |> String.split(",", trim: true) |> Enum.map(&String.trim/1)
  end

  # {Mod, fun, args} is invoked at lookup time.
  defp topic_names({mod, fun, args}) when is_atom(mod) and is_atom(fun) do
    apply(mod, fun, args)
  end

  defp topic_names(fun) when is_function(fun, 0) do
    fun.()
  end

  # NOTE(review): the third element looks like a default but is ignored --
  # an unset env var yields nil rather than `_default`. Confirm this is intended.
  defp topic_names({:system, varname, _default}) when is_binary(varname) do
    System.get_env(varname)
  end

  defp topic_names({:system, varname}) when is_binary(varname) do
    System.get_env(varname)
  end

  # A list made entirely of tuples is resolved element-wise through
  # topic_names/1; any other list is returned verbatim.
  defp parse(term) when is_list(term) do
    case Enum.all?(term, fn value -> is_tuple(value) end) do
      true -> Enum.map(term, &topic_names/1)
      false -> term
    end
  end

  def schema_registry_url do
    EnvConfig.get(:event_serializer, :schema_registry_url)
  end

  def enabled? do
    enabled(EnvConfig.get(:event_serializer, :enabled))
  end

  def avlizer_confluent do
    EnvConfig.get(:event_serializer, :avlizer_confluent, :avlizer_confluent)
  end

  def schema_registry_adapter do
    EnvConfig.get(
      :event_serializer,
      :schema_registry_adapter,
      EventSerializer.SchemaRegistryAdapter
    )
  end

  def schema_registry do
    EnvConfig.get(:event_serializer, :schema_registry, EventSerializer.SchemaRegistryCache)
  end

  # Any value other than `false` (including nil / missing config) counts as enabled.
  defp enabled(false), do: false
  defp enabled(_), do: true
end
| 23.57971 | 91 | 0.70252 |
f7bf2e54edaf9bef5b350b8d9ccce2219f9e0868 | 63 | exs | Elixir | test/test_helper.exs | mattludwigs/harald | 82e67a71d9940d8572fd217eaf29575e81533151 | [
"MIT"
] | null | null | null | test/test_helper.exs | mattludwigs/harald | 82e67a71d9940d8572fd217eaf29575e81533151 | [
"MIT"
] | null | null | null | test/test_helper.exs | mattludwigs/harald | 82e67a71d9940d8572fd217eaf29575e81533151 | [
"MIT"
] | null | null | null | ExUnit.start()
# Fail fast if :stream_data cannot start -- property-based tests depend on it.
:ok = Application.ensure_started(:stream_data)
| 15.75 | 46 | 0.777778 |
f7bf6a0f895ae8a1e38c866376d6df98091aa862 | 144 | exs | Elixir | rclex_node/test/rclex_node_test.exs | rclex/rcjp2021_demo | 3f8c980b1f50052ed953f02a886d7282c1de4ad4 | [
"Apache-2.0"
] | null | null | null | rclex_node/test/rclex_node_test.exs | rclex/rcjp2021_demo | 3f8c980b1f50052ed953f02a886d7282c1de4ad4 | [
"Apache-2.0"
] | 7 | 2021-05-27T08:40:05.000Z | 2021-11-30T09:25:13.000Z | rclex_node/test/rclex_node_test.exs | rclex/rcjp2021_demo | 3f8c980b1f50052ed953f02a886d7282c1de4ad4 | [
"Apache-2.0"
] | null | null | null | defmodule RclexNodeTest do
use ExUnit.Case
doctest RclexNode
  # Default `mix new` smoke test; exercises RclexNode.hello/0.
  test "greets the world" do
    assert RclexNode.hello() == :world
  end
end
| 16 | 38 | 0.722222 |
f7bf7486e33c211ca7e72cc0cb647684dac7e4d9 | 311 | ex | Elixir | core/handler/helper_modules.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | 1 | 2020-08-27T18:43:11.000Z | 2020-08-27T18:43:11.000Z | core/handler/helper_modules.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | null | null | null | core/handler/helper_modules.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | 1 | 2020-08-27T18:43:21.000Z | 2020-08-27T18:43:21.000Z | # Copyright(c) 2015-2020 ACCESS CO., LTD. All rights reserved.
defmodule AntikytheraCore.Handler.HelperModules do
  @moduledoc """
  Struct bundling the helper-module atoms used by the request handler:
  the top-level module, router, logger and metrics uploader.
  """
  # Croma.Struct generates the struct, validations and constructors from the
  # field spec; `recursive_new?: true` is a Croma option (see Croma.Struct docs).
  use Croma.Struct, recursive_new?: true, fields: [
    top: Croma.Atom,
    router: Croma.Atom,
    logger: Croma.Atom,
    metrics_uploader: Croma.Atom,
  ]
end
| 28.272727 | 62 | 0.639871 |
f7bfabb9e2b0dc4f395801f9760207450c16efce | 6,511 | ex | Elixir | lib/tesla/middleware/logger.ex | jordan0day/tesla | 9aaccf4d8914e8e9d9573bc8b39d36c10fe20b58 | [
"MIT"
] | null | null | null | lib/tesla/middleware/logger.ex | jordan0day/tesla | 9aaccf4d8914e8e9d9573bc8b39d36c10fe20b58 | [
"MIT"
] | null | null | null | lib/tesla/middleware/logger.ex | jordan0day/tesla | 9aaccf4d8914e8e9d9573bc8b39d36c10fe20b58 | [
"MIT"
] | null | null | null | defmodule Tesla.Middleware.Logger.Formatter do
  @moduledoc false
  # Heavily based on Elixir's Logger.Formatter
  # https://github.com/elixir-lang/elixir/blob/v1.6.4/lib/logger/lib/logger/formatter.ex

  # Format used when the application config provides none.
  @default_format "$method $url -> $status ($time ms)"
  # Placeholders that may appear in a format string (as "$method" etc.).
  @keys ~w(method url status time)

  # A compiled format: placeholder atoms interleaved with literal strings.
  @type format :: [atom | binary]

  # Splits a format string into literals and placeholder atoms, e.g.
  # "$method $url" -> [:method, " ", :url]. nil selects @default_format.
  @spec compile(binary | nil) :: format
  def compile(nil), do: compile(@default_format)

  def compile(binary) do
    # The empty named groups make Regex.split cut at both edges of "$word",
    # keeping the placeholder itself as its own chunk.
    ~r/(?<h>)\$[a-z]+(?<t>)/
    |> Regex.split(binary, on: [:h, :t], trim: true)
    |> Enum.map(&compile_key/1)
  end

  defp compile_key("$" <> key) when key in @keys, do: String.to_atom(key)
  # Unknown "$..." placeholders are a configuration error.
  defp compile_key("$" <> key), do: raise(ArgumentError, "$#{key} is an invalid format pattern.")
  defp compile_key(part), do: part

  # Renders one request/response pair into iodata. `time` is in microseconds
  # (from :timer.tc in Tesla.Middleware.Logger.call/3); shown as milliseconds.
  @spec format(Tesla.Env.t(), Tesla.Env.result(), integer, format) :: IO.chardata()
  def format(request, response, time, format) do
    Enum.map(format, &output(&1, request, response, time))
  end

  defp output(:method, env, _, _), do: env.method |> to_string() |> String.upcase()
  defp output(:url, env, _, _), do: env.url
  defp output(:status, _, {:ok, env}, _), do: to_string(env.status)
  defp output(:status, _, {:error, reason}, _), do: "error: " <> inspect(reason)
  defp output(:time, _, _, time), do: :io_lib.format("~.3f", [time / 1000])
  defp output(binary, _, _, _), do: binary
end
defmodule Tesla.Middleware.Logger do
  @behaviour Tesla.Middleware

  @moduledoc """
  Log requests using Elixir's Logger.
  With the default settings it logs request method, url, response status and time taken in milliseconds.
  ### Example usage
  ```
  defmodule MyClient do
    use Tesla
    plug Tesla.Middleware.Logger
  end
  ```
  ### Options
  - `:log_level` - custom function for calculating log level (see below)
  - `:filter_headers` - sanitizes sensitive headers before logging in debug mode (see below)
  ## Custom log format
  The default log format is `"$method $url -> $status ($time ms)"`
  which shows in logs like:
  ```
  2018-03-25 18:32:40.397 [info] GET https://bitebot.io -> 200 (88.074 ms)
  ```
  Because log format is processed during compile time it needs to be set in config:
  ```
  config :tesla, Tesla.Middleware.Logger, format: "$method $url ====> $status / time=$time"
  ```
  ## Custom log levels
  By default, the following log levels will be used:
  - `:error` - for errors, 5xx and 4xx responses
  - `:warn` - for 3xx responses
  - `:info` - for 2xx responses
  You can customize this setting by providing your own `log_level/1` function:
  ```
  defmodule MyClient do
    use Tesla
    plug Tesla.Middleware.Logger, log_level: &my_log_level/1
    def my_log_level(env) do
      case env.status do
        404 -> :info
        _ -> :default
      end
    end
  end
  ```
  ### Logger Debug output
  When the Elixir Logger log level is set to `:debug`
  Tesla Logger will show full request & response.
  If you want to disable detailed request/response logging
  but keep the `:debug` log level (i.e. in development)
  you can set `debug: false` in your config:
  ```
  # config/dev.local.exs
  config :tesla, Tesla.Middleware.Logger, debug: false
  ```
  #### Filter headers
  To sanitize sensitive headers such as `authorization` in
  debug logs, add them to the `:filter_headers` option.
  `:filter_headers` expects a list of header names as strings.
  ```
  # config/dev.local.exs
  config :tesla, Tesla.Middleware.Logger,
    filter_headers: ["authorization"]
  ```
  """

  alias Tesla.Middleware.Logger.Formatter

  # Config and the compiled format are captured at COMPILE time (see the
  # moduledoc); changing them requires recompiling this module.
  @config Application.get_env(:tesla, __MODULE__, [])

  @format Formatter.compile(@config[:format])

  @type log_level :: :info | :warn | :error

  require Logger

  # Middleware entry point: times the downstream pipeline, logs a one-line
  # summary at a severity derived from the result, and -- unless `debug: false`
  # is configured -- a full request/response dump at :debug level.
  def call(env, next, opts) do
    {time, response} = :timer.tc(Tesla, :run, [env, next])

    level = log_level(response, opts)
    Logger.log(level, fn -> Formatter.format(env, response, time, @format) end)

    if Keyword.get(@config, :debug, true) do
      Logger.debug(fn -> debug(env, response, opts) end)
    end

    response
  end

  # Transport errors always log at :error; otherwise the :log_level option
  # (a function or a fixed atom) decides, falling back to default_log_level/1.
  defp log_level({:error, _}, _), do: :error

  defp log_level({:ok, env}, opts) do
    case Keyword.get(opts, :log_level) do
      nil ->
        default_log_level(env)

      fun when is_function(fun) ->
        case fun.(env) do
          :default -> default_log_level(env)
          level -> level
        end

      atom when is_atom(atom) ->
        atom
    end
  end

  @spec default_log_level(Tesla.Env.t()) :: log_level
  def default_log_level(env) do
    cond do
      env.status >= 400 -> :error
      env.status >= 300 -> :warn
      true -> :info
    end
  end

  # Placeholder strings used by the debug dump when a section is empty.
  @debug_no_query "(no query)"
  @debug_no_headers "(no headers)"
  @debug_no_body "(no body)"
  @debug_stream "[Elixir.Stream]"

  # Builds the multi-line request/response dump (as iodata) for Logger.debug.
  defp debug(request, {:ok, response}, opts) do
    [
      "\n>>> REQUEST >>>\n",
      debug_query(request.query),
      ?\n,
      debug_headers(request.headers, opts),
      ?\n,
      debug_body(request.body),
      ?\n,
      "\n<<< RESPONSE <<<\n",
      debug_headers(response.headers, opts),
      ?\n,
      debug_body(response.body)
    ]
  end

  defp debug(request, {:error, error}, opts) do
    [
      "\n>>> REQUEST >>>\n",
      debug_query(request.query),
      ?\n,
      debug_headers(request.headers, opts),
      ?\n,
      debug_body(request.body),
      ?\n,
      "\n<<< RESPONSE ERROR <<<\n",
      inspect(error)
    ]
  end

  defp debug_query([]), do: @debug_no_query

  defp debug_query(query) do
    query
    |> Enum.flat_map(&Tesla.encode_pair/1)
    |> Enum.map(fn {k, v} -> ["Query: ", to_string(k), ": ", to_string(v), ?\n] end)
  end

  defp debug_headers([], _opts), do: @debug_no_headers

  # Headers named in the :filter_headers option are masked before logging.
  defp debug_headers(headers, opts) do
    filtered = Keyword.get(opts, :filter_headers, [])

    Enum.map(headers, fn {k, v} ->
      v = if k in filtered, do: "[FILTERED]", else: v
      [k, ": ", v, ?\n]
    end)
  end

  defp debug_body(nil), do: @debug_no_body
  defp debug_body([]), do: @debug_no_body
  defp debug_body(%Stream{}), do: @debug_stream
  defp debug_body(stream) when is_function(stream), do: @debug_stream

  defp debug_body(%Tesla.Multipart{} = mp) do
    [
      "[Tesla.Multipart]\n",
      "boundary: ",
      mp.boundary,
      ?\n,
      "content_type_params: ",
      inspect(mp.content_type_params),
      ?\n
      | Enum.map(mp.parts, &[inspect(&1), ?\n])
    ]
  end

  defp debug_body(data) when is_binary(data) or is_list(data), do: data
  defp debug_body(term), do: inspect(term)
end
| 26.360324 | 104 | 0.631393 |
f7bfb2a74e5052b063711c3f5c05c66cdaa8c6c6 | 3,524 | exs | Elixir | test/plug/telemetry_test.exs | tomciopp/plug | af7fba19e8bce208129d858b924c7a49b93beef1 | [
"Apache-2.0"
] | null | null | null | test/plug/telemetry_test.exs | tomciopp/plug | af7fba19e8bce208129d858b924c7a49b93beef1 | [
"Apache-2.0"
] | null | null | null | test/plug/telemetry_test.exs | tomciopp/plug | af7fba19e8bce208129d858b924c7a49b93beef1 | [
"Apache-2.0"
] | null | null | null | Application.ensure_all_started(:telemetry)
defmodule Plug.TelemetryTest do
  @moduledoc false
  use ExUnit.Case, async: true
  use Plug.Test

  # Minimal pipeline instrumented under [:pipeline] that always sends a 200.
  defmodule MyPlug do
    use Plug.Builder

    plug Plug.Telemetry, event_prefix: [:pipeline]
    plug :send_resp, 200

    defp send_resp(conn, status) do
      Plug.Conn.send_resp(conn, status, "Response")
    end
  end

  # Pipeline that never sends a response, so no :stop event is expected.
  defmodule MyNoSendPlug do
    use Plug.Builder

    plug Plug.Telemetry, event_prefix: [:nosend, :pipeline]
  end

  # Pipeline that raises before sending, so no :stop event is expected.
  defmodule MyCrashingPlug do
    use Plug.Builder

    plug Plug.Telemetry, event_prefix: [:crashing, :pipeline]
    plug :raise_error
    plug :send_resp, 200

    defp raise_error(_conn, _) do
      raise "Crash!"
    end

    defp send_resp(conn, status) do
      Plug.Conn.send_resp(conn, status, "Response")
    end
  end

  # Per-test handler ids; detached on exit so handlers never leak across tests.
  setup do
    start_handler_id = {:start, :rand.uniform(100)}
    stop_handler_id = {:stop, :rand.uniform(100)}

    on_exit(fn ->
      :telemetry.detach(start_handler_id)
      :telemetry.detach(stop_handler_id)
    end)

    {:ok, start_handler: start_handler_id, stop_handler: stop_handler_id}
  end

  test "emits an event before the pipeline and before sending the response", %{
    start_handler: start_handler,
    stop_handler: stop_handler
  } do
    attach(start_handler, [:pipeline, :start])
    attach(stop_handler, [:pipeline, :stop])

    MyPlug.call(conn(:get, "/"), [])

    assert_received {:event, [:pipeline, :start], measurements, metadata}
    assert map_size(measurements) == 1
    assert %{time: time} = measurements
    assert is_integer(time)
    assert map_size(metadata) == 1
    # Only assert the key is present here; the conn is inspected below from the
    # :stop metadata. (`_conn` fixes the unused-variable compiler warning the
    # previous `conn` binding produced, since it was rebound before any use.)
    assert %{conn: _conn} = metadata

    assert_received {:event, [:pipeline, :stop], measurements, metadata}
    assert map_size(measurements) == 1
    assert %{duration: duration} = measurements
    assert is_integer(duration)
    assert is_integer(time)
    assert map_size(metadata) == 1
    assert %{conn: conn} = metadata
    assert conn.state == :set
    assert conn.status == 200
  end

  test "doesn't emit a stop event if the response is not sent", %{
    start_handler: start_handler,
    stop_handler: stop_handler
  } do
    attach(start_handler, [:nosend, :pipeline, :start])
    attach(stop_handler, [:nosend, :pipeline, :stop])

    MyNoSendPlug.call(conn(:get, "/"), [])

    assert_received {:event, [:nosend, :pipeline, :start], _, _}
    refute_received {:event, [:nosend, :pipeline, :stop], _, _}
  end

  test "raises if event prefix is not provided" do
    assert_raise ArgumentError, ~r/^:event_prefix is required$/, fn ->
      Plug.Telemetry.init([])
    end
  end

  test "raises if event prefix is not a list of atoms" do
    assert_raise ArgumentError, ~r/^expected :event_prefix to be a list of atoms, got: 1$/, fn ->
      Plug.Telemetry.init(event_prefix: 1)
    end
  end

  test "doesn't emit a stop event when the pipeline crashes", %{
    start_handler: start_handler,
    stop_handler: stop_handler
  } do
    attach(start_handler, [:crashing, :pipeline, :start])
    attach(stop_handler, [:crashing, :pipeline, :stop])

    assert_raise RuntimeError, fn ->
      MyCrashingPlug.call(conn(:get, "/"), [])
    end

    assert_received {:event, [:crashing, :pipeline, :start], _, _}
    refute_received {:event, [:crashing, :pipeline, :stop], _, _}
  end

  # Forwards telemetry events to the test process as {:event, name, meas, meta}
  # so they can be checked with assert_received/refute_received.
  defp attach(handler_id, event) do
    :telemetry.attach(
      handler_id,
      event,
      fn event, measurements, metadata, _ ->
        send(self(), {:event, event, measurements, metadata})
      end,
      nil
    )
  end
end
| 27.107692 | 97 | 0.66714 |
f7bfc73b675b3b00fe8eddf023a78095e0c25cfd | 166 | ex | Elixir | test/e2e/test/support/helpers.ex | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 40 | 2022-01-19T20:27:36.000Z | 2022-03-31T18:17:41.000Z | test/e2e/test/support/helpers.ex | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 42 | 2022-02-03T22:52:43.000Z | 2022-03-26T20:57:32.000Z | test/e2e/test/support/helpers.ex | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 3 | 2022-02-10T04:00:37.000Z | 2022-03-08T22:07:45.000Z | defmodule HologramE2E.Test.Helpers do
alias Hologram.Compiler
def compile(opts \\ []) do
Keyword.put(opts, :force, true)
|> Compiler.compile()
end
end
| 18.444444 | 37 | 0.686747 |
f7bffb84c02a1ce1b08a5947c79a4f125ffe9dff | 1,121 | exs | Elixir | clients/manufacturers/mix.exs | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/manufacturers/mix.exs | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/manufacturers/mix.exs | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | defmodule GoogleApi.Manufacturers.V1.Mixfile do
  use Mix.Project

  # Version published to Hex for this generated client.
  @version "0.1.0"

  # Mix project definition: app name, Elixir requirement and package metadata.
  def project do
    [app: :google_api_manufacturers,
     version: @version,
     elixir: "~> 1.4",
     build_embedded: Mix.env == :prod,
     start_permanent: Mix.env == :prod,
     description: description(),
     package: package(),
     deps: deps(),
     source_url: "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/manufacturers"
    ]
  end

  # OTP application configuration; no supervision tree, only :logger.
  def application() do
    [extra_applications: [:logger]]
  end

  # Runtime dependency (google_gax) plus dev-only docs tooling.
  defp deps() do
    [
      {:google_gax, "~> 0.1.0"},
      {:ex_doc, "~> 0.16", only: :dev}
    ]
  end

  # One-line Hex package description.
  defp description() do
    """
    Public API for managing Manufacturer Center related data.
    """
  end

  # Hex package metadata: shipped files, maintainers, license and links.
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/manufacturers",
        "Homepage" => "https://developers.google.com/manufacturers/"
      }
    ]
  end
end
| 23.354167 | 113 | 0.606601 |
f7bfff3039248556b111a98cd98fa4c25acf8c06 | 1,022 | ex | Elixir | backend/lib/aptamer/application.ex | ui-icts/aptamer-web | a28502c22a4e55ab1fbae8bbeaa6b11c9a477c06 | [
"MIT"
] | null | null | null | backend/lib/aptamer/application.ex | ui-icts/aptamer-web | a28502c22a4e55ab1fbae8bbeaa6b11c9a477c06 | [
"MIT"
] | 7 | 2019-02-08T18:28:49.000Z | 2022-02-12T06:44:59.000Z | backend/lib/aptamer/application.ex | ui-icts/aptamer-web | a28502c22a4e55ab1fbae8bbeaa6b11c9a477c06 | [
"MIT"
] | null | null | null | defmodule Aptamer.Application do
use Application
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec
# Define workers and child supervisors to be supervised
children = [
# Start the Ecto repository
Aptamer.Repo,
# Start the endpoint when the application starts
AptamerWeb.Endpoint
# Start your own worker by calling: Aptamer.Worker.start_link(arg1, arg2, arg3)
# worker(Aptamer.Worker, [arg1, arg2, arg3]),
]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Aptamer.Supervisor]
Supervisor.start_link(children, opts)
end
  # Tell Phoenix to update the endpoint configuration
  # whenever the application is updated.
  # Invoked by OTP on configuration/hot-upgrade changes; always returns :ok.
  def config_change(changed, _new, removed) do
    AptamerWeb.Endpoint.config_change(changed, removed)
    :ok
  end
end
| 31.9375 | 85 | 0.720157 |
f7c0435ccd1a688ed2977042323e43ad6cb872c9 | 1,739 | ex | Elixir | clients/notebooks/lib/google_api/notebooks/v1/model/accelerator_config.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/notebooks/lib/google_api/notebooks/v1/model/accelerator_config.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/notebooks/lib/google_api/notebooks/v1/model/accelerator_config.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Notebooks.V1.Model.AcceleratorConfig do
  @moduledoc """
  Definition of a hardware accelerator. Note that not all combinations of `type` and `core_count` are valid. Check [GPUs on Compute Engine](/compute/docs/gpus/#gpus-list) to find a valid combination. TPUs are not supported.
  ## Attributes
  * `coreCount` (*type:* `String.t`, *default:* `nil`) - Count of cores of this accelerator.
  * `type` (*type:* `String.t`, *default:* `nil`) - Type of this accelerator.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :coreCount => String.t() | nil,
          :type => String.t() | nil
        }
  # Field names stay camelCase to match the API's JSON wire format.
  field(:coreCount)
  field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.Notebooks.V1.Model.AcceleratorConfig do
  # Route Poison decoding through the model's decode/2 (from GoogleApi.Gax.ModelBase).
  def decode(value, options) do
    GoogleApi.Notebooks.V1.Model.AcceleratorConfig.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Notebooks.V1.Model.AcceleratorConfig do
  # Encoding is shared across all generated models via GoogleApi.Gax.ModelBase.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 34.78 | 223 | 0.725129 |
f7c0803456355b50537ffec72e66870995105f77 | 1,401 | ex | Elixir | apps/andi/lib/andi_web/views/display_names.ex | msomji/smartcitiesdata | fc96abc1ef1306f7af6bd42bbcb4ed041a6d922c | [
"Apache-2.0"
] | null | null | null | apps/andi/lib/andi_web/views/display_names.ex | msomji/smartcitiesdata | fc96abc1ef1306f7af6bd42bbcb4ed041a6d922c | [
"Apache-2.0"
] | null | null | null | apps/andi/lib/andi_web/views/display_names.ex | msomji/smartcitiesdata | fc96abc1ef1306f7af6bd42bbcb4ed041a6d922c | [
"Apache-2.0"
] | null | null | null | defmodule AndiWeb.Views.DisplayNames do
@moduledoc false
@display_names %{
id: "ID",
benefitRating: "Benefit",
cadence: "Cadence",
contactEmail: "Maintainer Email",
contactName: "Maintainer Name",
dataJsonUrl: "Data JSON URL",
dataTitle: "Dataset Title",
description: "Description",
format: "Format",
homepage: "Homepage URL",
issuedDate: "Release Date",
itemType: "Item Type",
keywords: "Keywords",
language: "Language",
license: "License",
logoUrl: "Logo URL",
modifiedDate: "Last Updated",
orgTitle: "Organization Title",
orgId: "Organization",
publishFrequency: "Update Frequency",
spatial: "Spatial Boundaries",
temporal: "Temporal Boundaries",
dataName: "Data Name",
orgName: "Organization Name",
private: "Level of Access",
riskRating: "Risk",
schema: "Schema",
selector: "Selector",
sourceFormat: "Source Format",
sourceHeaders: "Headers",
sourceQueryParams: "Query Parameters",
sourceType: "Source Type",
sourceUrl: "Base URL",
topLevelSelector: "Top Level Selector",
name: "Name",
type: "Type",
week: "Week",
year: "Year",
month: "Month",
day: "Day",
hour: "Hour",
minute: "Minute",
second: "Second",
date: "Date",
time: "Time"
}
def get(field_key) do
Map.get(@display_names, field_key)
end
end
| 25.017857 | 43 | 0.630264 |
f7c082976a0f9f8e16d806c77d4e3457adf5247f | 522 | ex | Elixir | lib/phoenix/supervisor.ex | bitgamma/phoenix | c06fc0fa03a95f2c863c49711da58fedfa56c374 | [
"MIT"
] | null | null | null | lib/phoenix/supervisor.ex | bitgamma/phoenix | c06fc0fa03a95f2c863c49711da58fedfa56c374 | [
"MIT"
] | null | null | null | lib/phoenix/supervisor.ex | bitgamma/phoenix | c06fc0fa03a95f2c863c49711da58fedfa56c374 | [
"MIT"
] | null | null | null | defmodule Phoenix.Supervisor do
use Supervisor
def start_link do
Supervisor.start_link(__MODULE__, [])
end
def init([]) do
code_reloader = Application.get_env(:phoenix, :code_reloader)
[]
|> child(Phoenix.CodeReloader.Server, [], code_reloader)
|> child(Phoenix.Transports.LongPoller.Supervisor, [], true)
|> supervise(strategy: :one_for_one)
end
defp child(children, mod, args, true), do: [worker(mod, args) | children]
defp child(children, _mod, _args, false), do: children
end
| 26.1 | 75 | 0.697318 |
f7c09b6adf3381c522efc10dd041844855ff0529 | 107 | exs | Elixir | config/ci.exs | am-kantox/tarearbol | 37bac59178940df4c72bf942dd08d8acca505130 | [
"MIT"
] | 49 | 2017-07-22T12:25:46.000Z | 2022-02-12T20:29:36.000Z | config/ci.exs | am-kantox/tarearbol | 37bac59178940df4c72bf942dd08d8acca505130 | [
"MIT"
] | 15 | 2017-07-21T13:17:32.000Z | 2021-02-25T05:40:11.000Z | config/ci.exs | am-kantox/tarearbol | 37bac59178940df4c72bf942dd08d8acca505130 | [
"MIT"
] | 4 | 2017-10-26T10:28:00.000Z | 2019-09-13T08:04:01.000Z | import Config
config :cloister,
sentry: ~w|[email protected] [email protected]|a,
consensus: 4
| 17.833333 | 58 | 0.719626 |
f7c0a24f536503da4b615e719d99979acfc9263c | 207 | exs | Elixir | test/controllers/page_controller_test.exs | obahareth/twitter-relations-grapher | 952df9569d7d794a5446ea4f14cf5e1cde08f5df | [
"MIT"
] | 1 | 2017-06-07T22:53:52.000Z | 2017-06-07T22:53:52.000Z | test/controllers/page_controller_test.exs | obahareth/twitter-relations-grapher | 952df9569d7d794a5446ea4f14cf5e1cde08f5df | [
"MIT"
] | null | null | null | test/controllers/page_controller_test.exs | obahareth/twitter-relations-grapher | 952df9569d7d794a5446ea4f14cf5e1cde08f5df | [
"MIT"
] | null | null | null | defmodule TwitterGrapher.PageControllerTest do
use TwitterGrapher.ConnCase
test "GET /", %{conn: conn} do
conn = get conn, "/"
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 23 | 60 | 0.695652 |
f7c0bf3d39e7a4f3ef3ddb46b4fa858eaa71b206 | 821 | exs | Elixir | priv/repo/migrations/20210524200728_create_users_auth_tables.exs | manojsamanta/stripe-single-product | d0af1cede55ce6ac71100b9f4b5473919c16c884 | [
"MIT"
] | null | null | null | priv/repo/migrations/20210524200728_create_users_auth_tables.exs | manojsamanta/stripe-single-product | d0af1cede55ce6ac71100b9f4b5473919c16c884 | [
"MIT"
] | null | null | null | priv/repo/migrations/20210524200728_create_users_auth_tables.exs | manojsamanta/stripe-single-product | d0af1cede55ce6ac71100b9f4b5473919c16c884 | [
"MIT"
] | null | null | null | defmodule SingleProduct.Repo.Migrations.CreateUsersAuthTables do
use Ecto.Migration
def change do
execute "CREATE EXTENSION IF NOT EXISTS citext", ""
create table(:users) do
add :email, :citext, null: false
add :hashed_password, :string, null: false
add :role, :string, null: false, default: "guest"
add :confirmed_at, :naive_datetime
timestamps()
end
create unique_index(:users, [:email])
create table(:users_tokens) do
add :user_id, references(:users, on_delete: :delete_all), null: false
add :token, :binary, null: false
add :context, :string, null: false
add :sent_to, :string
timestamps(updated_at: false)
end
create index(:users_tokens, [:user_id])
create unique_index(:users_tokens, [:context, :token])
end
end
| 28.310345 | 75 | 0.671133 |
f7c1057ffffcd7e5582964e0518087119e228baa | 1,327 | exs | Elixir | config/config.exs | ftenario/members | 3de1ecc46a93053fa30f7e6845aee05352c9b03e | [
"MIT"
] | null | null | null | config/config.exs | ftenario/members | 3de1ecc46a93053fa30f7e6845aee05352c9b03e | [
"MIT"
] | null | null | null | config/config.exs | ftenario/members | 3de1ecc46a93053fa30f7e6845aee05352c9b03e | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
config :members, Members.Repo,
adapter: Ecto.Adapters.MySQL,
database: "members_repo",
username: "root",
password: "pcm_iot",
hostname: "localhost"
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :members, key: :value
config :members, ecto_repos: [Members.Repo]
#
# and access this configuration in your application as:
#
# Application.get_env(:members, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 33.175 | 73 | 0.746044 |
f7c1091ddc4e8cae849eb62713d82d5b9de17232 | 614 | exs | Elixir | mix.exs | larskrantz/nervous_gardener | 4e03cc739a19e35943d73503d20e8a2c26af2c63 | [
"MIT"
] | null | null | null | mix.exs | larskrantz/nervous_gardener | 4e03cc739a19e35943d73503d20e8a2c26af2c63 | [
"MIT"
] | null | null | null | mix.exs | larskrantz/nervous_gardener | 4e03cc739a19e35943d73503d20e8a2c26af2c63 | [
"MIT"
] | null | null | null | defmodule NervousGardener.Mixfile do
use Mix.Project
def project do
[apps_path: "apps",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options.
#
# Dependencies listed here are available only for this project
# and cannot be accessed from applications inside the apps folder
defp deps do
[]
end
end
| 22.740741 | 77 | 0.644951 |
f7c11cbcf656c68f815a5589587ac6267c511c9e | 887 | exs | Elixir | ui/mix.exs | ejc123/meeting_indicator | 7557971f1a8fece55b01c25dc633be1bb1b9cc2c | [
"Apache-2.0"
] | null | null | null | ui/mix.exs | ejc123/meeting_indicator | 7557971f1a8fece55b01c25dc633be1bb1b9cc2c | [
"Apache-2.0"
] | null | null | null | ui/mix.exs | ejc123/meeting_indicator | 7557971f1a8fece55b01c25dc633be1bb1b9cc2c | [
"Apache-2.0"
] | null | null | null | defmodule Ui.Mixfile do
use Mix.Project
def project do
[
app: :ui,
version: "0.1.3",
elixir: "~> 1.12",
elixirc_paths: elixirc_paths(Mix.env()),
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
deps: deps(),
aliases: aliases()
]
end
def application do
[extra_applications: [:logger], mod: {Ui.Application, []}]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp deps do
[
{:ace, path: "/home/ejc/projects/elixir/Ace"},
{:raxx, "~> 1.1.0"},
{:jason, "~> 1.2.2"},
{:raxx_view, path: "../raxx/extensions/raxx_view"},
{:raxx_logger, "~> 0.2.2"},
{:raxx_static, "~> 0.8.3"},
{:raxx_session, "~> 0.2.0"},
{:exsync, "~> 0.2.4", only: :dev}
]
end
defp aliases() do
[]
end
end
| 21.634146 | 62 | 0.526494 |
f7c122718db136554d95987404df2d03915869e8 | 2,988 | ex | Elixir | lib/maru/utils.ex | nicooga/maru | e61cf79f11c4d016766efb6778f6ae735445b1b3 | [
"BSD-3-Clause"
] | null | null | null | lib/maru/utils.ex | nicooga/maru | e61cf79f11c4d016766efb6778f6ae735445b1b3 | [
"BSD-3-Clause"
] | null | null | null | lib/maru/utils.ex | nicooga/maru | e61cf79f11c4d016766efb6778f6ae735445b1b3 | [
"BSD-3-Clause"
] | null | null | null | defmodule Maru.Utils do
@moduledoc false
@doc false
def is_blank(s) do
s in [nil, "", '', %{}]
end
@doc false
def upper_camel_case(s) do
s |> String.split("_") |> Enum.map(
fn i -> i |> String.capitalize end
) |> Enum.join("")
end
@doc false
def lower_underscore(s) do
for << i <- s >> , into: "" do
if i in ?A..?Z do
<<?\s, i + 32>>
else
<<i>>
end
end |> String.split |> Enum.join("_")
end
@doc false
def make_validator(validator) do
try do
module = [
Maru.Validations,
validator |> Atom.to_string |> upper_camel_case
] |> Module.concat
module.__info__(:functions)
module
rescue
UndefinedFunctionError ->
Maru.Exceptions.UndefinedValidator
|> raise([validator: validator])
end
end
@doc false
def make_type({:__aliases__, _, type}) do
do_make_type(type)
end
def make_type(type) when is_atom(type) do
type = type |> Atom.to_string |> upper_camel_case
do_make_type([type])
end
defp do_make_type(type) do
try do
module = [ Maru.Types | type ] |> Module.concat
module.__info__(:functions)
module
rescue
UndefinedFunctionError ->
type = type |> Module.concat |> inspect
Maru.Exceptions.UndefinedType |> raise([type: type])
end
end
@doc false
def make_parser(parsers, options) do
value = quote do: value
block =
Enum.reduce(parsers, value, fn
{:func, func}, ast ->
quote do
unquote(func).(unquote(ast))
end
{:module, module, arguments}, ast ->
arguments =
Keyword.take(options, arguments)
|> Enum.into(%{})
|> Macro.escape
quote do
unquote(module).parse(unquote(ast), unquote(arguments))
end
{:list, nested}, ast ->
func = make_parser(nested, options)
quote do
Enum.map(unquote(ast), unquote(func))
end
end)
quote do
fn unquote(value) -> unquote(block) end
end
end
@doc false
def get_nested(params, attr) when attr in [:information, :runtime] do
Enum.map(params, fn
%{__struct__: type}=param when type in [
Maru.Struct.Parameter,
Maru.Struct.Dependent,
Maru.Struct.Validator
] ->
param |> Map.fetch!(attr)
end)
end
@doc false
def split_router({:|>, _, [left, right]}) do
split_router(left) ++ split_router(right)
end
def split_router({:__aliases__, _, module}) do
[Module.safe_concat(module)]
end
@doc false
def warning_unknown_opts(module, keys) do
keys
|> Enum.map(&inspect/1)
|> Enum.join(", ")
|> case do
"" -> nil
keys -> Maru.Utils.warn "unknown `use` options #{keys} for module #{inspect module}\n"
end
end
@doc false
def warn(string) do
IO.write :stderr, "\e[33mwarning: \e[0m#{string}"
end
end
| 22.984615 | 92 | 0.571954 |
f7c13643e8ced6e7b31b15b4f6f78df7f3be142c | 242 | ex | Elixir | lib/glimesh_web/plugs/user_agent_plug.ex | MemoryLeakDeath/glimesh.tv | 1462c4b939da899f5e3f67c3f28850025d59a10f | [
"MIT"
] | null | null | null | lib/glimesh_web/plugs/user_agent_plug.ex | MemoryLeakDeath/glimesh.tv | 1462c4b939da899f5e3f67c3f28850025d59a10f | [
"MIT"
] | null | null | null | lib/glimesh_web/plugs/user_agent_plug.ex | MemoryLeakDeath/glimesh.tv | 1462c4b939da899f5e3f67c3f28850025d59a10f | [
"MIT"
] | null | null | null | defmodule GlimeshWeb.Plugs.UserAgent do
import Plug.Conn
def init(_opts), do: nil
def call(conn, _opts \\ []) do
user_agent = get_req_header(conn, "user-agent")
conn
|> put_session(:user_agent, "#{user_agent}")
end
end
| 18.615385 | 51 | 0.669421 |
f7c14a047227d38fb229d995a14dd9b3fcb243c5 | 855 | ex | Elixir | lib/orders/report.ex | Sup3r-Us3r/exlivery | 7d8c5c0011f53cdca1fc8345e1b267355995ac35 | [
"MIT"
] | null | null | null | lib/orders/report.ex | Sup3r-Us3r/exlivery | 7d8c5c0011f53cdca1fc8345e1b267355995ac35 | [
"MIT"
] | null | null | null | lib/orders/report.ex | Sup3r-Us3r/exlivery | 7d8c5c0011f53cdca1fc8345e1b267355995ac35 | [
"MIT"
] | null | null | null | defmodule Exlivery.Orders.Report do
alias Exlivery.Orders.Agent, as: OrderAgent
alias Exlivery.Orders.{Item, Order}
def create(filename \\ "report.csv") do
order_list = build_order_list()
File.write(filename, order_list)
{:ok, "Report has been generated"}
end
defp build_order_list() do
OrderAgent.get_all()
|> Map.values()
|> Enum.map(fn order -> order_string(order) end)
end
defp order_string(%Order{user_cpf: cpf, items: items, total_price: total_price}) do
Enum.map(items, fn item -> item_string(cpf, item, total_price) end)
end
defp item_string(
cpf,
%Item{
category: category,
unity_price: unity_price,
quantity: quantity
},
total_price
) do
"#{cpf},#{category},#{quantity},#{unity_price},#{total_price}\n"
end
end
| 24.428571 | 85 | 0.636257 |
f7c17fcc5068dd5b4a409b7949c2a756e96d9a8d | 130 | ex | Elixir | apps/firestorm_web/lib/firestorm_web/web/views/inbound_view.ex | CircleCI-Public/firestorm | 9ca2c46a2b2377370347ad94d6003eeb77be38d6 | [
"MIT"
] | 10 | 2017-06-28T08:06:52.000Z | 2022-03-19T17:49:21.000Z | apps/firestorm_web/lib/firestorm_web/web/views/inbound_view.ex | CircleCI-Public/firestorm | 9ca2c46a2b2377370347ad94d6003eeb77be38d6 | [
"MIT"
] | null | null | null | apps/firestorm_web/lib/firestorm_web/web/views/inbound_view.ex | CircleCI-Public/firestorm | 9ca2c46a2b2377370347ad94d6003eeb77be38d6 | [
"MIT"
] | 2 | 2017-10-21T12:01:02.000Z | 2021-01-29T10:26:22.000Z | defmodule FirestormWeb.Web.InboundView do
use FirestormWeb.Web, :view
def render("sendgrid.json", %{}) do
"ok"
end
end
| 16.25 | 41 | 0.692308 |
f7c18b5f3d6e655803fd6fe5c065c17f3b144917 | 2,529 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/instance_group_managers_list_per_instance_configs_resp.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/instance_group_managers_list_per_instance_configs_resp.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/compute/lib/google_api/compute/v1/model/instance_group_managers_list_per_instance_configs_resp.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.InstanceGroupManagersListPerInstanceConfigsResp do
@moduledoc """
## Attributes
* `items` (*type:* `list(GoogleApi.Compute.V1.Model.PerInstanceConfig.t)`, *default:* `nil`) - [Output Only] The list of PerInstanceConfig.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - [Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results.
* `warning` (*type:* `GoogleApi.Compute.V1.Model.InstanceGroupManagersListPerInstanceConfigsRespWarning.t`, *default:* `nil`) - [Output Only] Informational warning message.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:items => list(GoogleApi.Compute.V1.Model.PerInstanceConfig.t()),
:nextPageToken => String.t(),
:warning =>
GoogleApi.Compute.V1.Model.InstanceGroupManagersListPerInstanceConfigsRespWarning.t()
}
field(:items, as: GoogleApi.Compute.V1.Model.PerInstanceConfig, type: :list)
field(:nextPageToken)
field(:warning,
as: GoogleApi.Compute.V1.Model.InstanceGroupManagersListPerInstanceConfigsRespWarning
)
end
defimpl Poison.Decoder,
for: GoogleApi.Compute.V1.Model.InstanceGroupManagersListPerInstanceConfigsResp do
def decode(value, options) do
GoogleApi.Compute.V1.Model.InstanceGroupManagersListPerInstanceConfigsResp.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Compute.V1.Model.InstanceGroupManagersListPerInstanceConfigsResp do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 40.790323 | 393 | 0.752076 |
f7c19e02fd9de486db74ac9238381959322be73e | 1,491 | ex | Elixir | DL-PR26/DL-PR26 (Pmin=0.0,Pmax=1.0).ELEMENT-IoT.ex | Realscrat/decentlab-decoders | 3ca5006cd85e3772a15a1b3fff3922c50979eeb6 | [
"MIT"
] | 13 | 2020-01-18T22:08:44.000Z | 2022-02-06T14:19:57.000Z | DL-PR26/DL-PR26 (Pmin=0.0,Pmax=1.0).ELEMENT-IoT.ex | johannesE/decentlab-decoders | c290ea1218de2c82d665fdc9f71f16682e12d917 | [
"MIT"
] | 4 | 2019-05-10T07:17:41.000Z | 2021-10-20T16:24:04.000Z | DL-PR26/DL-PR26 (Pmin=0.0,Pmax=1.0).ELEMENT-IoT.ex | johannesE/decentlab-decoders | c290ea1218de2c82d665fdc9f71f16682e12d917 | [
"MIT"
] | 15 | 2019-06-04T06:13:32.000Z | 2022-02-15T07:28:52.000Z |
# https://www.decentlab.com/products/pressure-/-liquid-level-and-temperature-sensor-for-lorawan
defmodule Parser do
use Platform.Parsing.Behaviour
# device-specific parameters
defp pmin(), do: 0.0
defp pmax(), do: 1.0
## test payloads
# 02016700033e8060170c7f
# 02016700020c7f
def fields do
[
%{field: "pressure", display: "Pressure", unit: "bar"},
%{field: "temperature", display: "Temperature", unit: "°C"},
%{field: "battery_voltage", display: "Battery voltage", unit: "V"}
]
end
def parse(<<2, device_id::size(16), flags::binary-size(2), words::binary>>, _meta) do
{_remaining, result} =
{words, %{:device_id => device_id, :protocol_version => 2}}
|> sensor0(flags)
|> sensor1(flags)
result
end
defp sensor0({<<x0::size(16), x1::size(16), remaining::binary>>, result},
<<_::size(15), 1::size(1), _::size(0)>>) do
{remaining,
Map.merge(result,
%{
:pressure => (x0 - 16384) / 32768 * (pmax() - pmin()) + pmin(),
:temperature => (x1 - 384) * 0.003125 - 50
})}
end
defp sensor0(result, _flags), do: result
defp sensor1({<<x0::size(16), remaining::binary>>, result},
<<_::size(14), 1::size(1), _::size(1)>>) do
{remaining,
Map.merge(result,
%{
:battery_voltage => x0 / 1000
})}
end
defp sensor1(result, _flags), do: result
end | 27.611111 | 95 | 0.551979 |
f7c1c0da951bff80feeaee473c6711ab0cb8b56e | 557 | ex | Elixir | lib/geo/wkb/writer.ex | ilyashuma/geo | 92c276a784237affd194b9dd0e3428bdc39ce4f8 | [
"MIT"
] | 1 | 2021-08-23T13:14:22.000Z | 2021-08-23T13:14:22.000Z | lib/geo/wkb/writer.ex | ilyashuma/geo | 92c276a784237affd194b9dd0e3428bdc39ce4f8 | [
"MIT"
] | null | null | null | lib/geo/wkb/writer.ex | ilyashuma/geo | 92c276a784237affd194b9dd0e3428bdc39ce4f8 | [
"MIT"
] | null | null | null | defmodule Geo.WKB.Writer do
@moduledoc false
defstruct wkb: nil, endian: :xdr
def new(:ndr) do
%Geo.WKB.Writer{wkb: "01", endian: :ndr}
end
def new(:xdr) do
%Geo.WKB.Writer{wkb: "00", endian: :xdr}
end
def write(%{endian: :ndr} = writer, value) do
value = Geo.Utils.reverse_byte_order(value)
%{writer | wkb: writer.wkb <> value}
end
def write(%{endian: :xdr} = writer, value) do
%{writer | wkb: writer.wkb <> value}
end
def write_no_endian(writer, value) do
%{writer | wkb: writer.wkb <> value}
end
end
| 20.62963 | 47 | 0.626571 |
f7c2135af1bc983ba612b66a7575ed3f7e21debe | 1,212 | exs | Elixir | mix.exs | powerhome/clockr | 751ceb03885a1a0a0f20e7443e961d6f1ca0a091 | [
"Apache-2.0"
] | null | null | null | mix.exs | powerhome/clockr | 751ceb03885a1a0a0f20e7443e961d6f1ca0a091 | [
"Apache-2.0"
] | null | null | null | mix.exs | powerhome/clockr | 751ceb03885a1a0a0f20e7443e961d6f1ca0a091 | [
"Apache-2.0"
] | null | null | null | defmodule Clockr.Mixfile do
use Mix.Project
def project do
[app: :clockr,
version: "0.0.1",
elixir: "~> 1.2",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix, :gettext] ++ Mix.compilers,
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[mod: {Clockr, []},
applications: [:phoenix, :phoenix_pubsub, :phoenix_html, :cowboy, :logger, :gettext]]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[{:phoenix, "~> 1.2.1"},
{:phoenix_pubsub, "~> 1.0"},
{:phoenix_html, "~> 2.6"},
{:phoenix_live_reload, "~> 1.0", only: :dev},
{:gettext, "~> 0.11"},
{:cowboy, "~> 1.0"},
{:distillery, "~> 1.4"},
{:credo, "~> 0.3", only: [:dev, :test]},
{:mix_test_watch, "~> 0.2", only: [:dev, :test], runtime: false}]
end
end
| 28.857143 | 90 | 0.589109 |
f7c22d574170bec2256db4f3d231019e3f852273 | 2,380 | ex | Elixir | lib/incident.ex | OleMchls/incident | 220e46598719fc50d10c96e5a848080172bf6f9b | [
"MIT"
] | null | null | null | lib/incident.ex | OleMchls/incident | 220e46598719fc50d10c96e5a848080172bf6f9b | [
"MIT"
] | null | null | null | lib/incident.ex | OleMchls/incident | 220e46598719fc50d10c96e5a848080172bf6f9b | [
"MIT"
] | null | null | null | defmodule Incident do
@moduledoc false
use Supervisor
alias Incident.{EventStore, EventStoreSupervisor, ProjectionStore}
@doc """
Starts an instance of Incident with an Incident supervisor.
"""
def start_link(config) do
Supervisor.start_link(__MODULE__, config, name: Incident.Supervisor)
end
@impl true
def init(config) do
config = %{
event_store: %{
adapter: event_store_adapter_for(config),
options: event_store_options(config)
},
projection_store: %{
adapter: projection_store_adapter_for(config),
options: projection_store_options(config)
}
}
children = [
{EventStoreSupervisor, config.event_store},
{ProjectionStore, config.projection_store}
]
Supervisor.init(children, strategy: :one_for_one)
end
@spec event_store_adapter_for(keyword()) :: module() | no_return()
defp event_store_adapter_for(config) do
case get_in(config, [:event_store, :adapter]) do
:postgres ->
EventStore.Postgres.Adapter
:in_memory ->
EventStore.InMemory.Adapter
_ ->
raise RuntimeError,
"An Event Store adapter is required in the config. The options are :postgres and :in_memory."
end
end
@spec event_store_options(keyword()) :: keyword() | no_return()
defp event_store_options(config) do
case get_in(config, [:event_store, :options]) do
nil ->
raise RuntimeError, "An Event Store Options is required based on the adapter chosen."
options ->
options
end
end
@spec projection_store_adapter_for(keyword()) :: module() | no_return()
defp projection_store_adapter_for(config) do
case get_in(config, [:projection_store, :adapter]) do
:postgres ->
ProjectionStore.Postgres.Adapter
:in_memory ->
ProjectionStore.InMemory.Adapter
_ ->
raise RuntimeError,
"A Projection Store adapter is required in the config. The options are :postgres and :in_memory."
end
end
@spec projection_store_options(keyword()) :: keyword() | no_return()
defp projection_store_options(config) do
case get_in(config, [:projection_store, :options]) do
nil ->
raise RuntimeError, "A Projection Store Options is required based on the adapter chosen."
options ->
options
end
end
end
| 27.045455 | 111 | 0.670588 |
f7c22fb81d08762cf3ab19775a985562d1d9c4a8 | 419 | ex | Elixir | bench/lib/benchmarks.pb.ex | redink/protobuf | 9f3e443ba2f0e808aae8ae075ffb7abeeca75483 | [
"MIT"
] | 4 | 2021-01-16T02:21:44.000Z | 2022-03-04T18:42:18.000Z | bench/lib/benchmarks.pb.ex | redink/protobuf | 9f3e443ba2f0e808aae8ae075ffb7abeeca75483 | [
"MIT"
] | 5 | 2020-04-07T20:22:38.000Z | 2020-09-23T02:28:36.000Z | bench/lib/benchmarks.pb.ex | redink/protobuf | 9f3e443ba2f0e808aae8ae075ffb7abeeca75483 | [
"MIT"
] | 4 | 2020-07-22T23:38:34.000Z | 2021-03-26T18:52:54.000Z | defmodule Benchmarks.BenchmarkDataset do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
name: String.t(),
message_name: String.t(),
payload: [binary]
}
defstruct [:name, :message_name, :payload]
field :name, 1, type: :string
field :message_name, 2, type: :string, json_name: "messageName"
field :payload, 3, repeated: true, type: :bytes
end
| 26.1875 | 65 | 0.642005 |
f7c2315112f088f0e99088c8e7311cae2bbbc064 | 1,506 | exs | Elixir | test/saltpack_test.exs | mwmiller/saltpack_ex | 6789dc6a37e7e5c103e78c90d28a0074e9aabb1d | [
"MIT"
] | 9 | 2016-04-12T21:47:02.000Z | 2020-02-03T02:09:16.000Z | test/saltpack_test.exs | mwmiller/saltpack_ex | 6789dc6a37e7e5c103e78c90d28a0074e9aabb1d | [
"MIT"
] | null | null | null | test/saltpack_test.exs | mwmiller/saltpack_ex | 6789dc6a37e7e5c103e78c90d28a0074e9aabb1d | [
"MIT"
] | null | null | null | defmodule SaltpackTest do
use ExUnit.Case
doctest Saltpack
test "encryption cycle" do
{ask, apk} = Saltpack.new_key_pair()
{bsk, bpk} = Saltpack.new_key_pair()
short_message = :crypto.strong_rand_bytes(32)
long_message = :crypto.strong_rand_bytes(8192)
assert short_message |> Saltpack.encrypt_message([bpk], ask) |> Saltpack.open_message(bsk) ==
short_message
assert long_message |> Saltpack.encrypt_message([apk], bsk) |> Saltpack.open_message(ask) ==
long_message
end
test "signing cycles" do
{ask, apk} = Saltpack.new_key_pair(:sign)
short_message = :crypto.strong_rand_bytes(32)
long_message = :crypto.strong_rand_bytes(8192)
assert short_message |> Saltpack.sign_message(ask) |> Saltpack.open_message() == short_message
assert long_message |> Saltpack.sign_message(ask) |> Saltpack.open_message() == long_message
assert short_message
|> Saltpack.sign_message(ask, apk, :detached)
|> Saltpack.open_message(nil, short_message) == apk
end
test "armor cycle" do
# Not actually needed, but showing API obliviousness
{ask, _apk} = Saltpack.new_key_pair()
short_message = :crypto.strong_rand_bytes(32)
long_message = :crypto.strong_rand_bytes(8192)
assert short_message |> Saltpack.armor_message() |> Saltpack.open_message(ask) ==
short_message
assert long_message |> Saltpack.armor_message() |> Saltpack.open_message(ask) == long_message
end
end
| 33.466667 | 98 | 0.702523 |
f7c2789c7c8370057bbdfd3dc671471508d7763f | 12,909 | exs | Elixir | priv/repo/migrations/20200310193550_init_core_schemas.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 45 | 2020-04-17T15:40:27.000Z | 2022-03-25T00:13:30.000Z | priv/repo/migrations/20200310193550_init_core_schemas.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 944 | 2020-02-13T02:37:01.000Z | 2022-03-31T17:50:07.000Z | priv/repo/migrations/20200310193550_init_core_schemas.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 23 | 2020-07-28T03:36:13.000Z | 2022-03-17T14:29:02.000Z | defmodule Oli.Repo.Migrations.InitCoreSchemas do
use Ecto.Migration
  # Initial core schema. Tables are created in dependency order: `references/1`
  # requires the referenced table to already exist, so the `create` calls below
  # must not be reordered.
  def change do
    # -- Lookup tables (one row per enumerated value) --
    create table(:resource_types) do
      timestamps(type: :timestamptz)
      add :type, :string
    end
    create table(:scoring_strategies) do
      timestamps(type: :timestamptz)
      add :type, :string
    end
    create table(:system_roles) do
      timestamps(type: :timestamptz)
      add :type, :string
    end
    create unique_index(:system_roles, [:type])
    create table(:project_roles) do
      timestamps(type: :timestamptz)
      add :type, :string
    end
    create table(:section_roles) do
      timestamps(type: :timestamptz)
      add :type, :string
    end
    # -- Identity: authoring accounts and institutions --
    create table(:authors) do
      add :email, :string
      add :first_name, :string
      add :last_name, :string
      add :provider, :string
      add :token, :string
      add :password_hash, :string
      add :email_verified, :boolean
      add :system_role_id, references(:system_roles)
      add :preferences, :map
      timestamps(type: :timestamptz)
    end
    create unique_index(:authors, [:email])
    create table(:institutions) do
      add :institution_email, :string
      add :name, :string
      add :country_code, :string
      add :institution_url, :string
      add :timezone, :string
      add :consumer_key, :string
      add :shared_secret, :string
      add :author_id, references(:authors)
      timestamps(type: :timestamptz)
    end
    # -- LTI integration --
    create table(:lti_tool_consumers) do
      add :instance_guid, :string
      add :instance_name, :string
      add :instance_contact_email, :string
      add :info_version, :string
      add :info_product_family_code, :string
      add :institution_id, references(:institutions)
      timestamps(type: :timestamptz)
    end
    # Single-use values for LTI launch replay protection.
    create table(:nonces) do
      add :value, :string
      timestamps(type: :timestamptz)
    end
    create unique_index(:nonces, [:value])
    # -- Learners (provisioned through LTI launches) --
    create table(:users) do
      add :email, :string
      add :first_name, :string
      add :last_name, :string
      add :user_id, :string
      add :user_image, :string
      add :roles, :string
      # Remove when LTI 1.3 GS replaces canvas api for grade passback
      add :canvas_id, :string
      add :author_id, references(:authors)
      add :lti_tool_consumer_id, references(:lti_tool_consumers)
      add :institution_id, references(:institutions)
      timestamps(type: :timestamptz)
    end
    # -- Content model: families -> projects -> resources/revisions --
    create table(:families) do
      add :title, :string
      add :slug, :string
      add :description, :string
      timestamps(type: :timestamptz)
    end
    create unique_index(:families, [:slug], name: :index_slug_families)
    create table(:projects) do
      add :title, :string
      add :slug, :string
      add :description, :text
      add :version, :string
      # self-reference: a project may be derived from a parent project
      add :project_id, references(:projects)
      add :family_id, references(:families)
      timestamps(type: :timestamptz)
    end
    create unique_index(:projects, [:slug], name: :index_slug_projects)
    # Stable identity rows only; all actual content lives on :revisions.
    create table(:resources) do
      timestamps(type: :timestamptz)
    end
    create table(:publications) do
      add :description, :string
      add :published, :boolean, default: false, null: false
      add :open_and_free, :boolean, default: false, null: false
      add :root_resource_id, references(:resources)
      add :project_id, references(:projects)
      timestamps(type: :timestamptz)
    end
    # -- Delivery: course sections --
    create table(:sections) do
      add :title, :string
      add :start_date, :date
      add :end_date, :date
      add :time_zone, :string
      add :open_and_free, :boolean, default: false, null: false
      add :registration_open, :boolean, default: false, null: false
      add :context_id, :string
      add :lti_lineitems_url, :string
      add :lti_lineitems_token, :string
      # Remove when LTI 1.3 GS replaces canvas api for grade passback
      add :canvas_url, :string
      add :canvas_token, :string
      add :canvas_id, :string
      add :institution_id, references(:institutions)
      add :project_id, references(:projects)
      add :publication_id, references(:publications)
      timestamps(type: :timestamptz)
    end
    # -- Pluggable activity types (custom elements + scripts per activity) --
    create table(:activity_registrations) do
      add :slug, :string
      add :title, :string
      add :icon, :string
      add :description, :string
      add :delivery_element, :string
      add :authoring_element, :string
      add :delivery_script, :string
      add :authoring_script, :string
      timestamps(type: :timestamptz)
    end
    create unique_index(:activity_registrations, [:slug], name: :index_slug_registrations)
    create unique_index(:activity_registrations, [:delivery_element],
      name: :index_delivery_element_registrations
    )
    create unique_index(:activity_registrations, [:authoring_element],
      name: :index_authoring_element_registrations
    )
    create unique_index(:activity_registrations, [:delivery_script],
      name: :index_delivery_script_registrations
    )
    create unique_index(:activity_registrations, [:authoring_script],
      name: :index_authoring_script_registrations
    )
    # Versioned content: one row per revision of a resource.
    create table(:revisions) do
      add :title, :string
      add :slug, :string
      add :content, :map
      add :children, {:array, :id}
      add :objectives, :map
      add :deleted, :boolean, default: false, null: false
      add :graded, :boolean, default: false, null: false
      add :max_attempts, :integer
      add :recommended_attempts, :integer
      add :time_limit, :integer
      add :scoring_strategy_id, references(:scoring_strategies)
      add :author_id, references(:authors)
      add :resource_id, references(:resources)
      add :resource_type_id, references(:resource_types)
      add :previous_revision_id, references(:revisions)
      add :activity_type_id, references(:activity_registrations)
      timestamps(type: :timestamptz)
    end
    # Non-unique: several revisions of the same resource can share a slug.
    create index(:revisions, [:slug], name: :index_slug_revisions)
    # Maps each (publication, resource) pair to its pinned revision; also holds
    # the authoring lock.
    create table(:published_resources) do
      add :resource_id, references(:resources)
      add :publication_id, references(:publications)
      add :revision_id, references(:revisions)
      add :locked_by_id, references(:authors), null: true
      add :lock_updated_at, :naive_datetime
      timestamps(type: :timestamptz)
    end
    # -- Join tables --
    # Users enrolled in sections. The `primary_key: true` columns become part of
    # the primary key alongside the default `id` column (no `primary_key: false`
    # on the table, unlike :authors_projects below) -- NOTE(review): confirm
    # whether the surrogate `id` was meant to stay in the key.
    create table(:enrollments) do
      # NOTE(review): every other table uses timestamps(type: :timestamptz);
      # this one falls back to the default type -- confirm whether intended.
      timestamps()
      add :user_id, references(:users), primary_key: true
      add :section_id, references(:sections), primary_key: true
      add :section_role_id, references(:section_roles)
    end
    create index(:enrollments, [:user_id])
    create index(:enrollments, [:section_id])
    create unique_index(:enrollments, [:user_id, :section_id], name: :index_user_section)
    # Authors attached to sections (same composite-key caveat as :enrollments).
    create table(:authors_sections) do
      timestamps(type: :timestamptz)
      add :author_id, references(:authors), primary_key: true
      add :section_id, references(:sections), primary_key: true
      add :section_role_id, references(:section_roles)
    end
    create index(:authors_sections, [:author_id])
    create index(:authors_sections, [:section_id])
    create unique_index(:authors_sections, [:author_id, :section_id], name: :index_author_section)
    # Authors collaborating on projects; pure composite key (no surrogate id).
    create table(:authors_projects, primary_key: false) do
      timestamps(type: :timestamptz)
      add :author_id, references(:authors), primary_key: true
      add :project_id, references(:projects), primary_key: true
      add :project_role_id, references(:project_roles)
    end
    create index(:authors_projects, [:author_id])
    create index(:authors_projects, [:project_id])
    create unique_index(:authors_projects, [:author_id, :project_id], name: :index_author_project)
    # Resources owned by projects; pure composite key (no surrogate id).
    create table(:projects_resources, primary_key: false) do
      timestamps(type: :timestamptz)
      add :project_id, references(:projects), primary_key: true
      add :resource_id, references(:resources), primary_key: true
    end
    create index(:projects_resources, [:resource_id])
    create index(:projects_resources, [:project_id])
    create unique_index(:projects_resources, [:resource_id, :project_id],
      name: :index_project_resource
    )
    # -- Attempt tracking hierarchy: access -> resource -> activity -> part --
    create table(:resource_accesses) do
      timestamps(type: :timestamptz)
      add :access_count, :integer, null: true
      add :score, :float, null: true
      add :out_of, :float, null: true
      add :user_id, references(:users)
      add :section_id, references(:sections)
      add :resource_id, references(:resources)
    end
    create index(:resource_accesses, [:resource_id])
    create index(:resource_accesses, [:section_id])
    create index(:resource_accesses, [:user_id])
    create unique_index(:resource_accesses, [:resource_id, :user_id, :section_id],
      name: :resource_accesses_unique_index
    )
    create table(:resource_attempts) do
      timestamps(type: :timestamptz)
      add :attempt_guid, :string
      add :attempt_number, :integer
      add :date_evaluated, :utc_datetime
      add :score, :float
      add :out_of, :float
      add :resource_access_id, references(:resource_accesses)
      add :revision_id, references(:revisions)
    end
    create index(:resource_attempts, [:resource_access_id])
    create unique_index(:resource_attempts, [:attempt_guid], name: :resource_attempt_guid_index)
    create table(:activity_attempts) do
      timestamps(type: :timestamptz)
      add :attempt_guid, :string
      add :attempt_number, :integer
      add :date_evaluated, :utc_datetime
      add :score, :float
      add :out_of, :float
      add :transformed_model, :map
      add :resource_attempt_id, references(:resource_attempts)
      add :revision_id, references(:revisions)
      add :resource_id, references(:resources)
    end
    create unique_index(:activity_attempts, [:attempt_guid], name: :activity_attempt_guid_index)
    create index(:activity_attempts, [:resource_attempt_id])
    create table(:part_attempts) do
      timestamps(type: :timestamptz)
      add :attempt_guid, :string
      add :attempt_number, :integer
      add :date_evaluated, :utc_datetime
      add :score, :float
      add :out_of, :float
      add :response, :map
      add :feedback, :map
      add :hints, {:array, :string}
      add :part_id, :string
      add :activity_attempt_id, references(:activity_attempts)
    end
    create index(:part_attempts, [:activity_attempt_id])
    create unique_index(:part_attempts, [:attempt_guid], name: :attempt_guid_index)
    # -- Denormalized analytics snapshots (one row per evaluated part/objective) --
    create table(:snapshots) do
      timestamps(type: :timestamptz)
      add :resource_id, references(:resources)
      add :activity_id, references(:resources)
      add :part_id, :string
      add :part_attempt_id, references(:part_attempts)
      add :user_id, references(:users)
      add :section_id, references(:sections)
      add :objective_id, references(:resources)
      add :objective_revision_id, references(:revisions)
      add :revision_id, references(:revisions)
      add :activity_type_id, references(:activity_registrations)
      add :attempt_number, :integer
      add :part_attempt_number, :integer
      add :resource_attempt_number, :integer
      add :correct, :boolean
      add :graded, :boolean
      add :score, :float
      add :out_of, :float
      add :hints, :integer
    end
    create index(:snapshots, [:objective_id])
    create index(:snapshots, [:activity_id])
    create index(:snapshots, [:section_id])
    create unique_index(:snapshots, [:part_attempt_id, :objective_id], name: :snapshot_unique_part)
    # -- Uploaded media (soft-deleted via :deleted flag) --
    create table(:media_items) do
      timestamps(type: :timestamptz)
      add :url, :string
      add :file_name, :string
      add :mime_type, :string
      add :file_size, :integer
      add :md5_hash, :string
      add :deleted, :boolean, default: false, null: false
      add :project_id, references(:projects)
    end
    create index(:media_items, [:file_name])
    create index(:media_items, [:file_size])
    create index(:media_items, [:md5_hash])
    # -- Content reviews and their warnings --
    create table(:reviews) do
      timestamps(type: :timestamptz)
      add :project_id, references(:projects)
      add :type, :string, null: false
      add :done, :boolean, default: false
    end
    create index(:reviews, :project_id)
    # Warnings are removed automatically when their review is deleted.
    create table(:warnings) do
      timestamps(type: :timestamptz)
      add :review_id, references(:reviews, on_delete: :delete_all)
      add :revision_id, references(:revisions), null: true
      add :subtype, :string
      add :content, :map
      add :requires_fix, :boolean, default: false
      add :is_dismissed, :boolean, default: false
    end
    create index(:warnings, [:review_id])
    create index(:warnings, [:revision_id])
    # -- UI themes --
    create table(:themes) do
      timestamps(type: :timestamptz)
      add :name, :string
      add :url, :string
      add :default, :boolean, default: false
    end
  end
end
| 31.03125 | 99 | 0.673096 |
f7c29e2264b47b43d2189f3a8663d816e6b1beeb | 3,325 | ex | Elixir | clients/firestore/lib/google_api/firestore/v1/model/google_firestore_admin_v1_database.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/firestore/lib/google_api/firestore/v1/model/google_firestore_admin_v1_database.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/firestore/lib/google_api/firestore/v1/model/google_firestore_admin_v1_database.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Firestore.V1.Model.GoogleFirestoreAdminV1Database do
  @moduledoc """
  A Cloud Firestore Database.

  Currently only one database is allowed per cloud project; this database must
  have a `database_id` of '(default)'.

  ## Attributes

  *   `concurrencyMode` (*type:* `String.t`, *default:* `nil`) - The concurrency
      control mode to use for this database.
  *   `earliestVersionTime` (*type:* `DateTime.t`, *default:* `nil`) - Output
      only. The earliest timestamp at which older versions of the data can be
      read (`now - version_retention_period`). Continuously updated and stale
      the moment it is queried; account for that lag when using it to recover
      data.
  *   `etag` (*type:* `String.t`, *default:* `nil`) - Server-computed checksum
      of the other fields; may be sent on update and delete requests to ensure
      the client has an up-to-date value before proceeding.
  *   `locationId` (*type:* `String.t`, *default:* `nil`) - The location of the
      database; see https://cloud.google.com/firestore/docs/locations.
  *   `name` (*type:* `String.t`, *default:* `nil`) - The resource name of the
      Database. Format: `projects/{project}/databases/{database}`
  *   `type` (*type:* `String.t`, *default:* `nil`) - The type of the database;
      see https://cloud.google.com/datastore/docs/firestore-or-datastore.
  """

  use GoogleApi.Gax.ModelBase

  # All fields are optional: nil when absent from the API response.
  @type t :: %__MODULE__{
          :concurrencyMode => String.t() | nil,
          :earliestVersionTime => DateTime.t() | nil,
          :etag => String.t() | nil,
          :locationId => String.t() | nil,
          :name => String.t() | nil,
          :type => String.t() | nil
        }

  # `field/1,2` is a GoogleApi.Gax.ModelBase macro -- it appears to register
  # each JSON attribute for (de)serialization; `as: DateTime` decodes the
  # RFC3339 timestamp string into a DateTime (confirm in ModelBase docs).
  field(:concurrencyMode)
  field(:earliestVersionTime, as: DateTime)
  field(:etag)
  field(:locationId)
  field(:name)
  field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.Firestore.V1.Model.GoogleFirestoreAdminV1Database do
  # Poison delegates decoding to the generated model module itself.
  def decode(model, opts),
    do: GoogleApi.Firestore.V1.Model.GoogleFirestoreAdminV1Database.decode(model, opts)
end
defimpl Poison.Encoder, for: GoogleApi.Firestore.V1.Model.GoogleFirestoreAdminV1Database do
  # Encoding is shared by every generated model via ModelBase.
  def encode(model, opts), do: GoogleApi.Gax.ModelBase.encode(model, opts)
end
| 53.629032 | 652 | 0.726617 |
f7c2b52b50a7a73927931d3dda070cb5c789ec19 | 1,489 | exs | Elixir | mix.exs | Deepidoo/axioncable | 99e860e12418b4b0f4661824b300a04406504253 | [
"MIT"
] | null | null | null | mix.exs | Deepidoo/axioncable | 99e860e12418b4b0f4661824b300a04406504253 | [
"MIT"
] | null | null | null | mix.exs | Deepidoo/axioncable | 99e860e12418b4b0f4661824b300a04406504253 | [
"MIT"
] | null | null | null | defmodule Axioncable.MixProject do
use Mix.Project
  # Mix project definition: package metadata, compiler chain and dependencies
  # for the :axioncable application.
  def project do
    [
      app: :axioncable,
      version: "0.2.5",
      elixir: "~> 1.5",
      # the :test env additionally compiles test/support (see elixirc_paths/1)
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [:phoenix, :gettext] ++ Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      description: description(),
      package: package(),
      deps: deps()
    ]
  end
  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      # callback module started on application boot
      mod: {Axioncable.Application, []},
      # OTP/Elixir applications (not deps) that must be started first
      extra_applications: [:logger, :runtime_tools]
    ]
  end
  # One-line Hex package description (the heredoc includes a trailing newline).
  # NOTE(review): "like AxionCable" is self-referential -- presumably this was
  # meant to read "like ActionCable" (the Rails feature); confirm with the
  # maintainer before changing the published description.
  defp description do
    """
    It's like AxionCable (100% compatible with JS Client), but you know, for Elixir
    """
  end
  # Hex package metadata: the repository link and license shown on hex.pm.
  defp package do
    [
      links: %{"GitHub" => "https://github.com/Deepidoo/axioncable"},
      licenses: ["MIT"]
    ]
  end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.4.10"},
{:phoenix_pubsub, "~> 1.1"},
{:phoenix_html, "~> 2.11"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"},
{:poison, ">= 0.0.0"},
{:ex_doc, "~> 0.19", only: :dev, runtime: false},
]
end
end
| 24.016129 | 83 | 0.572196 |
f7c2de84388e262af8d3ef151893561313cf9799 | 4,923 | exs | Elixir | test/absinthe/phase/document/arguments/coerce_lists_test.exs | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 4,101 | 2016-03-02T03:49:20.000Z | 2022-03-31T05:46:01.000Z | test/absinthe/phase/document/arguments/coerce_lists_test.exs | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 889 | 2016-03-02T16:06:59.000Z | 2022-03-31T20:24:12.000Z | test/absinthe/phase/document/arguments/coerce_lists_test.exs | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 564 | 2016-03-02T07:49:59.000Z | 2022-03-06T14:40:59.000Z | defmodule Absinthe.Phase.Document.Arguments.CoerceListsTest do
use Absinthe.PhaseCase,
phase: Absinthe.Phase.Document.Arguments.CoerceLists,
schema: __MODULE__.Schema,
async: true
alias Absinthe.Blueprint
  # Minimal schema for exercising the CoerceLists phase: each query field takes
  # an :input argument with a different amount of list/non-null wrapping.
  defmodule Schema do
    use Absinthe.Schema
    query do
      # nullable list of nullable integers
      field :foo_int_list, :foo do
        arg :input, list_of(:integer)
      end
      # non-null list of non-null integers
      field :foo_wrapped_int_list, :foo do
        arg :input, non_null(list_of(non_null(:integer)))
      end
      # non-null list of non-null enum values
      field :foo_wrapped_enum_list, :foo do
        arg :input, non_null(list_of(non_null(:type)))
      end
    end
    # trivial object so the query fields have a concrete return type
    object :foo do
      field :bar, :string
    end
    enum :type do
      value :baz
    end
  end
  describe "when using an List type input argument" do
    # A bare scalar literal supplied to a list-typed argument must be wrapped
    # into a one-element Blueprint.Input.List by the phase under test.
    test "coerces the type from a single element to List" do
      doc = """
      query List {
        fooIntList(input: 42) {
          bar
        }
      }
      """
      {:ok, result, _} = run_phase(doc, operation_name: "List", variables: %{})
      # Drill down: operation -> first selection -> the "input" argument.
      op = result.operations |> Enum.find(&(&1.name == "List"))
      field = op.selections |> List.first()
      input_argument = field.arguments |> Enum.find(&(&1.name == "input"))
      assert %Blueprint.Input.List{
               items: [%Blueprint.Input.Value{normalized: %Blueprint.Input.Integer{value: 42}}]
             } = input_argument.input_value.normalized
    end
    # The same coercion must apply when the scalar arrives through a variable.
    test "coerces the type from a single element to List when supplying variables" do
      doc = """
      query ListVar($input: Int) {
        fooIntList(input: $input) {
          bar
        }
      }
      """
      {:ok, result, _} = run_phase(doc, operation_name: "ListVar", variables: %{"input" => 42})
      op = result.operations |> Enum.find(&(&1.name == "ListVar"))
      field = op.selections |> List.first()
      input_argument = field.arguments |> Enum.find(&(&1.name == "input"))
      assert %Blueprint.Input.List{
               items: [%Blueprint.Input.Value{normalized: %Blueprint.Input.Integer{value: 42}}]
             } = input_argument.input_value.normalized
    end
  end
  describe "when using a wrapped List type input argument" do
    # Non-null wrapping around the list and its items must not prevent a bare
    # scalar literal from being coerced into a one-element list.
    test "coerces the type from a single element to List" do
      doc = """
      query List {
        fooWrappedIntList(input: 42) {
          bar
        }
      }
      """
      {:ok, result, _} = run_phase(doc, operation_name: "List", variables: %{})
      op = result.operations |> Enum.find(&(&1.name == "List"))
      field = op.selections |> List.first()
      input_argument = field.arguments |> Enum.find(&(&1.name == "input"))
      assert %Blueprint.Input.List{
               items: [%Blueprint.Input.Value{normalized: %Blueprint.Input.Integer{value: 42}}]
             } = input_argument.input_value.normalized
    end
    # Same behavior when the scalar is supplied through a non-null variable.
    test "coerces the type from a single element to List when supplying variables" do
      doc = """
      query ListVar($input: Int!) {
        fooWrappedIntList(input: $input) {
          bar
        }
      }
      """
      {:ok, result, _} = run_phase(doc, operation_name: "ListVar", variables: %{"input" => 42})
      op = result.operations |> Enum.find(&(&1.name == "ListVar"))
      field = op.selections |> List.first()
      input_argument = field.arguments |> Enum.find(&(&1.name == "input"))
      assert %Blueprint.Input.List{
               items: [%Blueprint.Input.Value{normalized: %Blueprint.Input.Integer{value: 42}}]
             } = input_argument.input_value.normalized
    end
  end
  describe "when using a List of a coercable type input argument" do
    # Coercion also applies to non-integer item types: a single enum literal
    # becomes a one-element list of Blueprint.Input.Enum.
    test "coerces the type from a single element to List" do
      doc = """
      query List {
        fooWrappedEnumList(input: BAZ) {
          bar
        }
      }
      """
      {:ok, result, _} = run_phase(doc, operation_name: "List", variables: %{})
      op = result.operations |> Enum.find(&(&1.name == "List"))
      field = op.selections |> List.first()
      input_argument = field.arguments |> Enum.find(&(&1.name == "input"))
      assert %Blueprint.Input.List{
               items: [%Blueprint.Input.Value{normalized: %Blueprint.Input.Enum{value: "BAZ"}}]
             } = input_argument.input_value.normalized
    end
    # Same behavior when the enum value is supplied through a variable.
    test "coerces the type from a single element to List when supplying variables" do
      doc = """
      query ListVar($input: Type!) {
        fooWrappedEnumList(input: $input) {
          bar
        }
      }
      """
      {:ok, result, _} = run_phase(doc, operation_name: "ListVar", variables: %{"input" => "BAZ"})
      op = result.operations |> Enum.find(&(&1.name == "ListVar"))
      field = op.selections |> List.first()
      input_argument = field.arguments |> Enum.find(&(&1.name == "input"))
      assert %Blueprint.Input.List{
               items: [%Blueprint.Input.Value{normalized: %Blueprint.Input.Enum{value: "BAZ"}}]
             } = input_argument.input_value.normalized
    end
  end
end
| 31.76129 | 98 | 0.592931 |
f7c2e09da0f2b5a5f59093da17f1110f48220ec8 | 1,739 | ex | Elixir | clients/iam_credentials/lib/google_api/iam_credentials/v1/model/generate_identity_binding_access_token_response.ex | chingor13/elixir-google-api | 85e13fa25c4c9f4618bb463ab4c79245fc6d2a7b | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/iam_credentials/lib/google_api/iam_credentials/v1/model/generate_identity_binding_access_token_response.ex | chingor13/elixir-google-api | 85e13fa25c4c9f4618bb463ab4c79245fc6d2a7b | [
"Apache-2.0"
] | null | null | null | clients/iam_credentials/lib/google_api/iam_credentials/v1/model/generate_identity_binding_access_token_response.ex | chingor13/elixir-google-api | 85e13fa25c4c9f4618bb463ab4c79245fc6d2a7b | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.IAMCredentials.V1.Model.GenerateIdentityBindingAccessTokenResponse do
  @moduledoc """
  Response for `GenerateIdentityBindingAccessToken`: an OAuth 2.0 access token
  together with its expiration time.

  ## Attributes

  *   `accessToken` (*type:* `String.t`, *default:* `nil`) - The OAuth 2.0 access token.
  *   `expireTime` (*type:* `DateTime.t`, *default:* `nil`) - Token expiration time.
      The expiration time is always set.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :accessToken => String.t(),
          :expireTime => DateTime.t()
        }

  # `field/1,2` (GoogleApi.Gax.ModelBase macro) appears to register each JSON
  # attribute for (de)serialization; `as: DateTime` decodes the RFC3339
  # timestamp string -- confirm in ModelBase docs.
  field(:accessToken)
  field(:expireTime, as: DateTime)
end
defimpl Poison.Decoder,
  for: GoogleApi.IAMCredentials.V1.Model.GenerateIdentityBindingAccessTokenResponse do
  # Poison delegates decoding to the generated model module itself.
  def decode(model, opts) do
    GoogleApi.IAMCredentials.V1.Model.GenerateIdentityBindingAccessTokenResponse.decode(model, opts)
  end
end
defimpl Poison.Encoder,
  for: GoogleApi.IAMCredentials.V1.Model.GenerateIdentityBindingAccessTokenResponse do
  # Encoding is shared by every generated model via ModelBase.
  def encode(model, opts), do: GoogleApi.Gax.ModelBase.encode(model, opts)
end
| 31.053571 | 89 | 0.732605 |
f7c2e8959df134c8344a820ed7ab9b01b76e44ad | 2,672 | ex | Elixir | lib/new_relic/harvest/collector/metric/harvester.ex | simonprev/elixir_agent | 56e6bf32259706ba45f3a158079f8e5a26f28b91 | [
"Apache-2.0"
] | null | null | null | lib/new_relic/harvest/collector/metric/harvester.ex | simonprev/elixir_agent | 56e6bf32259706ba45f3a158079f8e5a26f28b91 | [
"Apache-2.0"
] | null | null | null | lib/new_relic/harvest/collector/metric/harvester.ex | simonprev/elixir_agent | 56e6bf32259706ba45f3a158079f8e5a26f28b91 | [
"Apache-2.0"
] | null | null | null | defmodule NewRelic.Harvest.Collector.Metric.Harvester do
use GenServer
@moduledoc false
alias NewRelic.Harvest.Collector
def start_link(_) do
GenServer.start_link(__MODULE__, [])
end
def init(_) do
{:ok,
%{
start_time: System.system_time(),
start_time_mono: System.monotonic_time(),
end_time_mono: nil,
metrics: %{}
}}
end
# API
def report_metric(identifier, values),
do:
Collector.Metric.HarvestCycle
|> Collector.HarvestCycle.current_harvester()
|> GenServer.cast({:report, Collector.MetricData.transform(identifier, values)})
def gather_harvest,
do:
Collector.Metric.HarvestCycle
|> Collector.HarvestCycle.current_harvester()
|> GenServer.call(:gather_harvest)
# Server
def handle_cast(_late_msg, :completed), do: {:noreply, :completed}
def handle_cast({:report, report_metrics}, state) do
metrics =
report_metrics
|> List.wrap()
|> Enum.reduce(state.metrics, fn %{name: name, scope: scope} = metric, acc ->
Map.update(acc, {name, scope}, metric, fn existing ->
NewRelic.Metric.merge(existing, metric)
end)
end)
{:noreply, %{state | metrics: metrics}}
end
def handle_call(_late_msg, _from, :completed), do: {:reply, :completed, :completed}
def handle_call(:send_harvest, _from, state) do
send_harvest(%{state | end_time_mono: System.monotonic_time()})
{:reply, :ok, :completed}
end
def handle_call(:gather_harvest, _from, state) do
{:reply, build_metric_data(state.metrics), state}
end
def send_harvest(state) do
metric_data = build_metric_data(state.metrics)
Collector.Protocol.metric_data([
Collector.AgentRun.agent_run_id(),
System.convert_time_unit(state.start_time, :native, :second),
System.convert_time_unit(
state.start_time + (state.end_time_mono - state.start_time_mono),
:native,
:second
),
metric_data
])
log_harvest(length(metric_data))
end
  # Record a supportability metric and emit a debug log line for the harvest
  # that was just sent. `harvest_size` is the number of encoded metric entries.
  def log_harvest(harvest_size) do
    NewRelic.report_metric({:supportability, "MetricData"}, harvest_size: harvest_size)
    NewRelic.log(:debug, "Completed Metric harvest - size: #{harvest_size}")
  end
defp build_metric_data(metrics) do
metrics
|> Map.values()
|> Enum.map(&encode/1)
end
  # Encode one metric as a two-element list: the metric identity (name/scope
  # coerced to strings) followed by six positional statistics.
  # NOTE(review): the stat order appears to be the collector `metric_data`
  # wire contract -- do not reorder without checking the protocol spec.
  def encode(%NewRelic.Metric{name: name, scope: scope} = m) do
    [
      %{name: to_string(name), scope: to_string(scope)},
      [
        m.call_count,
        m.total_call_time,
        m.total_exclusive_time,
        m.min_call_time,
        m.max_call_time,
        m.sum_of_squares
      ]
    ]
  end
end
| 25.207547 | 87 | 0.656811 |
f7c2eafaab8b6e630e095af946e4f22c974dd89e | 1,605 | exs | Elixir | mix.exs | szTheory/coverex | 9a6806a01ddd96d802bd0332bffdf29e235df334 | [
"Apache-2.0"
] | 107 | 2015-02-09T21:37:47.000Z | 2021-09-07T16:27:23.000Z | mix.exs | szTheory/coverex | 9a6806a01ddd96d802bd0332bffdf29e235df334 | [
"Apache-2.0"
] | 35 | 2015-02-09T21:07:16.000Z | 2022-01-15T13:29:22.000Z | mix.exs | szTheory/coverex | 9a6806a01ddd96d802bd0332bffdf29e235df334 | [
"Apache-2.0"
] | 35 | 2015-05-04T16:12:13.000Z | 2021-01-20T12:11:57.000Z | defmodule Coverex.Mixfile do
use Mix.Project
  # Mix project definition for the :coverex hex package, including ExDoc
  # settings and the test_coverage tool configuration.
  def project do
    [app: :coverex,
     version: "1.5.1-dev",
     elixir: ">= 1.6.0",
     package: package(),
     name: "Coverex - Coverage Reports for Elixir",
     source_url: "https://github.com/alfert/coverex",
     homepage_url: "https://github.com/alfert/coverex",
     docs: [readme: "README.md", extra: "CHANGELOG.md"],
     description: description(),
     # Coverex uses itself as the coverage tool for its own test suite.
     test_coverage: [tool: Coverex.Task, coveralls: true, ignore_modules: []],
     deps: deps()]
  end
  # Configuration for the OTP application
  #
  # Type `mix help compile.app` for more information
  # NOTE(review): :hackney is also a runtime dependency (see deps/0); since
  # Elixir 1.4 dependency applications start automatically, so listing it in
  # :extra_applications is redundant (though harmless) -- confirm intent.
  def application do
    [ extra_applications: [:logger, :hackney]]
  end
# List all dependencies in the format:
#
# {:foobar, git: "https://github.com/elixir-lang/foobar.git", tag: "0.1"}
#
# Type `mix help deps` for more examples and options
defp deps do
[
{:hackney, "~> 1.5"},
{:poison, "~> 3.0 or ~> 3.1 or ~> 4.0"},
{:earmark, "~> 1.0", only: :dev},
{:ex_doc, "~> 0.13", only: :dev},
{:dialyxir, "~> 1.0.0-rc3", only: [:dev, :test], runtime: false},
]
end
# Hex Package description
defp description do
"""
Coverex is an Elixir Coverage tool used by mix. It provides tables with overviews of
module and function coverage data, includings links to annotated source code files and
supports coveralls.io.
"""
end
  # Hex Package definition
  # Metadata shown on hex.pm: maintainers, license and repository link.
  defp package do
    [maintainers: ["Klaus Alfert"],
     licenses: ["Apache 2.0"],
     links: %{"GitHub" => "https://github.com/alfert/coverex"}
    ]
  end
end
| 28.157895 | 90 | 0.618069 |
f7c2eec11e2aaeee8174c938b7ec9ac5117ddc93 | 1,971 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/comment.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/comment.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/comment.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.Comment do
  @moduledoc """
  A comment represents a single YouTube comment.

  ## Attributes

  - etag (String.t): Etag of this resource. Defaults to: `null`.
  - id (String.t): The ID that YouTube uses to uniquely identify the comment. Defaults to: `null`.
  - kind (String.t): Identifies what kind of resource this is. Value: the fixed string \"youtube#comment\". Defaults to: `null`.
  - snippet (CommentSnippet): The snippet object contains basic details about the comment. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :etag => any(),
          :id => any(),
          :kind => any(),
          :snippet => GoogleApi.YouTube.V3.Model.CommentSnippet.t()
        }

  # `field/1,2` (GoogleApi.Gax.ModelBase macro) appears to register each JSON
  # attribute for (de)serialization; `as:` sets the nested decode target type.
  field(:etag)
  field(:id)
  field(:kind)
  field(:snippet, as: GoogleApi.YouTube.V3.Model.CommentSnippet)
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.Comment do
  # Poison delegates decoding to the generated model module itself.
  def decode(model, opts), do: GoogleApi.YouTube.V3.Model.Comment.decode(model, opts)
end
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.Comment do
  # Encoding is shared by every generated model via ModelBase.
  def encode(model, opts), do: GoogleApi.Gax.ModelBase.encode(model, opts)
end
| 34.578947 | 138 | 0.718417 |
f7c2ef08a6f38068e5135887bbdb6b5358e64d34 | 110 | exs | Elixir | test/web3_test.exs | wuminzhe/web3.ex | 3f34c6d59d17b54dfd743e6edf20921f6938a404 | [
"MIT"
] | null | null | null | test/web3_test.exs | wuminzhe/web3.ex | 3f34c6d59d17b54dfd743e6edf20921f6938a404 | [
"MIT"
] | null | null | null | test/web3_test.exs | wuminzhe/web3.ex | 3f34c6d59d17b54dfd743e6edf20921f6938a404 | [
"MIT"
] | null | null | null | defmodule Web3Test do
use ExUnit.Case
doctest Web3
  # Placeholder smoke test generated by `mix new`; it only proves that the
  # test suite runs. Replace with real Web3 coverage.
  test "the truth" do
    assert 1 + 1 == 2
  end
end
| 12.222222 | 21 | 0.654545 |
f7c2f409b57f06e649574f157ec3978a357f161c | 134 | ex | Elixir | apps/store/lib/staxx/store/chains.ex | makerdao/qa_backend_gateway | 38e9a3f3f4b66212f1ee9d38b3b698a2a1f9a809 | [
"Apache-2.0"
] | 1 | 2020-10-23T19:25:27.000Z | 2020-10-23T19:25:27.000Z | apps/store/lib/staxx/store/chains.ex | makerdao/qa_backend_gateway | 38e9a3f3f4b66212f1ee9d38b3b698a2a1f9a809 | [
"Apache-2.0"
] | 5 | 2019-01-11T11:48:08.000Z | 2019-01-16T17:29:23.000Z | apps/store/lib/staxx/store/chains.ex | makerdao/qa_backend_gateway | 38e9a3f3f4b66212f1ee9d38b3b698a2a1f9a809 | [
"Apache-2.0"
] | 7 | 2019-10-09T05:49:52.000Z | 2022-03-23T16:48:45.000Z | defmodule Staxx.Store.Chains do
@moduledoc """
Module that will handle most of actions regarding work with chains model
"""
end
| 22.333333 | 74 | 0.746269 |
f7c2f972e2774f86a67a89fda7ecfc6aa5b13827 | 505 | ex | Elixir | lib/meeseeks/selector/element/attribute/value_contains.ex | RichMorin/meeseeks | d52a15a0b78acfc4d7b979d1df6e146482dc3a10 | [
"Apache-2.0",
"MIT"
] | 291 | 2017-03-27T15:53:36.000Z | 2022-03-14T23:01:42.000Z | lib/meeseeks/selector/element/attribute/value_contains.ex | RichMorin/meeseeks | d52a15a0b78acfc4d7b979d1df6e146482dc3a10 | [
"Apache-2.0",
"MIT"
] | 70 | 2017-03-30T23:32:34.000Z | 2021-06-27T06:26:28.000Z | lib/meeseeks/selector/element/attribute/value_contains.ex | RichMorin/meeseeks | d52a15a0b78acfc4d7b979d1df6e146482dc3a10 | [
"Apache-2.0",
"MIT"
] | 23 | 2017-06-18T10:29:04.000Z | 2021-11-04T13:08:12.000Z | defmodule Meeseeks.Selector.Element.Attribute.ValueContains do
use Meeseeks.Selector
@moduledoc false
alias Meeseeks.Document
alias Meeseeks.Selector.Element.Attribute.Helpers
defstruct attribute: nil, value: nil
@impl true
def match(selector, %Document.Element{} = element, _document, _context) do
value = Helpers.get(element.attributes, selector.attribute)
String.contains?(value, selector.value)
end
def match(_selector, _node, _document, _context) do
false
end
end
| 25.25 | 76 | 0.760396 |
f7c2fdd87d9ca3d94f24b3ed3702dab830773cf4 | 708 | ex | Elixir | lib/detect_web/gettext.ex | serokellcao/preact-phoenix | 6094c0db9411231adb053f2dc351b447dc53732a | [
"MIT"
] | null | null | null | lib/detect_web/gettext.ex | serokellcao/preact-phoenix | 6094c0db9411231adb053f2dc351b447dc53732a | [
"MIT"
] | 2 | 2021-03-10T04:41:11.000Z | 2021-05-11T00:41:00.000Z | lib/detect_web/gettext.ex | serokellcao/preact-phoenix | 6094c0db9411231adb053f2dc351b447dc53732a | [
"MIT"
] | null | null | null | defmodule DetectWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import DetectWeb.Gettext
# Simple translation
gettext("Here is the string to translate")
# Plural translation
ngettext("Here is the string to translate",
"Here are the strings to translate",
3)
# Domain-based translation
dgettext("errors", "Here is the error message to translate")
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :detect
end
| 28.32 | 72 | 0.676554 |
f7c302b7740c2bb81b61cd550631482c07756494 | 1,007 | exs | Elixir | config/config.exs | DeathstarNovember/legends | 8354f83942f0275f484e66d883413f308c013793 | [
"Unlicense"
] | null | null | null | config/config.exs | DeathstarNovember/legends | 8354f83942f0275f484e66d883413f308c013793 | [
"Unlicense"
] | 6 | 2019-11-20T19:31:23.000Z | 2021-09-02T02:34:05.000Z | config/config.exs | DeathstarNovember/legends | 8354f83942f0275f484e66d883413f308c013793 | [
"Unlicense"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
use Mix.Config
config :legends,
ecto_repos: [Legends.Repo]
# Configures the endpoint
config :legends, LegendsWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "FdoYieVIOFzJ2N5jcqVxZkhuw3Hs2lCV6HinhRJL+rUX2vEQ1NViSOEZ+sNKLsrX",
render_errors: [view: LegendsWeb.ErrorView, accepts: ~w(html json)],
pubsub: [name: Legends.PubSub, adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 32.483871 | 86 | 0.769613 |
f7c33737a510264d519931d64eebd561a6a976b3 | 132 | exs | Elixir | config/config.exs | MaethorNaur/forget | 8ce8adfcbf88a48d7adabc03c4815f5777e75f03 | [
"MIT"
] | null | null | null | config/config.exs | MaethorNaur/forget | 8ce8adfcbf88a48d7adabc03c4815f5777e75f03 | [
"MIT"
] | null | null | null | config/config.exs | MaethorNaur/forget | 8ce8adfcbf88a48d7adabc03c4815f5777e75f03 | [
"MIT"
] | null | null | null | import Config
# No libcluster topologies configured: automatic node discovery is disabled.
config :libcluster, :topologies, []

# Forget's cluster settings: single-node quorum with a RAM-only Mnesia schema.
config :forget, :configuration,
  cluster: [
    quorum: 1,
    schema: :ram
  ]
| 14.666667 | 35 | 0.651515 |
f7c369433466fbce3d623ea6c8b34305bf014c59 | 533 | ex | Elixir | test/support/test_helpers/agency_helpers.ex | jennstein2017/Challenge_gov | e0820df8b124a32ff8b78cb827ae43551492988b | [
"CC0-1.0"
] | null | null | null | test/support/test_helpers/agency_helpers.ex | jennstein2017/Challenge_gov | e0820df8b124a32ff8b78cb827ae43551492988b | [
"CC0-1.0"
] | null | null | null | test/support/test_helpers/agency_helpers.ex | jennstein2017/Challenge_gov | e0820df8b124a32ff8b78cb827ae43551492988b | [
"CC0-1.0"
] | null | null | null | defmodule ChallengeGov.TestHelpers.AgencyHelpers do
@moduledoc """
Helper factory functions for agencies
"""
alias ChallengeGov.Agencies
alias ChallengeGov.Repo
defp default_attributes(attributes) do
Map.merge(
%{
name: "Test Agency",
acronym: "TA"
},
attributes
)
end
def create_agency(attributes \\ %{}) do
{:ok, agency} =
%Agencies.Agency{}
|> Agencies.Agency.create_changeset(default_attributes(attributes))
|> Repo.insert()
agency
end
end
| 19.740741 | 73 | 0.643527 |
f7c37c26df51948b300661b183607795a3958f07 | 1,888 | exs | Elixir | test/ex_jagaimo_blog/blogs_test.exs | JasonTrue/ex_jagaimo_blog | e90e2dfe67971e009f3fafb9b4a06dd3bc92ffb9 | [
"MIT"
] | 1 | 2021-06-19T04:19:06.000Z | 2021-06-19T04:19:06.000Z | test/ex_jagaimo_blog/blogs_test.exs | JasonTrue/ex_jagaimo_blog | e90e2dfe67971e009f3fafb9b4a06dd3bc92ffb9 | [
"MIT"
] | 1 | 2021-06-19T04:21:52.000Z | 2021-06-19T04:21:52.000Z | test/ex_jagaimo_blog/blogs_test.exs | JasonTrue/ex_jagaimo_blog | e90e2dfe67971e009f3fafb9b4a06dd3bc92ffb9 | [
"MIT"
] | null | null | null | defmodule ExJagaimoBlog.BlogsTest do
  use ExJagaimoBlog.DataCase

  # NOTE(review): the generated context tests below are still commented out;
  # they reference Blogs.create_blog/1 and friends — re-enable once the Blogs
  # context exposes those functions.
  #  alias ExJagaimoBlog.Blogs
  #
  #  describe "blogs" do
  #    alias ExJagaimoBlog.Blogs.Blog
  #
  #    @valid_attrs %{}
  #    @update_attrs %{}
  #    @invalid_attrs %{}
  #
  #    def blog_fixture(attrs \\ %{}) do
  #      {:ok, blog} =
  #        attrs
  #        |> Enum.into(@valid_attrs)
  #        |> Blogs.create_blog()
  #
  #      blog
  #    end
  #
  #    test "list_blogs/0 returns all blogs" do
  #      blog = blog_fixture()
  #      assert Blogs.list_blogs() == [blog]
  #    end
  #
  #    test "get_blog!/1 returns the blog with given id" do
  #      blog = blog_fixture()
  #      assert Blogs.get_blog!(blog.id) == blog
  #    end
  #
  #    test "create_blog/1 with valid data creates a blog" do
  #      assert {:ok, %Blog{} = blog} = Blogs.create_blog(@valid_attrs)
  #    end
  #
  #    test "create_blog/1 with invalid data returns error changeset" do
  #      assert {:error, %Ecto.Changeset{}} = Blogs.create_blog(@invalid_attrs)
  #    end
  #
  #    test "update_blog/2 with valid data updates the blog" do
  #      blog = blog_fixture()
  #      assert {:ok, %Blog{} = blog} = Blogs.update_blog(blog, @update_attrs)
  #    end
  #
  #    test "update_blog/2 with invalid data returns error changeset" do
  #      blog = blog_fixture()
  #      assert {:error, %Ecto.Changeset{}} = Blogs.update_blog(blog, @invalid_attrs)
  #      assert blog == Blogs.get_blog!(blog.id)
  #    end
  #
  #    test "delete_blog/1 deletes the blog" do
  #      blog = blog_fixture()
  #      assert {:ok, %Blog{}} = Blogs.delete_blog(blog)
  #      assert_raise Ecto.NoResultsError, fn -> Blogs.get_blog!(blog.id) end
  #    end
  #
  #    test "change_blog/1 returns a blog changeset" do
  #      blog = blog_fixture()
  #      assert %Ecto.Changeset{} = Blogs.change_blog(blog)
  #    end
  #  end
end
| 29.968254 | 85 | 0.586335 |
f7c3955d2a1ac690ec9b9059de4588dc26d15e14 | 3,164 | ex | Elixir | clients/composer/lib/google_api/composer/v1beta1/model/environment.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/composer/lib/google_api/composer/v1beta1/model/environment.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/composer/lib/google_api/composer/v1beta1/model/environment.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Composer.V1beta1.Model.Environment do
  @moduledoc """
  An environment for running orchestration tasks.

  ## Attributes

  *   `config` (*type:* `GoogleApi.Composer.V1beta1.Model.EnvironmentConfig.t`, *default:* `nil`) - Configuration parameters for this environment.
  *   `createTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only.
      The time at which this environment was created.
  *   `labels` (*type:* `map()`, *default:* `nil`) - Optional. User-defined labels for this environment.
      The labels map can contain no more than 64 entries. Entries of the labels
      map are UTF8 strings that comply with the following restrictions:

      * Keys must conform to regexp: \p{Ll}\p{Lo}{0,62}
      * Values must conform to regexp: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
      * Both keys and values are additionally constrained to be <= 128 bytes in
      size.
  *   `name` (*type:* `String.t`, *default:* `nil`) - The resource name of the environment, in the form:
      "projects/{projectId}/locations/{locationId}/environments/{environmentId}"
  *   `state` (*type:* `String.t`, *default:* `nil`) - The current state of the environment.
  *   `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only.
      The time at which this environment was last modified.
  *   `uuid` (*type:* `String.t`, *default:* `nil`) - Output only.
      The UUID (Universally Unique IDentifier) associated with this environment.
      This value is generated when the environment is created.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :config => GoogleApi.Composer.V1beta1.Model.EnvironmentConfig.t(),
          :createTime => DateTime.t(),
          :labels => map(),
          :name => String.t(),
          :state => String.t(),
          :updateTime => DateTime.t(),
          :uuid => String.t()
        }

  # Field declarations drive ModelBase's JSON decode/encode of this resource.
  field(:config, as: GoogleApi.Composer.V1beta1.Model.EnvironmentConfig)
  field(:createTime, as: DateTime)
  field(:labels, type: :map)
  field(:name)
  field(:state)
  field(:updateTime, as: DateTime)
  field(:uuid)
end

# Decoding delegates to the model's generated decode/2 so nested structs
# (e.g. `config`) are rebuilt as their model types.
defimpl Poison.Decoder, for: GoogleApi.Composer.V1beta1.Model.Environment do
  def decode(value, options) do
    GoogleApi.Composer.V1beta1.Model.Environment.decode(value, options)
  end
end

# Encoding reuses the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Composer.V1beta1.Model.Environment do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 41.090909 | 146 | 0.689001 |
f7c399a5975d0702219d84d8e14f92f5d87872f3 | 1,762 | exs | Elixir | backend/test/honeyland/geolocation/providers/free_geo_ip_test.exs | bejolithic/honeyland | 8c4a0d3b56543648d3acb96cc6906df86526743b | [
"Apache-2.0"
] | null | null | null | backend/test/honeyland/geolocation/providers/free_geo_ip_test.exs | bejolithic/honeyland | 8c4a0d3b56543648d3acb96cc6906df86526743b | [
"Apache-2.0"
] | null | null | null | backend/test/honeyland/geolocation/providers/free_geo_ip_test.exs | bejolithic/honeyland | 8c4a0d3b56543648d3acb96cc6906df86526743b | [
"Apache-2.0"
] | null | null | null | #
# This file is part of Honeyland.
#
# Copyright 2022 Nervive Studio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule Honeyland.Geolocation.Providers.FreeGeoIpTest do
  use Honeyland.DataCase

  import Tesla.Mock

  alias Honeyland.Geolocation.Providers.FreeGeoIp

  describe "ip_geolocation" do
    test "geolocate/1 returns error without input IP address" do
      assert FreeGeoIp.geolocate(nil) == {:error, :coordinates_not_found}
    end

    test "geolocate/1 returns coordinates from IP address" do
      # Stub the HTTP layer with a well-formed FreeGeoIp payload.
      payload = %{"latitude" => 45.4019498, "longitude" => 11.8706081}

      mock(fn %{method: :get, url: _api_url} -> json(payload) end)

      assert {:ok, coordinates} = FreeGeoIp.geolocate("198.51.100.25")
      assert %{accuracy: nil, latitude: 45.4019498, longitude: 11.8706081} = coordinates
    end

    test "geolocate/1 returns error without results from FreeGeoIp" do
      # A response without latitude/longitude keys must map to an error tuple.
      mock(fn %{method: :get, url: _api_url} -> json(%{"garbage" => "error"}) end)

      assert FreeGeoIp.geolocate("198.51.100.25") == {:error, :coordinates_not_found}
    end
  end
end
| 27.968254 | 88 | 0.678774 |
f7c3ea0347ad2723247e84b7e45047137127f693 | 1,726 | ex | Elixir | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/model/virtual_network_properties_format.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/model/virtual_network_properties_format.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/model/virtual_network_properties_format.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Microsoft.Azure.Management.Network.Model.VirtualNetworkPropertiesFormat do
  @moduledoc """
  Properties of the virtual network.
  """

  @derive [Poison.Encoder]
  defstruct [
    :"addressSpace",
    :"dhcpOptions",
    :"subnets",
    :"virtualNetworkPeerings",
    :"resourceGuid",
    :"provisioningState",
    :"enableDdosProtection",
    :"enableVmProtection",
    :"ddosProtectionPlan"
  ]

  @type t :: %__MODULE__{
    :"addressSpace" => AddressSpace,
    :"dhcpOptions" => DhcpOptions,
    :"subnets" => [Subnet],
    :"virtualNetworkPeerings" => [VirtualNetworkPeering],
    :"resourceGuid" => String.t,
    :"provisioningState" => String.t,
    :"enableDdosProtection" => boolean(),
    :"enableVmProtection" => boolean(),
    :"ddosProtectionPlan" => SubResource
  }
end

# Custom decoder: rebuilds nested model structs (address space, subnets,
# peerings, DDoS plan) from their decoded map representations.
defimpl Poison.Decoder, for: Microsoft.Azure.Management.Network.Model.VirtualNetworkPropertiesFormat do
  import Microsoft.Azure.Management.Network.Deserializer
  def decode(value, options) do
    value
    |> deserialize(:"addressSpace", :struct, Microsoft.Azure.Management.Network.Model.AddressSpace, options)
    |> deserialize(:"dhcpOptions", :struct, Microsoft.Azure.Management.Network.Model.DhcpOptions, options)
    |> deserialize(:"subnets", :list, Microsoft.Azure.Management.Network.Model.Subnet, options)
    |> deserialize(:"virtualNetworkPeerings", :list, Microsoft.Azure.Management.Network.Model.VirtualNetworkPeering, options)
    |> deserialize(:"ddosProtectionPlan", :struct, Microsoft.Azure.Management.Network.Model.SubResource, options)
  end
end
| 35.958333 | 125 | 0.724797 |
f7c3fb7cc3bc352258c155bf5095bc8b257df0ad | 2,295 | ex | Elixir | lib/petal_web.ex | colindensem/petal-k8s | def9dc653dcae2ef35021d44821e6c69d8744e15 | [
"MIT"
] | 1 | 2021-02-12T09:17:38.000Z | 2021-02-12T09:17:38.000Z | lib/petal_web.ex | colindensem/petal-k8s | def9dc653dcae2ef35021d44821e6c69d8744e15 | [
"MIT"
] | null | null | null | lib/petal_web.ex | colindensem/petal-k8s | def9dc653dcae2ef35021d44821e6c69d8744e15 | [
"MIT"
] | null | null | null | defmodule PetalWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use PetalWeb, :controller
use PetalWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: PetalWeb
import Plug.Conn
import PetalWeb.Gettext
alias PetalWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/petal_web/templates",
namespace: PetalWeb
# Import convenience functions from controllers
import Phoenix.Controller,
only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]
# Include shared imports and aliases for views
unquote(view_helpers())
end
end
def live_view do
quote do
use Phoenix.LiveView,
layout: {PetalWeb.LayoutView, "live.html"}
unquote(view_helpers())
end
end
def live_component do
quote do
use Phoenix.LiveComponent
unquote(view_helpers())
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
import Phoenix.LiveView.Router
end
end
def channel do
quote do
use Phoenix.Channel
import PetalWeb.Gettext
end
end
defp view_helpers do
quote do
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
# Import LiveView helpers (live_render, live_component, live_patch, etc)
import Phoenix.LiveView.Helpers
# Import basic rendering functionality (render, render_layout, etc)
import Phoenix.View
import PetalWeb.ErrorHelpers
import PetalWeb.Gettext
alias PetalWeb.Router.Helpers, as: Routes
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 22.281553 | 78 | 0.678431 |
f7c427d012a60bd632840d49faee33b201af82ec | 22,750 | ex | Elixir | lib/elixir/lib/calendar/time.ex | spencerdcarlson/elixir | 23d75ecdf58df80969e12f4420282238e19219a1 | [
"Apache-2.0"
] | 1 | 2021-12-16T20:32:28.000Z | 2021-12-16T20:32:28.000Z | lib/elixir/lib/calendar/time.ex | spencerdcarlson/elixir | 23d75ecdf58df80969e12f4420282238e19219a1 | [
"Apache-2.0"
] | 1 | 2020-09-14T16:23:33.000Z | 2021-03-25T17:38:59.000Z | lib/elixir/lib/calendar/time.ex | spencerdcarlson/elixir | 23d75ecdf58df80969e12f4420282238e19219a1 | [
"Apache-2.0"
] | 1 | 2020-11-25T02:22:55.000Z | 2020-11-25T02:22:55.000Z | defmodule Time do
@moduledoc """
A Time struct and functions.
The Time struct contains the fields hour, minute, second and microseconds.
New times can be built with the `new/4` function or using the
`~T` (see `Kernel.sigil_T/2`) sigil:
iex> ~T[23:00:07.001]
~T[23:00:07.001]
Both `new/4` and sigil return a struct where the time fields can
be accessed directly:
iex> time = ~T[23:00:07.001]
iex> time.hour
23
iex> time.microsecond
{1000, 3}
The functions on this module work with the `Time` struct as well
as any struct that contains the same fields as the `Time` struct,
such as `NaiveDateTime` and `DateTime`. Such functions expect
`t:Calendar.time/0` in their typespecs (instead of `t:t/0`).
Developers should avoid creating the Time structs directly
and instead rely on the functions provided by this module as well
as the ones in third-party calendar libraries.
## Comparing times
Comparisons in Elixir using `==/2`, `>/2`, `</2` and similar are structural
and based on the `Time` struct fields. For proper comparison between
times, use the `compare/2` function.
"""
  # Hour/minute/second are mandatory; microsecond defaults to zero precision
  # and the calendar defaults to ISO.
  @enforce_keys [:hour, :minute, :second]
  defstruct [:hour, :minute, :second, microsecond: {0, 0}, calendar: Calendar.ISO]

  @type t :: %__MODULE__{
          hour: Calendar.hour(),
          minute: Calendar.minute(),
          second: Calendar.second(),
          microsecond: Calendar.microsecond(),
          calendar: Calendar.calendar()
        }

  # Microseconds per day (24 * 60 * 60 * 1_000_000) and seconds per day,
  # used for day-fraction arithmetic below.
  @parts_per_day 86_400_000_000
  @seconds_per_day 24 * 60 * 60
@doc """
Returns the current time in UTC.
## Examples
iex> time = Time.utc_now()
iex> time.hour >= 0
true
"""
@doc since: "1.4.0"
@spec utc_now(Calendar.calendar()) :: t
def utc_now(calendar \\ Calendar.ISO) do
{:ok, _, time, microsecond} = Calendar.ISO.from_unix(:os.system_time(), :native)
{hour, minute, second} = time
iso_time = %Time{
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
calendar: Calendar.ISO
}
convert!(iso_time, calendar)
end
@doc """
Builds a new time.
Expects all values to be integers. Returns `{:ok, time}` if each
entry fits its appropriate range, returns `{:error, reason}` otherwise.
Microseconds can also be given with a precision, which must be an
integer between 0 and 6.
The built-in calendar does not support leap seconds.
## Examples
iex> Time.new(0, 0, 0, 0)
{:ok, ~T[00:00:00.000000]}
iex> Time.new(23, 59, 59, 999_999)
{:ok, ~T[23:59:59.999999]}
iex> Time.new(24, 59, 59, 999_999)
{:error, :invalid_time}
iex> Time.new(23, 60, 59, 999_999)
{:error, :invalid_time}
iex> Time.new(23, 59, 60, 999_999)
{:error, :invalid_time}
iex> Time.new(23, 59, 59, 1_000_000)
{:error, :invalid_time}
# Invalid precision
Time.new(23, 59, 59, {999_999, 10})
{:error, :invalid_time}
"""
@spec new(
Calendar.hour(),
Calendar.minute(),
Calendar.second(),
Calendar.microsecond() | non_neg_integer,
Calendar.calendar()
) :: {:ok, t} | {:error, atom}
def new(hour, minute, second, microsecond \\ {0, 0}, calendar \\ Calendar.ISO)
def new(hour, minute, second, microsecond, calendar) when is_integer(microsecond) do
new(hour, minute, second, {microsecond, 6}, calendar)
end
def new(hour, minute, second, {microsecond, precision}, calendar)
when is_integer(hour) and is_integer(minute) and is_integer(second) and
is_integer(microsecond) and is_integer(precision) do
case calendar.valid_time?(hour, minute, second, {microsecond, precision}) do
true ->
time = %Time{
hour: hour,
minute: minute,
second: second,
microsecond: {microsecond, precision},
calendar: calendar
}
{:ok, time}
false ->
{:error, :invalid_time}
end
end
@doc """
Builds a new time.
Expects all values to be integers. Returns `time` if each
entry fits its appropriate range, raises if the time is invalid.
Microseconds can also be given with a precision, which must be an
integer between 0 and 6.
The built-in calendar does not support leap seconds.
## Examples
iex> Time.new!(0, 0, 0, 0)
~T[00:00:00.000000]
iex> Time.new!(23, 59, 59, 999_999)
~T[23:59:59.999999]
iex> Time.new!(24, 59, 59, 999_999)
** (ArgumentError) cannot build time, reason: :invalid_time
"""
@doc since: "1.11.0"
@spec new!(
Calendar.hour(),
Calendar.minute(),
Calendar.second(),
Calendar.microsecond() | non_neg_integer,
Calendar.calendar()
) :: t
def new!(hour, minute, second, microsecond \\ {0, 0}, calendar \\ Calendar.ISO) do
case new(hour, minute, second, microsecond, calendar) do
{:ok, time} ->
time
{:error, reason} ->
raise ArgumentError, "cannot build time, reason: #{inspect(reason)}"
end
end
@doc """
Converts the given `time` to a string.
### Examples
iex> Time.to_string(~T[23:00:00])
"23:00:00"
iex> Time.to_string(~T[23:00:00.001])
"23:00:00.001"
iex> Time.to_string(~T[23:00:00.123456])
"23:00:00.123456"
iex> Time.to_string(~N[2015-01-01 23:00:00.001])
"23:00:00.001"
iex> Time.to_string(~N[2015-01-01 23:00:00.123456])
"23:00:00.123456"
"""
@spec to_string(Calendar.time()) :: String.t()
def to_string(time)
def to_string(%{
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
calendar: calendar
}) do
calendar.time_to_string(hour, minute, second, microsecond)
end
@doc """
Parses the extended "Local time" format described by
[ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).
Time zone offset may be included in the string but they will be
simply discarded as such information is not included in times.
As specified in the standard, the separator "T" may be omitted if
desired as there is no ambiguity within this function.
Time representations with reduced accuracy are not supported.
Note that while ISO 8601 allows times to specify 24:00:00 as the
zero hour of the next day, this notation is not supported by Elixir.
Leap seconds are not supported as well by the built-in Calendar.ISO.
## Examples
iex> Time.from_iso8601("23:50:07")
{:ok, ~T[23:50:07]}
iex> Time.from_iso8601("23:50:07Z")
{:ok, ~T[23:50:07]}
iex> Time.from_iso8601("T23:50:07Z")
{:ok, ~T[23:50:07]}
iex> Time.from_iso8601("23:50:07,0123456")
{:ok, ~T[23:50:07.012345]}
iex> Time.from_iso8601("23:50:07.0123456")
{:ok, ~T[23:50:07.012345]}
iex> Time.from_iso8601("23:50:07.123Z")
{:ok, ~T[23:50:07.123]}
iex> Time.from_iso8601("2015:01:23 23-50-07")
{:error, :invalid_format}
iex> Time.from_iso8601("23:50:07A")
{:error, :invalid_format}
iex> Time.from_iso8601("23:50:07.")
{:error, :invalid_format}
iex> Time.from_iso8601("23:50:61")
{:error, :invalid_time}
"""
@spec from_iso8601(String.t(), Calendar.calendar()) :: {:ok, t} | {:error, atom}
def from_iso8601(string, calendar \\ Calendar.ISO) do
with {:ok, {hour, minute, second, microsecond}} <- Calendar.ISO.parse_time(string) do
convert(
%Time{hour: hour, minute: minute, second: second, microsecond: microsecond},
calendar
)
end
end
@doc """
Parses the extended "Local time" format described by
[ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).
Raises if the format is invalid.
## Examples
iex> Time.from_iso8601!("23:50:07,123Z")
~T[23:50:07.123]
iex> Time.from_iso8601!("23:50:07.123Z")
~T[23:50:07.123]
iex> Time.from_iso8601!("2015:01:23 23-50-07")
** (ArgumentError) cannot parse "2015:01:23 23-50-07" as time, reason: :invalid_format
"""
  @spec from_iso8601!(String.t(), Calendar.calendar()) :: t
  def from_iso8601!(string, calendar \\ Calendar.ISO) do
    # Bang variant of from_iso8601/2: unwraps the ok tuple or raises.
    case from_iso8601(string, calendar) do
      {:ok, value} ->
        value

      {:error, reason} ->
        raise ArgumentError, "cannot parse #{inspect(string)} as time, reason: #{inspect(reason)}"
    end
  end
@doc """
Converts the given time to
[ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).
By default, `Time.to_iso8601/2` returns times formatted in the "extended"
format, for human readability. It also supports the "basic" format through
passing the `:basic` option.
### Examples
iex> Time.to_iso8601(~T[23:00:13])
"23:00:13"
iex> Time.to_iso8601(~T[23:00:13.001])
"23:00:13.001"
iex> Time.to_iso8601(~T[23:00:13.001], :basic)
"230013.001"
iex> Time.to_iso8601(~N[2010-04-17 23:00:13])
"23:00:13"
"""
  @spec to_iso8601(Calendar.time(), :extended | :basic) :: String.t()
  def to_iso8601(time, format \\ :extended)

  # Fast path: the value is already ISO — format the fields directly.
  def to_iso8601(%{calendar: Calendar.ISO} = time, format) when format in [:extended, :basic] do
    %{
      hour: hour,
      minute: minute,
      second: second,
      microsecond: microsecond
    } = time

    Calendar.ISO.time_to_string(hour, minute, second, microsecond, format)
  end

  # Any other calendar is converted to ISO first, then re-dispatched.
  def to_iso8601(%{calendar: _} = time, format) when format in [:extended, :basic] do
    time
    |> convert!(Calendar.ISO)
    |> to_iso8601(format)
  end
@doc """
Converts given `time` to an Erlang time tuple.
WARNING: Loss of precision may occur, as Erlang time tuples
only contain hours/minutes/seconds.
## Examples
iex> Time.to_erl(~T[23:30:15.999])
{23, 30, 15}
iex> Time.to_erl(~N[2010-04-17 23:30:15.999])
{23, 30, 15}
"""
@spec to_erl(Calendar.time()) :: :calendar.time()
def to_erl(time) do
%{hour: hour, minute: minute, second: second} = convert!(time, Calendar.ISO)
{hour, minute, second}
end
@doc """
Converts an Erlang time tuple to a `Time` struct.
## Examples
iex> Time.from_erl({23, 30, 15}, {5000, 3})
{:ok, ~T[23:30:15.005]}
iex> Time.from_erl({24, 30, 15})
{:error, :invalid_time}
"""
  @spec from_erl(:calendar.time(), Calendar.microsecond(), Calendar.calendar()) ::
          {:ok, t} | {:error, atom}
  def from_erl(tuple, microsecond \\ {0, 0}, calendar \\ Calendar.ISO)

  def from_erl({hour, minute, second}, microsecond, calendar) do
    # Validate via new/5 in ISO, then convert to the target calendar;
    # validation errors fall through the `with` unchanged.
    with {:ok, time} <- new(hour, minute, second, microsecond, Calendar.ISO),
         do: convert(time, calendar)
  end
@doc """
Converts an Erlang time tuple to a `Time` struct.
## Examples
iex> Time.from_erl!({23, 30, 15})
~T[23:30:15]
iex> Time.from_erl!({23, 30, 15}, {5000, 3})
~T[23:30:15.005]
iex> Time.from_erl!({24, 30, 15})
** (ArgumentError) cannot convert {24, 30, 15} to time, reason: :invalid_time
"""
  @spec from_erl!(:calendar.time(), Calendar.microsecond(), Calendar.calendar()) :: t
  def from_erl!(tuple, microsecond \\ {0, 0}, calendar \\ Calendar.ISO) do
    # Bang variant of from_erl/3: unwraps the ok tuple or raises.
    case from_erl(tuple, microsecond, calendar) do
      {:ok, value} ->
        value

      {:error, reason} ->
        raise ArgumentError,
              "cannot convert #{inspect(tuple)} to time, reason: #{inspect(reason)}"
    end
  end
@doc """
Converts a number of seconds after midnight to a `Time` struct.
## Examples
iex> Time.from_seconds_after_midnight(10_000)
~T[02:46:40]
iex> Time.from_seconds_after_midnight(30_000, {5000, 3})
~T[08:20:00.005]
iex> Time.from_seconds_after_midnight(-1)
~T[23:59:59]
iex> Time.from_seconds_after_midnight(100_000)
~T[03:46:40]
"""
@doc since: "1.11.0"
@spec from_seconds_after_midnight(
integer(),
Calendar.microsecond(),
Calendar.calendar()
) :: t
def from_seconds_after_midnight(seconds, microsecond \\ {0, 0}, calendar \\ Calendar.ISO)
when is_integer(seconds) do
seconds_in_day = Integer.mod(seconds, @seconds_per_day)
{hour, minute, second, {_, _}} =
calendar.time_from_day_fraction({seconds_in_day, @seconds_per_day})
%Time{
calendar: calendar,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond
}
end
@doc """
Converts a `Time` struct to a number of seconds after midnight.
The returned value is a two-element tuple with the number of seconds and microseconds.
## Examples
iex> Time.to_seconds_after_midnight(~T[23:30:15])
{84615, 0}
iex> Time.to_seconds_after_midnight(~N[2010-04-17 23:30:15.999])
{84615, 999000}
"""
@doc since: "1.11.0"
@spec to_seconds_after_midnight(Calendar.time()) :: {integer(), non_neg_integer()}
def to_seconds_after_midnight(%{microsecond: {microsecond, _precision}} = time) do
iso_days = {0, to_day_fraction(time)}
{Calendar.ISO.iso_days_to_unit(iso_days, :second), microsecond}
end
@doc """
Adds the `number` of `unit`s to the given `time`.
This function accepts the `number` measured according to `Calendar.ISO`.
The time is returned in the same calendar as it was given in.
Note the result value represents the time of day, meaning that it is cyclic,
for instance, it will never go over 24 hours for the ISO calendar.
## Examples
iex> Time.add(~T[10:00:00], 27000)
~T[17:30:00.000000]
iex> Time.add(~T[11:00:00.005], 2400)
~T[11:40:00.005000]
iex> Time.add(~T[00:00:00], 86_399_999, :millisecond)
~T[23:59:59.999000]
iex> Time.add(~T[17:10:05], 86400)
~T[17:10:05.000000]
iex> Time.add(~T[23:00:00], -60)
~T[22:59:00.000000]
"""
@doc since: "1.6.0"
@spec add(Calendar.time(), integer, System.time_unit()) :: t
def add(%{calendar: calendar} = time, number, unit \\ :second) when is_integer(number) do
number = System.convert_time_unit(number, unit, :microsecond)
total = time_to_microseconds(time) + number
parts = Integer.mod(total, @parts_per_day)
{hour, minute, second, microsecond} = calendar.time_from_day_fraction({parts, @parts_per_day})
%Time{
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
calendar: calendar
}
end
  # Microseconds elapsed since midnight for the given time value.
  # Fast path: ISO midnight is exactly zero, skipping the day-fraction math.
  defp time_to_microseconds(%{
         calendar: Calendar.ISO,
         hour: 0,
         minute: 0,
         second: 0,
         microsecond: {0, _}
       }) do
    0
  end

  defp time_to_microseconds(time) do
    iso_days = {0, to_day_fraction(time)}
    Calendar.ISO.iso_days_to_unit(iso_days, :microsecond)
  end
@doc """
Compares two time structs.
Returns `:gt` if first time is later than the second
and `:lt` for vice versa. If the two times are equal
`:eq` is returned.
## Examples
iex> Time.compare(~T[16:04:16], ~T[16:04:28])
:lt
iex> Time.compare(~T[16:04:16], ~T[16:04:16])
:eq
iex> Time.compare(~T[16:04:16.01], ~T[16:04:16.001])
:gt
This function can also be used to compare across more
complex calendar types by considering only the time fields:
iex> Time.compare(~N[1900-01-01 16:04:16], ~N[2015-01-01 16:04:16])
:eq
iex> Time.compare(~N[2015-01-01 16:04:16], ~N[2015-01-01 16:04:28])
:lt
iex> Time.compare(~N[2015-01-01 16:04:16.01], ~N[2000-01-01 16:04:16.001])
:gt
"""
@doc since: "1.4.0"
@spec compare(Calendar.time(), Calendar.time()) :: :lt | :eq | :gt
def compare(%{calendar: calendar} = time1, %{calendar: calendar} = time2) do
%{hour: hour1, minute: minute1, second: second1, microsecond: {microsecond1, _}} = time1
%{hour: hour2, minute: minute2, second: second2, microsecond: {microsecond2, _}} = time2
case {{hour1, minute1, second1, microsecond1}, {hour2, minute2, second2, microsecond2}} do
{first, second} when first > second -> :gt
{first, second} when first < second -> :lt
_ -> :eq
end
end
def compare(time1, time2) do
{parts1, ppd1} = to_day_fraction(time1)
{parts2, ppd2} = to_day_fraction(time2)
case {parts1 * ppd2, parts2 * ppd1} do
{first, second} when first > second -> :gt
{first, second} when first < second -> :lt
_ -> :eq
end
end
@doc """
Converts given `time` to a different calendar.
Returns `{:ok, time}` if the conversion was successful,
or `{:error, reason}` if it was not, for some reason.
## Examples
Imagine someone implements `Calendar.Holocene`, a calendar based on the
Gregorian calendar that adds exactly 10,000 years to the current Gregorian
year:
iex> Time.convert(~T[13:30:15], Calendar.Holocene)
{:ok, %Time{calendar: Calendar.Holocene, hour: 13, minute: 30, second: 15, microsecond: {0, 0}}}
"""
@doc since: "1.5.0"
@spec convert(Calendar.time(), Calendar.calendar()) :: {:ok, t} | {:error, atom}
# Keep it multiline for proper function clause errors.
def convert(
%{
calendar: calendar,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond
},
calendar
) do
time = %Time{
calendar: calendar,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond
}
{:ok, time}
end
def convert(%{microsecond: {_, precision}} = time, calendar) do
{hour, minute, second, {microsecond, _}} =
time
|> to_day_fraction()
|> calendar.time_from_day_fraction()
time = %Time{
calendar: calendar,
hour: hour,
minute: minute,
second: second,
microsecond: {microsecond, precision}
}
{:ok, time}
end
@doc """
Similar to `Time.convert/2`, but raises an `ArgumentError`
if the conversion between the two calendars is not possible.
## Examples
Imagine someone implements `Calendar.Holocene`, a calendar based on the
Gregorian calendar that adds exactly 10,000 years to the current Gregorian
year:
iex> Time.convert!(~T[13:30:15], Calendar.Holocene)
%Time{calendar: Calendar.Holocene, hour: 13, minute: 30, second: 15, microsecond: {0, 0}}
"""
@doc since: "1.5.0"
@spec convert!(Calendar.time(), Calendar.calendar()) :: t
def convert!(time, calendar) do
case convert(time, calendar) do
{:ok, value} ->
value
{:error, reason} ->
raise ArgumentError,
"cannot convert #{inspect(time)} to target calendar #{inspect(calendar)}, " <>
"reason: #{inspect(reason)}"
end
end
@doc """
Returns the difference between two times, considering only the hour, minute,
second and microsecond.
As with the `compare/2` function both `Time` structs and other structures
containing time can be used. If for instance a `NaiveDateTime` or `DateTime`
is passed, only the hour, minute, second, and microsecond is considered. Any
additional information about a date or time zone is ignored when calculating
the difference.
The answer can be returned in any `unit` available from
`t:System.time_unit/0`. If the first time value is earlier than
the second, a negative number is returned.
This function returns the difference in seconds where seconds
are measured according to `Calendar.ISO`.
## Examples
iex> Time.diff(~T[00:29:12], ~T[00:29:10])
2
# When passing a `NaiveDateTime` the date part is ignored.
iex> Time.diff(~N[2017-01-01 00:29:12], ~T[00:29:10])
2
# Two `NaiveDateTime` structs could have big differences in the date
# but only the time part is considered.
iex> Time.diff(~N[2017-01-01 00:29:12], ~N[1900-02-03 00:29:10])
2
iex> Time.diff(~T[00:29:12], ~T[00:29:10], :microsecond)
2_000_000
iex> Time.diff(~T[00:29:10], ~T[00:29:12], :microsecond)
-2_000_000
"""
@doc since: "1.5.0"
@spec diff(Calendar.time(), Calendar.time(), System.time_unit()) :: integer
def diff(time1, time2, unit \\ :second)
def diff(
%{
calendar: Calendar.ISO,
hour: hour1,
minute: minute1,
second: second1,
microsecond: {microsecond1, @parts_per_day}
},
%{
calendar: Calendar.ISO,
hour: hour2,
minute: minute2,
second: second2,
microsecond: {microsecond2, @parts_per_day}
},
unit
) do
total =
(hour1 - hour2) * 3_600_000_000 + (minute1 - minute2) * 60_000_000 +
(second1 - second2) * 1_000_000 + (microsecond1 - microsecond2)
System.convert_time_unit(total, :microsecond, unit)
end
def diff(time1, time2, unit) do
fraction1 = to_day_fraction(time1)
fraction2 = to_day_fraction(time2)
Calendar.ISO.iso_days_to_unit({0, fraction1}, unit) -
Calendar.ISO.iso_days_to_unit({0, fraction2}, unit)
end
@doc """
Returns the given time with the microsecond field truncated to the given
precision (`:microsecond`, `millisecond` or `:second`).
The given time is returned unchanged if it already has lower precision than
the given precision.
## Examples
iex> Time.truncate(~T[01:01:01.123456], :microsecond)
~T[01:01:01.123456]
iex> Time.truncate(~T[01:01:01.123456], :millisecond)
~T[01:01:01.123]
iex> Time.truncate(~T[01:01:01.123456], :second)
~T[01:01:01]
"""
@doc since: "1.6.0"
@spec truncate(t(), :microsecond | :millisecond | :second) :: t()
def truncate(%Time{microsecond: microsecond} = time, precision) do
%{time | microsecond: Calendar.truncate(microsecond, precision)}
end
## Helpers
defp to_day_fraction(%{
hour: hour,
minute: minute,
second: second,
microsecond: {_, _} = microsecond,
calendar: calendar
}) do
calendar.time_to_day_fraction(hour, minute, second, microsecond)
end
defimpl String.Chars do
def to_string(time) do
%{
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
calendar: calendar
} = time
calendar.time_to_string(hour, minute, second, microsecond)
end
end
defimpl Inspect do
def inspect(time, _) do
%{
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
calendar: calendar
} = time
"~T[" <>
calendar.time_to_string(hour, minute, second, microsecond) <> suffix(calendar) <> "]"
end
defp suffix(Calendar.ISO), do: ""
defp suffix(calendar), do: " " <> inspect(calendar)
end
end
| 28.761062 | 102 | 0.623253 |
f7c4511cd7a641c37f49712b1f62a6b52f5a787c | 276 | exs | Elixir | apps/mishka_html/test/mishka_html_web/views/layout_view_test.exs | mojtaba-naserei/mishka-cms | 1f31f61347bab1aae6ba0d47c5515a61815db6c9 | [
"Apache-2.0"
] | 35 | 2021-06-26T09:05:50.000Z | 2022-03-30T15:41:22.000Z | apps/mishka_html/test/mishka_html_web/views/layout_view_test.exs | iArazar/mishka-cms | 8b579101d607d91e80834527c1508fe5f4ceefef | [
"Apache-2.0"
] | 101 | 2021-01-01T09:54:07.000Z | 2022-03-28T10:02:24.000Z | apps/mishka_html/test/mishka_html_web/views/layout_view_test.exs | iArazar/mishka-cms | 8b579101d607d91e80834527c1508fe5f4ceefef | [
"Apache-2.0"
] | 8 | 2021-01-17T17:08:07.000Z | 2022-03-11T16:12:06.000Z | defmodule MishkaHtmlWeb.LayoutViewTest do
use MishkaHtmlWeb.ConnCase, async: true
# When testing helpers, you may want to import Phoenix.HTML and
# use functions such as safe_to_string() to convert the helper
# result into an HTML string.
# import Phoenix.HTML
end
| 30.666667 | 65 | 0.771739 |
f7c4654d01b3ec0cacdfc93626de05e66254f69e | 6,734 | ex | Elixir | apps/state/lib/state.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | null | null | null | apps/state/lib/state.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | null | null | null | apps/state/lib/state.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | null | null | null | defmodule State do
@moduledoc """
Maintains the current state of the MBTA system: routes, schedules, vehicle locations, predictions, etc. It also allows
for querying of that state to answer questions from clients.
"""
use Application
@type sort_option :: {:order_by, {atom, :asc | :desc}}
@type option :: sort_option | State.Pagination.pagination_option()
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec, warn: false
children = [
worker(State.Metadata, []),
worker(State.Service, []),
supervisor(State.Stop, []),
worker(State.Vehicle, []),
worker(State.Alert, []),
worker(State.Facility, []),
worker(State.Facility.Property, []),
worker(State.Facility.Parking, []),
worker(State.Route, []),
worker(State.RoutePattern, []),
worker(State.Line, []),
worker(State.Trip, []),
worker(State.Trip.Added, []),
worker(State.Schedule, []),
worker(State.Prediction, []),
worker(State.StopsOnRoute, []),
worker(State.RoutesPatternsAtStop, []),
worker(State.ServiceByDate, []),
worker(State.RoutesByService, []),
worker(State.Shape, []),
worker(State.Feed, []),
worker(State.Transfer, [])
]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: State.Application, max_restarts: length(children)]
Supervisor.start_link(children, opts)
end
@doc """
Fetches a configuration value and raises if missing.
## Examples
iex> State.config(:routes, :hidden_ids_exact)
[...]
"""
def config(root_key, sub_key) do
root_key
|> config()
|> Keyword.fetch!(sub_key)
end
def config(root_key) do
Application.fetch_env!(:state, root_key)
end
@doc """
Enumerates a result-set according to a list of options.
* `results` - the list of results
* `opts` - the Keyword list of options:
* `:order_by` - a 2-tuple containg the field to order by
and direction, for example: `{:id, :asc}`, `{:name, :desc}`
* `:limit` - the number of results to be returned
* `:offset` - the offset of results to beging selection from
When `:limit` is provided, the function gives a tuple of the paginated list
and a struct of pagination offset values for the next, previous, first and
last pages.
When both sorting and pagination options are used, sorting will happen before
any pagination operation.
## Examples
iex(1)> items = [%{id: 1}, %{id: 2}, %{id: 3}, %{id: 4}, %{id: 5}]
iex(2)> State.all(items, limit: 2, offset: 2)
{[%{id: 3}, %{id: 4}], %State.Pagination.Offsets{
prev: 0,
next: 4,
first: 0,
last: 4
}}
iex> State.all([%{id: 3}, %{id: 1}, %{id: 2}], order_by: {:id, :asc})
[%{id: 1}, %{id: 2}, %{id: 3}]
iex> State.all([%{id: 3}, %{id: 1}, %{id: 2}], order_by: [{:id, :desc}])
[%{id: 3}, %{id: 2}, %{id: 1}]
iex> State.all([%{id: 3}, %{id: 1}, %{id: 2}], order_by: [{:invalid, :asc}])
{:error, :invalid_order_by}
"""
@spec all([map], [option] | map) ::
[map] | {[map], State.Pagination.Offsets.t()} | {:error, atom}
def all(results, opts \\ [])
def all([], _), do: []
def all(results, []) do
results
end
def all(results, opts) when is_list(results) do
opts = Map.new(opts)
case State.order_by(results, opts) do
{:error, _} = error ->
error
new_results ->
State.Pagination.paginate(new_results, opts)
end
end
@doc false
@spec order_by([map], [sort_option] | map) :: [map] | {:error, atom}
def order_by(results, opts \\ [])
def order_by([], _), do: []
def order_by(results, opts) do
opts = Map.new(opts)
case opts do
%{order_by: keys} ->
keys =
keys
|> List.wrap()
|> Enum.reverse()
order_by_keys(results, keys, opts)
_ ->
# order_by not present
results
end
end
defp order_by_keys(results, [{:distance, dir} | keys], %{latitude: _, longitude: _} = opts) do
results
|> do_order_by({:distance, get_latlng(opts)}, dir)
|> order_by_keys(keys, opts)
end
defp order_by_keys([result | _] = results, [{:time, dir} | keys], opts) do
if valid_order_by_key?(:arrival_time, result) or valid_order_by_key?(:departure_time, result) do
results
|> do_order_by(:time, dir)
|> order_by_keys(keys, opts)
else
{:error, :invalid_order_by}
end
end
defp order_by_keys([result | _] = results, [{key, dir} | keys], opts) do
if valid_order_by_key?(key, result) do
results
|> do_order_by(key, dir)
|> order_by_keys(keys, opts)
else
{:error, :invalid_order_by}
end
end
defp order_by_keys(results, [], _opts) do
results
end
defp order_by_keys(_results, _keys, _opts) do
{:error, :invalid_order_by}
end
defp valid_order_by_key?(:distance, _) do
false
end
defp valid_order_by_key?(key, result) do
Map.has_key?(result, key)
end
defp do_order_by(results, key, :asc) do
sort_fn = mapper_fn(key)
Enum.sort_by(results, sort_fn, &<=/2)
end
defp do_order_by(results, key, :desc) do
sort_fn = mapper_fn(key)
Enum.sort_by(results, sort_fn, &>=/2)
end
defp mapper_fn({:distance, position}) do
&distance({&1.latitude, &1.longitude}, position)
end
defp mapper_fn(:time) do
&time/1
end
defp mapper_fn(key) do
fn
%{^key => %DateTime{} = dt} ->
{:date_time, DateTime.to_unix(dt)}
%{^key => value} ->
value
end
end
defp time(%{arrival_time: nil, departure_time: nil}) do
nil
end
defp time(%{arrival_time: nil, departure_time: %DateTime{} = time}) do
{:date_time, DateTime.to_unix(time)}
end
defp time(%{arrival_time: %DateTime{} = time}) do
{:date_time, DateTime.to_unix(time)}
end
defp time(%{arrival_time: nil, departure_time: time}) do
{:seconds, time}
end
defp time(%{arrival_time: time}) do
{:seconds, time}
end
defp fetch_float(opts_map, key) do
case opts_map do
%{^key => val} ->
case Float.parse(val) do
{parsed_value, ""} ->
parsed_value
_ ->
nil
end
_ ->
nil
end
end
def distance({lat1, lng1}, {lat2, lng2}) do
:math.sqrt(:math.pow(lat1 - lat2, 2) + :math.pow(lng1 - lng2, 2))
end
def get_latlng(opts) do
{fetch_float(opts, :latitude), fetch_float(opts, :longitude)}
end
end
| 25.800766 | 120 | 0.599495 |
f7c47b998f212f89cbb6a7c579ff12eb0b2a664b | 16,898 | ex | Elixir | lib/x/compiler.ex | omohokcoj/x_component | 0f51d78b80b7d54d238b7d0c2c3c2f93b9320108 | [
"MIT"
] | 53 | 2020-02-09T16:34:46.000Z | 2021-04-01T17:48:26.000Z | lib/x/compiler.ex | omohokcoj/x_component | 0f51d78b80b7d54d238b7d0c2c3c2f93b9320108 | [
"MIT"
] | 1 | 2020-03-11T03:22:52.000Z | 2020-03-11T03:22:52.000Z | lib/x/compiler.ex | omohokcoj/x_component | 0f51d78b80b7d54d238b7d0c2c3c2f93b9320108 | [
"MIT"
] | 1 | 2020-02-09T16:34:49.000Z | 2020-02-09T16:34:49.000Z | defmodule X.Compiler do
@moduledoc """
X template compiler module.
"""
alias X.Ast
import X.Transformer,
only: [
compact_ast: 1,
transform_expresion: 3,
transform_inline_component: 4
]
@special_tag_name 'X'
@assigns_key_name 'assigns'
@component_key_name 'component'
@is_key_name 'is'
@attrs_key_name 'attrs'
@special_attr_assigns ['style', 'class']
@type options() :: [
{:inline, boolean()}
| {:context, atom()}
| {:line, integer()}
]
@doc ~S"""
Compiles given X template AST into Elixir AST.
## Example
iex> X.Compiler.call(
...> [
...> {{:tag_start, {1, 1}, 'div', [], nil, nil, false, false, false},
...> [
...> {{:tag_start, {6, 1}, 'span',
...> [{:tag_attr, {12, 1}, 'class', 'test', false}], nil, nil, false, false,
...> false},
...> [
...> {{:text_group, {25, 1}, 'span'},
...> [{{:tag_output, {25, 1}, 'a ', true}, []}]}
...> ]}
...> ]}
...> ])
[
"<div><span class=\"test\">",
{{:., [line: 1],
[{:__aliases__, [line: 1, alias: false], [:X, :Html]}, :to_safe_iodata]},
[line: 1], [{:a, [line: 1], nil}]},
"</span></div>"
]
"""
@spec call([Ast.leaf()]) :: Macro.t()
@spec call([Ast.leaf()], Macro.Env.t()) :: Macro.t()
@spec call([Ast.leaf()], Macro.Env.t(), options()) :: Macro.t()
def call(tree, env \\ __ENV__, opts \\ []) when is_map(env) do
tree
|> Ast.drop_whitespace()
|> compile_tree(env, opts)
|> compact_ast()
end
@spec compile_tree([Ast.leaf()], Macro.Env.t(), options()) :: Macro.t()
defp compile_tree(tree = [head | _], env, opts) do
{result, tail} =
head
|> compile(env, opts)
|> maybe_wrap_in_iterator(head, env, opts)
|> maybe_wrap_in_condition(tree, env, opts)
[result | compile_tree(tail, env, opts)]
end
defp compile_tree([], _env, _opts) do
[]
end
defp compile({{:text_group, _, _}, list}, env, opts) do
compile_text_group(list, env, opts, [])
end
defp compile({{:tag_output, cur = {_, row}, body, true}, _}, env, opts) do
quote line: row + Keyword.get(opts, :line, 0) do
X.Html.to_safe_iodata(unquote(compile_expr(body, cur, env, opts)))
end
end
defp compile({{:tag_output, cur = {_, row}, body, false}, _}, env, opts) do
quote line: row + Keyword.get(opts, :line, 0) do
unquote(compile_expr(body, cur, env, opts))
end
end
defp compile({{:tag_text, _, body, _, false}, _}, _env, _opts) do
:unicode.characters_to_binary(body)
end
defp compile({{:tag_start, _, tag_name, attrs, _, _, false, _, false}, children}, env, opts) do
tag_name_binary = :erlang.iolist_to_binary(tag_name)
attrs_ast = compile_attrs(attrs, env, opts)
children_ast = compile_tree(children, env, opts)
[?<, tag_name_binary, attrs_ast, ?>, children_ast, "</", tag_name_binary, ?>]
end
defp compile({{:tag_start, _, tag_name, attrs, _, _, true, _, false}, _}, env, opts) do
[?<, :erlang.iolist_to_binary(tag_name), compile_attrs(attrs, env, opts), ?>]
end
defp compile(
{{:tag_start, cur, @special_tag_name, attrs, _, _, _, _, true}, children},
env,
opts
) do
{component, is_tag, attrs} =
Enum.reduce(attrs, {nil, nil, []}, fn
{:tag_attr, _, @component_key_name, value, true}, {_, is_tag, acc} -> {value, is_tag, acc}
{:tag_attr, _, @is_key_name, value, true}, {component, _, acc} -> {component, value, acc}
attr, {component, is_tag, acc} -> {component, is_tag, [attr | acc]}
end)
cond do
component ->
compile_component(component, attrs, children, cur, env, opts)
is_tag ->
children_ast = compile_tree(children, env, opts)
tag_ast = compile_expr(is_tag, cur, env, opts)
attrs_ast = compile_attrs(:lists.reverse(attrs), env, opts)
tag_ast = quote(do: :erlang.iolist_to_binary(unquote(tag_ast)))
[?<, tag_ast, attrs_ast, ?>, children_ast, "</", tag_ast, ?>]
true ->
children
|> Ast.drop_whitespace()
|> compile_tree(env, opts)
end
end
defp compile({{:tag_start, cur, tag_name, attrs, _, _, _, _, true}, children}, env, opts) do
compile_component(tag_name, attrs, children, cur, env, opts)
end
defp compile({{:tag_comment, _, body}, _}, _env, _opts) do
["<!", :unicode.characters_to_binary(body), ?>]
end
@spec maybe_wrap_in_iterator(Macro.t(), Ast.leaf(), Macro.Env.t(), options()) :: Macro.t()
defp maybe_wrap_in_iterator(ast, node, env, opts) do
case node do
{{:tag_start, _, _, _, _, iterator = {:for, _, _}, _, _, _}, _} ->
compile_for_expr(iterator, ast, env, opts)
_ ->
ast
end
end
@spec maybe_wrap_in_condition(Macro.t(), [Ast.leaf()], Macro.Env.t(), options()) :: Macro.t()
defp maybe_wrap_in_condition(ast, [head | tail], env, opts) do
case head do
{{:tag_start, _, _, _, token = {condition, _, _}, _, _, _, _}, _}
when condition in [:if, :unless] ->
compile_cond_expr(token, ast, tail, env, opts)
_ ->
{ast, tail}
end
end
@spec compile_attrs([Ast.tag_attr()], Macro.Env.t(), options()) :: [Macro.t()]
defp compile_attrs(attrs, env, opts) do
{attrs_ast, base_attrs, merge_attrs, static_attr_tokens} =
group_and_transform_tag_attrs(attrs, env, opts)
static_attrs_ast = Enum.map(static_attr_tokens, &compile_attr(&1, env, opts))
case {attrs_ast, merge_attrs} do
{nil, []} ->
static_attrs_ast
{nil, _} ->
base_attrs = X.Html.merge_attrs(base_attrs, merge_attrs)
[
?\s,
quote do
X.Html.attrs_to_iodata(unquote(base_attrs))
end
| static_attrs_ast
]
_ ->
static_attrs = Enum.map(static_attr_tokens, &attr_token_to_tuple(&1, env, opts))
base_attrs = X.Html.merge_attrs(base_attrs, merge_attrs)
quote do
case unquote({:{}, [], [attrs_ast, base_attrs, static_attrs]}) do
{attrs_, base_attrs_, static_attrs_}
when attrs_ not in [nil, []] or base_attrs_ != [] ->
[
?\s,
X.Html.attrs_to_iodata(X.Html.merge_attrs(base_attrs_ ++ static_attrs_, attrs_))
]
_ ->
unquote(static_attrs_ast)
end
end
end
end
@spec compile_attr(Ast.tag_attr(), Macro.Env.t(), options()) :: Macro.t()
defp compile_attr({:tag_attr, cur, name, value, true}, env, opts) do
name_string = :erlang.iolist_to_binary(name)
quote do
case unquote(compile_expr(value, cur, env, opts)) do
true ->
unquote(" " <> name_string <> "=\"true\"")
value when value not in [nil, false] ->
[
unquote(" " <> name_string <> "=\""),
X.Html.attr_value_to_iodata(
value,
unquote(name_string)
),
?"
]
_ ->
[]
end
end
end
defp compile_attr({:tag_attr, _cur, name, [], false}, _env, _opts) do
[?\s, :erlang.iolist_to_binary(name)]
end
defp compile_attr({:tag_attr, _cur, name, value, false}, _env, _opts) do
[?\s, :erlang.iolist_to_binary(name), ?=, ?", :unicode.characters_to_binary(value), ?"]
end
@spec compile_assigns([Ast.tag_attr()], Macro.Env.t(), options()) :: Macro.t()
defp compile_assigns(attrs, env, opts) do
{assigns, attrs, assigns_list, attrs_list, merge_attrs} =
group_and_transform_component_attrs(attrs, env, opts)
merged_attrs =
case {attrs_list, merge_attrs} do
{[], _} -> []
{_, []} -> attrs_list
{_, _} -> quote(do: X.Html.merge_attrs(unquote(attrs_list), unquote(merge_attrs)))
end
attrs_ast =
case {attrs, merged_attrs} do
{nil, []} -> nil
{nil, _} -> merged_attrs
{_, []} -> attrs
{_, _} -> quote(do: X.Html.merge_attrs(unquote(merged_attrs), unquote(attrs)))
end
case {assigns, attrs_ast} do
{nil, nil} -> {:%{}, [], assigns_list}
{nil, _} -> {:%{}, [], [{:attrs, attrs_ast} | assigns_list]}
{_, nil} -> assigns
{_, _} -> quote(do: Map.put(unquote(assigns), :attrs, unquote(attrs_ast)))
end
end
@spec compile_cond_expr(
Ast.tag_condition(),
Macro.t(),
[Ast.leaf()],
Macro.Env.t(),
options()
) ::
{Macro.t(), [Ast.leaf()]}
defp compile_cond_expr({:unless, cur, expr}, ast, tail, env, opts) do
compile_cond_expr({:if, cur, '!(' ++ expr ++ ')'}, ast, tail, env, opts)
end
defp compile_cond_expr(condition = {:if, cur = {_, row}, expr}, ast, tail, env, opts) do
{else_ast, tail} = find_cond_else_expr(condition, ast, tail, env, opts)
ast =
quote line: row + Keyword.get(opts, :line, 0) do
if(unquote(compile_expr(expr, cur, env, opts)),
do: unquote(compact_ast(ast)),
else: unquote(compact_ast(else_ast))
)
end
{ast, tail}
end
@spec find_cond_else_expr(
Ast.tag_condition(),
Macro.t(),
[Ast.leaf()],
Macro.Env.t(),
options()
) ::
{Macro.t(), [Ast.leaf()]}
defp find_cond_else_expr(condition, ast, tail, env, opts) do
case tail do
[next = {{:tag_start, _, _, _, {:else, _cur, _}, _, _, _, _}, _} | rest] ->
{compile(next, env, opts), rest}
[next = {{:tag_start, _, _, _, {:elseif, cur, expr}, _, _, _, _}, _} | rest] ->
compile_cond_expr({:if, cur, expr}, compile(next, env, opts), rest, env, opts)
[{{:text_group, _, _}, [{{:tag_text, _, _, _, true}, _}]} | rest] ->
find_cond_else_expr(condition, ast, rest, env, opts)
_ ->
{[], tail}
end
end
@spec compile_for_expr(Ast.tag_iterator(), Macro.t(), Macro.Env.t(), options()) :: Macro.t()
defp compile_for_expr({:for, cur = {_, row}, expr}, ast, env, opts) do
expr =
case expr do
'[' ++ _ -> expr
_ -> [?[ | expr] ++ ']'
end
quote line: row + Keyword.get(opts, :line, 0) do
for(unquote_splicing(compile_expr(expr, cur, env, opts)),
do: unquote(compact_ast(ast)),
into: []
)
end
end
@spec compile_expr(charlist(), Ast.cursor(), Macro.Env.t(), options()) :: Macro.t()
defp compile_expr(charlist, {_, row}, env, opts) do
quoted = Code.string_to_quoted!(charlist, line: row + Keyword.get(opts, :line, 0))
transform_expresion(quoted, Keyword.get(opts, :context), env)
end
@spec compile_component(
charlist(),
[Ast.tag_attr()],
[Ast.leaf()],
Ast.cursor(),
Macro.Env.t(),
options()
) ::
Macro.t()
defp compile_component(component, attrs, children, cur = {_, row}, env, opts) do
component_ast = compile_expr(component, cur, env, opts)
assigns_ast = compile_assigns(attrs, env, opts)
line = row + Keyword.get(opts, :line, 0)
assigns_list =
case assigns_ast do
{:%{}, _, assigns_list} -> assigns_list
_ -> nil
end
if Keyword.get(opts, :inline, true) &&
!is_nil(assigns_list) &&
is_atom(component_ast) &&
Code.ensure_compiled?(component_ast) &&
function_exported?(component_ast, :template_ast, 0) do
children_ast = children |> Ast.drop_whitespace() |> compile_tree(env, opts)
quote line: line do
unquote(transform_inline_component(component_ast, assigns_list, children_ast, line))
end
else
children_ast = call(children, env, opts)
args_ast =
case children do
[] -> [assigns_ast]
_ -> [assigns_ast, [do: children_ast]]
end
quote line: line do
unquote(component_ast).render(unquote_splicing(args_ast))
end
end
end
@spec compile_text_group([Ast.token()], Macro.Env.t(), options(), list()) :: [Macro.t()]
defp compile_text_group([{{:tag_text, _, _, _, true}, _} | tail], env, opts, acc = [" " | _]) do
compile_text_group(tail, env, opts, acc)
end
defp compile_text_group([head | tail], env, opts, acc) do
result =
case head do
{{:tag_text, _, _, _, true}, _} ->
" "
{{:tag_text, _, [char | _], true, _}, _} ->
result = String.trim_leading(compile(head, env, opts))
case char do
?\n -> "\n" <> result
_ -> " " <> result
end
head ->
compile(head, env, opts)
end
compile_text_group(tail, env, opts, [result | acc])
end
defp compile_text_group([], _, _opts, acc) do
:lists.reverse(acc)
end
@spec group_and_transform_tag_attrs([Ast.tag_attr()], Macro.Env.t(), options()) :: {
attrs_ast :: Macro.t() | nil,
base_attrs :: [{binary(), Macro.t()}],
merge_attrs :: [{binary(), Macro.t()}],
static_attrs :: [Ast.tag_attr()]
}
defp group_and_transform_tag_attrs(attrs, env, opts) do
Enum.reduce(attrs, {nil, [], [], []}, fn attr_token,
{attr_ast, base_attrs, merge_attrs,
static_attr_tokens} ->
case attr_token do
{_, cur, @attrs_key_name, value, true} ->
{compile_expr(value, cur, env, opts), base_attrs, merge_attrs, static_attr_tokens}
{_, _, name, _, is_dynamic} ->
case List.keytake(static_attr_tokens, name, 2) do
{m_attr_token = {:tag_attr, _, _, _, true}, rest_attrs} when is_dynamic == false ->
{
attr_ast,
[attr_token_to_tuple(m_attr_token, env, opts) | base_attrs],
[attr_token_to_tuple(attr_token, env, opts) | merge_attrs],
rest_attrs
}
{m_attr_token = {:tag_attr, _, _, _, false}, rest_attrs} when is_dynamic == true ->
{
attr_ast,
[attr_token_to_tuple(attr_token, env, opts) | base_attrs],
[attr_token_to_tuple(m_attr_token, env, opts) | merge_attrs],
rest_attrs
}
nil ->
{attr_ast, base_attrs, merge_attrs, [attr_token | static_attr_tokens]}
end
end
end)
end
@spec group_and_transform_component_attrs([Ast.tag_attr()], Macro.Env.t(), options()) :: {
attrs_ast :: Macro.t() | nil,
assigns_ast :: Macro.t() | nil,
assigns_list :: [{binary(), Macro.t()}],
attrs_list :: [{binary(), Macro.t()}],
merge_attrs_list :: [Ast.tag_attr()]
}
def group_and_transform_component_attrs(attrs, env, opts) do
Enum.reduce(attrs, {nil, nil, [], [], []}, fn token = {_, cur, name, value, is_dynamic},
{assigns, attrs, assigns_acc, attrs_acc,
merge_acc} ->
if is_dynamic && name not in @special_attr_assigns do
value = compile_expr(value, cur, env, opts)
case name do
@assigns_key_name ->
{value, attrs, assigns_acc, attrs_acc, merge_acc}
@attrs_key_name ->
{assigns, value, assigns_acc, attrs_acc, merge_acc}
_ ->
{assigns, attrs, [{attr_key_to_atom(name), value} | assigns_acc], attrs_acc,
merge_acc}
end
else
case List.keytake(attrs_acc, to_string(name), 0) do
{attr = {_, value}, rest_attrs} when is_binary(value) ->
{assigns, attrs, assigns_acc, [attr | rest_attrs],
[attr_token_to_tuple(token, env, opts) | merge_acc]}
{attr, rest_attrs} ->
{assigns, attrs, assigns_acc, [attr_token_to_tuple(token, env, opts) | rest_attrs],
[attr | merge_acc]}
nil ->
{assigns, attrs, assigns_acc, [attr_token_to_tuple(token, env, opts) | attrs_acc],
merge_acc}
end
end
end)
end
@spec attr_token_to_tuple(Ast.tag_attr(), Macro.Env.t(), options()) :: {String.t(), Macro.t()}
defp attr_token_to_tuple(token, env, opts) do
case token do
{:tag_attr, _cur, name, value, false} ->
{:erlang.iolist_to_binary(name), :unicode.characters_to_binary(value)}
{:tag_attr, cur, name, value, true} ->
{:erlang.iolist_to_binary(name), compile_expr(value, cur, env, opts)}
end
end
@spec attr_key_to_atom(charlist()) :: atom()
defp attr_key_to_atom(name) do
String.to_atom(
for(
char <- name,
do:
case char do
?- -> "_"
_ -> <<char>>
end,
into: <<>>
)
)
end
end
| 31.644195 | 98 | 0.552906 |
f7c480e81a1c41921bc32c06e77548b8538042d3 | 3,297 | exs | Elixir | test/plug/redirect_test.exs | madebystitched/plug-redirect | 7593d355333a6ea8c36b8d9f39aef193b0128a4a | [
"MIT"
] | null | null | null | test/plug/redirect_test.exs | madebystitched/plug-redirect | 7593d355333a6ea8c36b8d9f39aef193b0128a4a | [
"MIT"
] | null | null | null | test/plug/redirect_test.exs | madebystitched/plug-redirect | 7593d355333a6ea8c36b8d9f39aef193b0128a4a | [
"MIT"
] | null | null | null | defmodule Plug.RedirectTest do
use ExUnit.Case
use Plug.Test
defmodule MyPlug do
use Plug.Redirect
redirect("/foo/bar", "/go/here", status: 301)
redirect("/jump/up", "/get/down", status: 302)
redirect("/ra/wavy", "/by/droid", status: 303)
redirect("/rock/on", "/roll/out", status: 307)
redirect("/no/status", "/301/default")
redirect("/blog/:slug", "/no-more-blog")
redirect("/users/:slug", "/profile/:slug")
redirect("/other/:slug", "http://somewhere.com/profile/:slug")
redirect("/pages?type=modern", "/pages/modern", status: 301, query: true)
redirect("/pages?type=old", "/pages?type=new", status: 302, query: true)
# Old API
redirect(301, "/old/foo/bar", "/go/here")
redirect(302, "/old/jump/up", "/get/down")
redirect(303, "/old/ra/wavy", "/by/droid")
redirect(307, "/old/rock/on", "/roll/out")
end
@opts MyPlug.init([])
@methods ~w(get head post put delete trace options connect patch)a
for method <- @methods do
test "it passes through when no redirects match a #{method}" do
conn = unquote(method) |> conn("/hello")
result = conn |> MyPlug.call(@opts)
assert conn == result
end
end
test "it can perform 301 redirects" do
conn = get("/foo/bar")
assert_redirect(conn, 301, "/go/here")
end
test "it can perform 302 redirects" do
conn = get("/jump/up")
assert_redirect(conn, 302, "/get/down")
end
test "it can perform 303 redirects" do
conn = get("/ra/wavy")
assert_redirect(conn, 303, "/by/droid")
end
test "it can perform 307 redirects" do
conn = get("/rock/on")
assert_redirect(conn, 307, "/roll/out")
end
describe "backwards compatibility with old API" do
test "it can perform 301 redirects" do
conn = get("/old/foo/bar")
assert_redirect(conn, 301, "/go/here")
end
test "it can perform 302 redirects" do
conn = get("/old/jump/up")
assert_redirect(conn, 302, "/get/down")
end
test "it can perform 303 redirects" do
conn = get("/old/ra/wavy")
assert_redirect(conn, 303, "/by/droid")
end
test "it can perform 307 redirects" do
conn = get("/old/rock/on")
assert_redirect(conn, 307, "/roll/out")
end
end
test "when given no status it defaults to 301" do
conn = get("/no/status")
assert_redirect(conn, 301, "/301/default")
end
test "variable sections can exist" do
conn = get("/blog/some-article")
assert_redirect(conn, 301, "/no-more-blog")
conn = get("/blog/another-article")
assert_redirect(conn, 301, "/no-more-blog")
end
test "other hosts can be redirected to" do
conn = get("/other/louis")
assert_redirect(conn, 301, "http://somewhere.com/profile/louis")
end
test "query parameters can be redirected" do
conn = get("/pages?type=modern")
assert_redirect(conn, 301, "/pages/modern")
end
test "query parameters can be redirected to" do
conn = get("/pages?type=old")
assert_redirect(conn, 302, "/pages?type=new")
end
defp get(path) do
:get |> conn(path) |> MyPlug.call(@opts)
end
defp assert_redirect(conn, code, to) do
assert conn.state == :set
assert conn.status == code
assert Plug.Conn.get_resp_header(conn, "location") == [to]
end
end
| 27.705882 | 77 | 0.632696 |
f7c4b01a5cef1caf4a4355923c525edac8d73163 | 17,242 | exs | Elixir | test/lib/bamboo/mailer_test.exs | mrcasals/bamboo | c3b82436d9594d7838775fcc906cd4bd8516690e | [
"MIT"
] | 1,845 | 2016-03-29T23:36:36.000Z | 2022-03-31T19:23:38.000Z | test/lib/bamboo/mailer_test.exs | mrcasals/bamboo | c3b82436d9594d7838775fcc906cd4bd8516690e | [
"MIT"
] | 407 | 2016-03-29T14:55:19.000Z | 2022-02-02T13:53:50.000Z | test/lib/bamboo/mailer_test.exs | mrcasals/bamboo | c3b82436d9594d7838775fcc906cd4bd8516690e | [
"MIT"
] | 361 | 2016-03-31T13:33:22.000Z | 2022-02-25T12:38:43.000Z | defmodule Bamboo.MailerTest do
use ExUnit.Case
alias Bamboo.Email
@mailer_config adapter: __MODULE__.DefaultAdapter, foo: :bar, interceptors: nil
setup context do
config =
Keyword.merge(
@mailer_config,
[adapter: context[:adapter], interceptors: context[:interceptors]],
fn
_key, default, nil -> default
_key, _default, override -> override
end
)
Application.put_env(:bamboo, __MODULE__.Mailer, config)
Process.register(self(), :mailer_test)
on_exit(fn -> Application.delete_env(:bamboo, __MODULE__.Mailer) end)
:ok
end
defmodule(Mailer, do: use(Bamboo.Mailer, otp_app: :bamboo))
defmodule DefaultAdapter do
def deliver(email, config) do
send(:mailer_test, {:deliver, email, config})
{:ok, email}
end
def handle_config(config), do: config
def supports_attachments?, do: true
end
defmodule FailureAdapter do
def deliver(_email, _config) do
{:error, %Bamboo.ApiError{message: "invalid email"}}
end
def handle_config(config), do: config
def supports_attachments?, do: true
end
test "deliver_now/1 returns :ok tuple with sent email" do
address = "[email protected]"
email = new_email(from: address, to: address, cc: address, bcc: address)
{:ok, delivered_email} = Mailer.deliver_now(email)
assert_received {:deliver, ^delivered_email, _}
end
@tag adapter: FailureAdapter
test "deliver_now/1 returns errors if adapter fails" do
address = "[email protected]"
email = new_email(from: address, to: address, cc: address, bcc: address)
{:error, %Bamboo.ApiError{}} = Mailer.deliver_now(email)
refute_received {:deliver, _, _}
end
@tag adapter: FailureAdapter
test "deliver_now!/1 raises errors if adapter fails" do
address = "[email protected]"
email = new_email(from: address, to: address, cc: address, bcc: address)
assert_raise Bamboo.ApiError, fn ->
Mailer.deliver_now!(email)
end
end
test "deliver_now!/1 returns email sent" do
address = "[email protected]"
email = new_email(from: address, to: address, cc: address, bcc: address)
delivered_email = Mailer.deliver_now!(email)
assert_received {:deliver, ^delivered_email, _}
end
test "deliver_now/1 converts binary addresses to %{name: name, email: email}" do
address = "[email protected]"
email = new_email(from: address, to: address, cc: address, bcc: address)
Mailer.deliver_now(email)
converted_address = {nil, address}
assert_received {:deliver, delivered_email, _}
assert delivered_email.from == converted_address
assert delivered_email.to == [converted_address]
assert delivered_email.cc == [converted_address]
assert delivered_email.bcc == [converted_address]
end
test "converts structs with custom protocols" do
user = %Bamboo.Test.User{first_name: "Paul", email: "[email protected]"}
email = new_email(from: user, to: user, cc: user, bcc: user)
Mailer.deliver_now(email)
converted_recipient = {user.first_name, user.email}
assert_received {:deliver, delivered_email, _}
assert delivered_email.from == {"#{user.first_name} (MyApp)", user.email}
assert delivered_email.to == [converted_recipient]
assert delivered_email.cc == [converted_recipient]
assert delivered_email.bcc == [converted_recipient]
end
test "deliver_later/1 calls deliver on the adapter" do
email = new_email()
Mailer.deliver_later(email)
assert_receive {:deliver, delivered_email, _config}
assert delivered_email == Bamboo.Mailer.normalize_addresses(email)
end
test "deliver_later/1 returns the email that will be sent" do
email = new_email()
{:ok, delivered_email} = Mailer.deliver_later(email)
assert_receive {:deliver, ^delivered_email, _config}
end
test "deliver_now/1 wraps the recipients in a list" do
address = {"Someone", "[email protected]"}
email = new_email(to: address, cc: address, bcc: address)
Mailer.deliver_now(email)
assert_received {:deliver, delivered_email, _}
assert delivered_email.to == [address]
assert delivered_email.cc == [address]
assert delivered_email.bcc == [address]
end
test "sets a default deliver_later_strategy if none is set" do
email = new_email(to: "[email protected]")
Mailer.deliver_now(email)
assert_received {:deliver, _email, config}
assert config.deliver_later_strategy == Bamboo.TaskSupervisorStrategy
end
test "deliver_now/1 calls the adapter with the email and config as a map" do
email = new_email(to: "[email protected]")
expected_final_config =
@mailer_config
|> Enum.into(%{})
|> Map.put(:deliver_later_strategy, Bamboo.TaskSupervisorStrategy)
{:ok, returned_email} = Mailer.deliver_now(email)
assert returned_email == Bamboo.Mailer.normalize_addresses(email)
assert_received {:deliver, %Bamboo.Email{}, ^expected_final_config}
end
test "deliver/1 raises a helpful error message" do
assert_raise RuntimeError, ~r/Use deliver_now/, fn ->
Mailer.deliver(:anything)
end
end
test "deliver_now/1 with no from address returns an error" do
{:error, %Bamboo.EmptyFromAddressError{}} = Mailer.deliver_now(new_email(from: nil))
{:error, %Bamboo.EmptyFromAddressError{}} = Mailer.deliver_now(new_email(from: {"foo", nil}))
end
test "deliver_now!/1 with no from address raises an error" do
assert_raise Bamboo.EmptyFromAddressError, fn ->
Mailer.deliver_now!(new_email(from: nil))
end
assert_raise Bamboo.EmptyFromAddressError, fn ->
Mailer.deliver_now!(new_email(from: {"foo", nil}))
end
end
test "deliver_now/1 with empty recipient lists does not deliver email" do
{:ok, email} = new_email(to: [], cc: [], bcc: []) |> Mailer.deliver_now()
refute_received {:deliver, ^email, _}
{:ok, email} = new_email(to: [], cc: nil, bcc: nil) |> Mailer.deliver_now()
refute_received {:deliver, ^email, _}
{:ok, email} = new_email(to: nil, cc: [], bcc: nil) |> Mailer.deliver_now()
refute_received {:deliver, ^email, _}
{:ok, email} = new_email(to: nil, cc: nil, bcc: []) |> Mailer.deliver_now()
refute_received {:deliver, ^email, _}
end
test "deliver_later/1 with empty lists for recipients does not deliver email" do
new_email(to: [], cc: [], bcc: []) |> Mailer.deliver_later()
refute_received {:deliver, _, _}
new_email(to: [], cc: nil, bcc: nil) |> Mailer.deliver_later()
refute_received {:deliver, _, _}
new_email(to: nil, cc: [], bcc: nil) |> Mailer.deliver_later()
refute_received {:deliver, _, _}
new_email(to: nil, cc: nil, bcc: []) |> Mailer.deliver_later()
refute_received {:deliver, _, _}
end
test "returns an error if all recipients are nil" do
{:error, %Bamboo.NilRecipientsError{}} =
new_email(to: nil, cc: nil, bcc: nil)
|> Mailer.deliver_now()
{:error, %Bamboo.NilRecipientsError{}} =
new_email(to: {"foo", nil})
|> Mailer.deliver_now()
{:error, %Bamboo.NilRecipientsError{}} =
new_email(to: [{"foo", nil}])
|> Mailer.deliver_now()
{:error, %Bamboo.NilRecipientsError{}} =
new_email(to: [nil])
|> Mailer.deliver_now()
end
test "raises on deliver_now! if all recipients are nil" do
assert_raise Bamboo.NilRecipientsError, fn ->
new_email(to: nil, cc: nil, bcc: nil)
|> Mailer.deliver_now!()
end
assert_raise Bamboo.NilRecipientsError, fn ->
new_email(to: {"foo", nil})
|> Mailer.deliver_now!()
end
assert_raise Bamboo.NilRecipientsError, fn ->
new_email(to: [{"foo", nil}])
|> Mailer.deliver_now!()
end
assert_raise Bamboo.NilRecipientsError, fn ->
new_email(to: [nil])
|> Mailer.deliver_now!()
end
end
test "raises an error if an address does not have a protocol implemented" do
email = new_email(from: 1)
assert_raise Protocol.UndefinedError, fn ->
Mailer.deliver_now(email)
end
end
test "raises if a map is passed in" do
email = new_email(from: %{foo: :bar})
assert_raise ArgumentError, fn ->
Mailer.deliver_now(email)
end
end
test "raises an error if deliver_now or deliver_later or the ! equivalents are called directly" do
email = new_email(from: %{foo: :bar})
assert_raise RuntimeError, ~r/cannot call Bamboo.Mailer/, fn ->
Bamboo.Mailer.deliver_now(email)
end
assert_raise RuntimeError, ~r/cannot call Bamboo.Mailer/, fn ->
Bamboo.Mailer.deliver_now!(email)
end
assert_raise RuntimeError, ~r/cannot call Bamboo.Mailer/, fn ->
Bamboo.Mailer.deliver_later(email)
end
assert_raise RuntimeError, ~r/cannot call Bamboo.Mailer/, fn ->
Bamboo.Mailer.deliver_later!(email)
end
end
describe "attachments" do
defmodule AdapterWithoutAttachmentSupport do
def deliver(_email, _config), do: {:ok, :noop}
def handle_config(config), do: config
def supports_attachments?, do: false
end
@tag adapter: AdapterWithoutAttachmentSupport
test "returns errors if adapter does not support attachments and attachments are sent" do
path = Path.join(__DIR__, "../../support/attachment.docx")
email = new_email(to: "[email protected]") |> Email.put_attachment(path)
assert {:error, error} = Mailer.deliver_now(email)
assert error =~ "does not support attachments"
assert {:error, error} = Mailer.deliver_later(email)
assert error =~ "does not support attachments"
end
@tag adapter: AdapterWithoutAttachmentSupport
test "raise errors with deliver_x! if adapter does not support attachments and attachments are sent" do
path = Path.join(__DIR__, "../../support/attachment.docx")
email = new_email(to: "[email protected]") |> Email.put_attachment(path)
assert_raise RuntimeError, ~r/does not support attachments/, fn ->
Mailer.deliver_now!(email)
end
assert_raise RuntimeError, ~r/does not support attachments/, fn ->
Mailer.deliver_later!(email)
end
end
@tag adapter: AdapterWithoutAttachmentSupport
test "does not raise if no attachments are on the email" do
email = new_email(to: "[email protected]")
Mailer.deliver_now(email)
end
@tag adapter: DefaultAdapter
test "does not raise if adapter supports attachments" do
path = Path.join(__DIR__, "../../support/attachment.docx")
email = new_email(to: "[email protected]") |> Email.put_attachment(path)
Mailer.deliver_now(email)
assert_received {:deliver, _email, _config}
end
end
describe "configuration" do
defmodule CustomConfigAdapter do
def deliver(email, config) do
send(:mailer_test, {:deliver, email, config})
{:ok, email}
end
def handle_config(config) do
config |> Map.put(:custom_key, "Set by the adapter")
end
end
@tag adapter: CustomConfigAdapter
test "uses adapter's handle_config/1 to customize or validate the config" do
email = new_email(to: "[email protected]")
Mailer.deliver_now(email)
assert_received {:deliver, _email, config}
assert config.custom_key == "Set by the adapter"
end
test "deliver_now/2 overrides Adapter config with the 'config:' option" do
email = new_email(to: "[email protected]")
override_config = %{
foo: :baz,
something: :new
}
Mailer.deliver_now(email, config: override_config)
assert_received {:deliver, _email, config}
assert config.foo == :baz
assert config.something == :new
end
test "deliver_later/2 overrides Adapter config with the 'config:' option" do
email = new_email(to: "[email protected]")
override_config = %{
foo: :qux,
something: :groovy
}
Mailer.deliver_later(email, config: override_config)
assert_receive {:deliver, _email, config}
assert config.foo == :qux
assert config.something == :groovy
end
end
describe "option to return response" do
defmodule ResponseAdapter do
def deliver(_email, _config) do
response = %{status_code: 202, headers: [%{}], body: ""}
send(:mailer_test, response)
{:ok, response}
end
def handle_config(config), do: config
end
@tag adapter: ResponseAdapter
test "deliver_now/2 returns {:ok, email, reponse} when passing response: true option" do
email = new_email(to: "[email protected]")
{:ok, email, response} = Mailer.deliver_now(email, response: true)
assert %Email{} = email
assert %{body: _, headers: _, status_code: _} = response
end
@tag adapter: ResponseAdapter
test "deliver_now/1 does not return response when not passing in response: true option" do
email = new_email(to: "[email protected]")
{:ok, email} = Mailer.deliver_now(email)
assert %Email{} = email
end
@tag adapter: ResponseAdapter
test "deliver_now!/1 returns email when not passing in response: true option" do
email = new_email(to: "[email protected]")
email = Mailer.deliver_now!(email)
assert %Email{} = email
end
@tag adapter: ResponseAdapter
test "deliver_now/1 returns email and response when passing in both response: true and a custom config option" do
email = new_email(to: "[email protected]")
{:ok, email, response} = Mailer.deliver_now(email, config: %{}, response: true)
assert %Email{} = email
assert %{body: _, headers: _, status_code: _} = response
end
@tag adapter: ResponseAdapter
test "does not return a response if email is not sent" do
email = new_email(to: [], cc: [], bcc: [])
{:ok, email} = Mailer.deliver_now(email, response: true)
refute_received {:deliver, ^email, _}
end
end
describe "interceptors" do
@tag interceptors: [Bamboo.DenyListInterceptor, Bamboo.EnvInterceptor]
test "deliver_now/1 must apply interceptors and send email if not intercepted" do
email = new_email(to: "[email protected]")
assert {:ok, %Bamboo.Email{blocked: false}} = Mailer.deliver_now(email)
assert_receive {:deliver, %Bamboo.Email{to: [{nil, "[email protected]"}], subject: "test - "},
_config}
end
@tag interceptors: [Bamboo.DenyListInterceptor, Bamboo.EnvInterceptor]
test "deliver_now/1 must apply interceptors and block email if intercepted" do
email = new_email(to: "[email protected]")
assert {:ok, %Bamboo.Email{blocked: true}} = Mailer.deliver_now(email)
refute_receive {:deliver, %Bamboo.Email{to: [{nil, "[email protected]"}]}, _config}
end
@tag interceptors: [Bamboo.DenyListInterceptor, Bamboo.EnvInterceptor]
test "deliver_now!/1 must apply interceptors and send email if not intercepted" do
email = new_email(to: "[email protected]")
assert %Bamboo.Email{blocked: false} = Mailer.deliver_now!(email)
assert_receive {:deliver, %Bamboo.Email{to: [{nil, "[email protected]"}], subject: "test - "},
_config}
end
@tag interceptors: [Bamboo.DenyListInterceptor, Bamboo.EnvInterceptor]
test "deliver_now!/1 must apply interceptors and block email if intercepted" do
email = new_email(to: "[email protected]")
assert %Bamboo.Email{blocked: true} = Mailer.deliver_now!(email)
refute_receive {:deliver, %Bamboo.Email{to: [{nil, "[email protected]"}]}, _config}
end
@tag interceptors: [Bamboo.DenyListInterceptor, Bamboo.EnvInterceptor]
test "deliver_later/1 must apply interceptors and send email if not intercepted" do
email = new_email(to: "[email protected]")
assert {:ok, %Bamboo.Email{blocked: false}} = Mailer.deliver_later(email)
assert_receive {:deliver, %Bamboo.Email{to: [{nil, "[email protected]"}], subject: "test - "},
_config}
end
@tag interceptors: [Bamboo.DenyListInterceptor, Bamboo.EnvInterceptor]
test "deliver_later/1 must apply interceptors and block email if intercepted" do
email = new_email(to: "[email protected]")
assert {:ok, %Bamboo.Email{blocked: true}} = Mailer.deliver_later(email)
refute_receive {:deliver, %Bamboo.Email{to: [{nil, "[email protected]"}]}, _config}
end
@tag interceptors: [Bamboo.DenyListInterceptor, Bamboo.EnvInterceptor]
test "deliver_later!/1 must apply interceptors and send email if not intercepted" do
email = new_email(to: "[email protected]")
assert %Bamboo.Email{blocked: false} = Mailer.deliver_later!(email)
assert_receive {:deliver, %Bamboo.Email{to: [{nil, "[email protected]"}], subject: "test - "},
_config}
end
@tag interceptors: [Bamboo.DenyListInterceptor, Bamboo.EnvInterceptor]
test "deliver_later!/1 must apply interceptors and block email if intercepted" do
email = new_email(to: "[email protected]")
assert %Bamboo.Email{blocked: true} = Mailer.deliver_later!(email)
refute_receive {:deliver, %Bamboo.Email{to: [{nil, "[email protected]"}]}, _config}
end
end
defp new_email(attrs \\ []) do
attrs = Keyword.merge([from: "[email protected]", to: "[email protected]"], attrs)
Email.new_email(attrs)
end
end
| 32.593573 | 117 | 0.671094 |
f7c4dbb4772fdc0d402577a5285612d15e5a5021 | 1,642 | ex | Elixir | lib/mix/lib/mix/tasks/compile.leex.ex | QuinnWilton/elixir | e42e3e55ca1561fe56b58d6f51c7b0faae6a7a1e | [
"Apache-2.0"
] | 1 | 2015-11-12T19:23:45.000Z | 2015-11-12T19:23:45.000Z | lib/mix/lib/mix/tasks/compile.leex.ex | enokd/elixir | e39b32f235082b8a29fcb22d250c822cca98609f | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/compile.leex.ex | enokd/elixir | e39b32f235082b8a29fcb22d250c822cca98609f | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Compile.Leex do
alias Mix.Tasks.Compile.Erlang
use Mix.Task
@recursive true
@manifest ".compile.leex"
@moduledoc """
Compile Leex source files.
When this task runs, it will check the modification time of every file, and
if it has changed, the file will be compiled. Files will be
compiled in the same source directory with a .erl extension.
You can force compilation regardless of modification times by passing
the `--force` option.
## Command line options
* `--force` - forces compilation regardless of modification times;
## Configuration
* `:erlc_paths` - directories to find source files.
Defaults to `["src"]`, can be configured as:
```
[erlc_paths: ["src", "other"]]
```
* `:leex_options` - compilation options that apply
to Leex's compiler. There are many available options
here: http://www.erlang.org/doc/man/leex.html#file-2
"""
@doc """
Runs this task.
"""
def run(args) do
{ opts, _, _ } = OptionParser.parse(args, switches: [force: :boolean])
project = Mix.project
source_paths = project[:erlc_paths]
mappings = Enum.zip(source_paths, source_paths)
options = project[:leex_options] || []
Erlang.compile_mappings(manifest(), mappings, :xrl, :erl, opts[:force], fn
input, output ->
options = options ++ [scannerfile: Erlang.to_erl_file(output), report: true]
:leex.file(Erlang.to_erl_file(input), options)
end)
end
@doc """
Returns Leex manifests.
"""
def manifests, do: [manifest]
defp manifest, do: Path.join(Mix.Project.manifest_path, @manifest)
end
| 26.918033 | 84 | 0.669915 |
f7c517fea3cc69ad0b95525f046a0de55d5dae1d | 2,374 | ex | Elixir | clients/ad_mob/lib/google_api/ad_mob/v1/model/date.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/ad_mob/lib/google_api/ad_mob/v1/model/date.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/ad_mob/lib/google_api/ad_mob/v1/model/date.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdMob.V1.Model.Date do
  @moduledoc """
  Represents a whole or partial calendar date, e.g. a birthday. The time of day
  and time zone are either specified elsewhere or are not significant. The date
  is relative to the Proleptic Gregorian Calendar. This can represent:

  * A full date, with non-zero year, month and day values
  * A month and day value, with a zero year, e.g. an anniversary
  * A year on its own, with zero month and day values
  * A year and month value, with a zero day, e.g. a credit card expiration date

  Related types are google.type.TimeOfDay and `google.protobuf.Timestamp`.

  ## Attributes

  *   `day` (*type:* `integer()`, *default:* `nil`) - Day of month. Must be from 1 to 31 and valid for the year and month, or 0
      if specifying a year by itself or a year and month where the day is not
      significant.
  *   `month` (*type:* `integer()`, *default:* `nil`) - Month of year. Must be from 1 to 12, or 0 if specifying a year without a
      month and day.
  *   `year` (*type:* `integer()`, *default:* `nil`) - Year of date. Must be from 1 to 9999, or 0 if specifying a date without
      a year.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :day => integer(),
          :month => integer(),
          :year => integer()
        }

  # field/1 declarations register each attribute with the ModelBase macro,
  # which generates the struct and the decode/encode support used by the
  # Poison implementations below.
  field(:day)
  field(:month)
  field(:year)
end
defimpl Poison.Decoder, for: GoogleApi.AdMob.V1.Model.Date do
  # Delegate decoding to the generated model-level decode/2.
  def decode(value, options), do: GoogleApi.AdMob.V1.Model.Date.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.AdMob.V1.Model.Date do
  # All generated models share the ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 35.969697 | 128 | 0.699242 |
f7c52eacd11da7b2c62326ede936d61557e5670d | 2,405 | ex | Elixir | lib/ueberauth/strategy/strava/oauth.ex | Tiltify/ueberauth_strava | 6f77ae7dd43dd7510eec521698d0637b7ebe55ce | [
"MIT"
] | null | null | null | lib/ueberauth/strategy/strava/oauth.ex | Tiltify/ueberauth_strava | 6f77ae7dd43dd7510eec521698d0637b7ebe55ce | [
"MIT"
] | null | null | null | lib/ueberauth/strategy/strava/oauth.ex | Tiltify/ueberauth_strava | 6f77ae7dd43dd7510eec521698d0637b7ebe55ce | [
"MIT"
] | null | null | null | defmodule Ueberauth.Strategy.Strava.OAuth do
@moduledoc """
OAuth2 for Strava.
Add `client_id` and `client_secret` to your configuration:
config :ueberauth, Ueberauth.Strategy.Strava.OAuth,
client_id: System.get_env("Strava_APP_ID"),
client_secret: System.get_env("Strava_APP_SECRET")
"""
use OAuth2.Strategy
alias OAuth2.Strategy.AuthCode
@defaults [
strategy: __MODULE__,
site: "https://www.strava.com/",
authorize_url: "https://www.strava.com/oauth/authorize",
token_url: "https://www.strava.com/oauth/token"
]
@doc """
Construct a client for requests to Strava.
This will be setup automatically for you in `Ueberauth.Strategy.Strava`.
These options are only useful for usage outside the normal callback phase
of Ueberauth.
"""
def client(opts \\ []) do
config = Application.get_env(:ueberauth, Ueberauth.Strategy.Strava.OAuth, [])
opts = @defaults |> Keyword.merge(config) |> Keyword.merge(opts)
json_library = Ueberauth.json_library()
opts
|> OAuth2.Client.new()
|> OAuth2.Client.put_serializer("application/json", json_library)
end
@doc """
Provides the authorize url for the request phase of Ueberauth.
No need to call this usually.
"""
def authorize_url!(params \\ [], opts \\ []) do
client = client(opts)
OAuth2.Client.authorize_url!(client, params)
end
def get(token, url, headers \\ [], opts \\ []) do
[token: token]
|> client()
|> put_param(:client_secret, client().client_secret)
|> OAuth2.Client.get(url, headers, opts)
end
def get_token(params \\ [], opts \\ []) do
client = client(opts)
code = Map.get(params, "code")
case OAuth2.Client.get_token(client, code: code) do
{:error, %{body: %{"errors" => errors, "message" => description}}} ->
{:error, {errors, description}}
{:ok, %{token: %{access_token: nil} = token}} ->
%{"errors" => errors, "message" => description} = token.other_params
{:error, {errors, description}}
{:ok, %{token: token}} ->
{:ok, token}
end
end
# Strategy Callbacks
def authorize_url(client, params), do: AuthCode.authorize_url(client, params)
def get_token(client, params, headers) do
client
|> put_param(:client_secret, client.client_secret)
|> put_header("Accept", "application/json")
|> AuthCode.get_token(params, headers)
end
end
| 29.691358 | 81 | 0.661123 |
f7c552188fc5a38f9f2cb55b5f551b7eec1ec9b7 | 5,993 | exs | Elixir | test/ex_dice_roller_test.exs | rishenko/dice_roller | 1cdea207a2a6007963e52dca817129245ffb470e | [
"Apache-2.0"
] | 8 | 2018-09-24T22:37:07.000Z | 2021-06-17T00:47:20.000Z | test/ex_dice_roller_test.exs | rishenko/dice_roller | 1cdea207a2a6007963e52dca817129245ffb470e | [
"Apache-2.0"
] | 14 | 2018-10-05T11:17:45.000Z | 2018-11-12T13:47:10.000Z | test/ex_dice_roller_test.exs | rishenko/dice_roller | 1cdea207a2a6007963e52dca817129245ffb470e | [
"Apache-2.0"
] | null | null | null | defmodule ExDiceRollerTest do
@moduledoc """
Tests around tokenizing, parsing, and rolling.
Note that test setup seeds the randomizer for each test, which allows for
predictable test results.
"""
use ExDiceRoller.Case
doctest ExDiceRoller
alias ExDiceRoller.Cache
describe "rolls" do
test "basic" do
1 = ExDiceRoller.roll("1")
3 = ExDiceRoller.roll("3.14159")
2 = ExDiceRoller.roll("1+1")
1 = ExDiceRoller.roll("1d4")
8 = ExDiceRoller.roll("2d6")
6 = ExDiceRoller.roll("1d12+2")
2 = ExDiceRoller.roll("1,2")
3 = ExDiceRoller.roll("3,3")
83 = ExDiceRoller.roll("11,5,83,42,36")
1 = ExDiceRoller.roll("2,1", opts: [:lowest])
2 = ExDiceRoller.roll("5%3")
8 = ExDiceRoller.roll("2^3")
end
test "unary" do
-1 = ExDiceRoller.roll("-1")
3 = ExDiceRoller.roll("-1*-3")
-3 = ExDiceRoller.roll("-1*+3")
4 = ExDiceRoller.roll("1--3")
4 = ExDiceRoller.roll("1-(-3)")
-2 = ExDiceRoller.roll("-3/2")
end
test "variables" do
4 = ExDiceRoller.roll("1d8+x", x: 3)
2 = ExDiceRoller.roll("1dy", y: 6)
8 = ExDiceRoller.roll("1+y", y: 7)
10 = ExDiceRoller.roll("1+z", z: "1d6+3")
5 = ExDiceRoller.roll("1+x", x: "1+3")
end
test "variable with list" do
[1, 6, 9] = ExDiceRoller.roll("xdy", x: [1, 2, 3], y: 4)
[19, 20, 19] = ExDiceRoller.roll("xdy", x: 5, y: [6, 7, 8])
[13, 4, 10, 14, 12, 22, 24, 16, 12] = ExDiceRoller.roll("xdy", x: [3, 4, 5], y: [6, 7, 8])
end
test "complex" do
25 = ExDiceRoller.roll("(1/3*6)d(6d4+3-4) + (4*3d5-18)")
16_298 = ExDiceRoller.roll("2d5d6d7d8d9d10")
-18 = ExDiceRoller.roll("1d7d(9/8)+(5-6d8)")
4 = ExDiceRoller.roll("1d8+(-3/2)")
5 = ExDiceRoller.roll("-3/2+2d4")
6 = ExDiceRoller.roll("4d1, 6d1")
18 = ExDiceRoller.roll("3d6+9.5678,1d4")
1 = ExDiceRoller.roll("13%(1d4)")
0 = ExDiceRoller.roll("(5d3)%3")
1 = ExDiceRoller.roll("(6d4)%(2d3)")
169 = ExDiceRoller.roll("13^(1d4)")
2197 = ExDiceRoller.roll("(5d3)^3")
196 = ExDiceRoller.roll("(6d4)^(2d3)")
end
test "variations of expressions" do
4 = ExDiceRoller.roll("(1d4)d(2d8)")
13 = ExDiceRoller.roll("1d4 + 2d8")
-1 = ExDiceRoller.roll("1d4 - 2d8")
24 = ExDiceRoller.roll("1d4 * 2d8")
0 = ExDiceRoller.roll("1d4 / 2d8")
3 = ExDiceRoller.roll("1d4 + 1.54")
-3 = ExDiceRoller.roll("1d4 - 4")
6 = ExDiceRoller.roll("1d4 * 2")
1 = ExDiceRoller.roll("1d4 / 3")
4 = ExDiceRoller.roll("4d1, 3.1459")
3 = ExDiceRoller.roll("3,10d1", opts: [:lowest])
60 = ExDiceRoller.roll("5d1,3d1,60d1,10d1", opts: [:highest])
end
test "basic arithmetic with variables" do
22 = ExDiceRoller.roll("x+15", x: 7)
7 = ExDiceRoller.roll("x+y", x: 3, y: 4)
10 = ExDiceRoller.roll("x+x", x: 5)
-65 = ExDiceRoller.roll("x+x-y*y", x: 8, y: 9)
250 = ExDiceRoller.roll("x/y", x: 1000, y: 4)
3 = ExDiceRoller.roll("x+x/y", x: 2, y: 4)
4 = ExDiceRoller.roll("x/y", x: 15, y: 4)
end
test "list comprehensions" do
[3, 12] = ExDiceRoller.roll("2d4 + 2d8", opts: [:keep])
[1] = ExDiceRoller.roll("1d4 - 2", opts: [:keep])
[12, 8] = ExDiceRoller.roll("2 * 2d8", opts: [:keep])
[4, 6] = ExDiceRoller.roll("2d6,2d8", opts: [:keep])
[6, 3] = ExDiceRoller.roll("2d6, 3", opts: [:keep])
[6, 3, 3] = ExDiceRoller.roll("3, 3d6", opts: [:keep])
end
test "errors using separator with lists" do
assert_raise ArgumentError, fn -> ExDiceRoller.roll("2d4, 1d8", opts: [:keep]) end
end
test "errors with lists" do
assert_raise ArgumentError, fn -> ExDiceRoller.roll("2d4 + 1d8", opts: [:keep]) end
end
test "keep roll values" do
values = ExDiceRoller.roll("3d6", opts: [:keep])
require Logger
assert length(values) == 3
end
test "with spaces" do
5 = ExDiceRoller.roll("1 d 4 - 2+ (50+1 ) / 2d5")
end
test "with newlines" do
expr = """
1 +
2 *9-
1d4-1
*8
"""
10 = ExDiceRoller.roll(expr)
end
test "exploding dice" do
{:ok, fun} = ExDiceRoller.compile("1d2")
assert 1 == ExDiceRoller.execute(fun, opts: [:explode])
assert 7 == ExDiceRoller.execute(fun, opts: [:explode])
assert 9 == ExDiceRoller.execute(fun, opts: [:explode])
assert 9 == ExDiceRoller.execute(fun, opts: [:explode])
assert 1 == ExDiceRoller.execute(fun, opts: [:explode])
end
test "that error on a negative number of dice" do
assert_raise(ArgumentError, fn -> ExDiceRoller.roll("-1d4") end)
end
test "that error on values" do
assert_raise(ArgumentError, ~s/no variable 'z' was found in the arguments/, fn ->
ExDiceRoller.roll("1dz")
end)
assert_raise(ArgumentError, ~s/no variable 'z' was found in the arguments/, fn ->
ExDiceRoller.roll("1dz", z: nil)
end)
end
test "that error during tokenizing" do
assert {:error, {:tokenizing_failed, _}} = ExDiceRoller.roll("1d6+$")
end
test "that error during parsing" do
assert {:error, {:token_parsing_failed, _}} = ExDiceRoller.roll("1d6++")
end
test "starting cache" do
{:ok, ExDiceRoller.Cache.Test} = ExDiceRoller.start_cache(ExDiceRoller.Cache.Test)
assert [] == Cache.all(ExDiceRoller.Cache.Test)
end
end
describe "caching" do
test "rolls" do
{:ok, _} = ExDiceRoller.start_cache()
roll = "1d20"
assert [] == Cache.all()
assert 9 == ExDiceRoller.roll(roll, cache: true)
assert length(Cache.all()) == 1
end
test "variables" do
{:ok, _} = ExDiceRoller.start_cache()
roll = "1d6+x-y"
assert [] == Cache.all()
assert 6 == ExDiceRoller.roll(roll, x: 7, y: 4, cache: true)
assert length(Cache.all()) == 1
end
end
end
| 31.376963 | 96 | 0.576673 |
f7c552ab7ee9b741e5e5fd6841e7d5ba1fd98031 | 251 | ex | Elixir | apps/database/lib/database/schema/mod_tag.ex | gregredhead/belethor | 255925396b18ba4a6950f386abf8a9e17a7e5e7c | [
"Apache-2.0"
] | 3 | 2018-07-20T22:14:36.000Z | 2018-12-21T19:54:48.000Z | apps/database/lib/database/schema/mod_tag.ex | gregredhead/belethor | 255925396b18ba4a6950f386abf8a9e17a7e5e7c | [
"Apache-2.0"
] | 36 | 2018-09-15T21:46:54.000Z | 2020-03-28T16:10:18.000Z | apps/database/lib/database/schema/mod_tag.ex | gregredhead/belethor | 255925396b18ba4a6950f386abf8a9e17a7e5e7c | [
"Apache-2.0"
] | 2 | 2018-07-22T08:47:07.000Z | 2021-12-11T01:39:19.000Z | defmodule Database.Schema.ModTag do
@moduledoc """
Stores info about each tag
"""
use Ecto.Schema
schema "mod_tag" do
field(:name, :string)
many_to_many(:mods, Database.Schema.Mod, join_through: "mods_tags", unique: true)
end
end
| 20.916667 | 85 | 0.697211 |
f7c55fde344bee211a2aa1f06e0e1a91437ec0d5 | 1,109 | exs | Elixir | mix.exs | gabiseabra/logger_batched_backend | fc9e2e3ef93d4bb37e4cd37b633dd712191a08aa | [
"MIT"
] | null | null | null | mix.exs | gabiseabra/logger_batched_backend | fc9e2e3ef93d4bb37e4cd37b633dd712191a08aa | [
"MIT"
] | null | null | null | mix.exs | gabiseabra/logger_batched_backend | fc9e2e3ef93d4bb37e4cd37b633dd712191a08aa | [
"MIT"
] | null | null | null | defmodule LoggerBatchedBackend.MixProject do
use Mix.Project
def project do
[
app: :logger_batched_backend,
version: "0.1.0",
elixir: "~> 1.9",
start_permanent: Mix.env() == :prod,
build_embedded: Mix.env() == :prod,
source_url: "https://github.com/gabiseabra/logger_batched_backend",
package: package(),
description: description(),
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:mox, "~> 0.5", only: :test},
{:ex_doc, ">= 0.0.0", only: :dev, runtime: false}
]
end
defp description() do
"Service agnostic logger backend that handles batching and retries."
end
defp package() do
[
maintainers: ["Gabriela Seabra"],
licenses: ["MIT"],
links: %{
"GitHub" => "https://github.com/gabiseabra/logger_batched_backend",
"Logger" => "https://hexdocs.pm/logger/Logger.html"
}
]
end
end
| 23.104167 | 75 | 0.599639 |
f7c57d61e7f0292143b426405d6a128deca3c44e | 748 | ex | Elixir | lib/ex_esi/api/skills.ex | lukasni/ex-esi | b902a2a4d9ef834473c71b5c470320cc2854e8eb | [
"MIT"
] | null | null | null | lib/ex_esi/api/skills.ex | lukasni/ex-esi | b902a2a4d9ef834473c71b5c470320cc2854e8eb | [
"MIT"
] | null | null | null | lib/ex_esi/api/skills.ex | lukasni/ex-esi | b902a2a4d9ef834473c71b5c470320cc2854e8eb | [
"MIT"
] | null | null | null | defmodule ExEsi.API.Skills do
@moduledoc false
alias ExEsi.API
alias ExEsi.API.Character
@version "v1"
@spec attributes(ExEsi.API.Character.t()) :: ExEsi.Operation.JSON.t()
def attributes(%Character{id: character_id}) do
"/#{@version}/characters/#{character_id}/attributes/"
|> API.get()
end
@version "v2"
@spec skillqueue(ExEsi.API.Character.t()) :: ExEsi.Operation.JSON.t()
def skillqueue(%Character{id: character_id}) do
"/#{@version}/characters/#{character_id}/skillqueue/"
|> API.get()
end
@version "v4"
@spec skills(ExEsi.API.Character.t()) :: ExEsi.Operation.JSON.t()
def skills(%Character{id: character_id}) do
"/#{@version}/characters/#{character_id}/skills/"
|> API.get()
end
end
| 26.714286 | 71 | 0.67246 |
f7c5850092b5368e1e1046dab96a43a8736ecbc6 | 455 | ex | Elixir | lib/hl7/2.5.1/segments/db1.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.5.1/segments/db1.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.5.1/segments/db1.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | defmodule HL7.V2_5_1.Segments.DB1 do
# HL7 v2.5.1 "DB1" (Disability) segment. The `use HL7.Segment` macro consumes
# `fields` to define this segment's struct/parsing in wire order; a nil value
# marks a plain field, while a module names the composite data type used for
# that field (behavior defined in HL7.Segment — see that module for details).
@moduledoc false
require Logger
alias HL7.V2_5_1.{DataTypes}

use HL7.Segment,
  fields: [
    segment: nil,
    set_id_db1: nil,
    disabled_person_code: nil,
    # CX: extended composite ID, parsed via DataTypes.Cx
    disabled_person_identifier: DataTypes.Cx,
    disabled_indicator: nil,
    disability_start_date: nil,
    disability_end_date: nil,
    disability_return_to_work_date: nil,
    disability_unable_to_work_date: nil
  ]
end
| 22.75 | 47 | 0.701099 |
f7c5885c20fb36847664c1afe5cef99384d0a66f | 12,852 | ex | Elixir | lib/kaffy/resource_form.ex | sulphur/kaffy | 72f07149f811925e247d119a0e7053d6a3e241a1 | [
"MIT"
] | null | null | null | lib/kaffy/resource_form.ex | sulphur/kaffy | 72f07149f811925e247d119a0e7053d6a3e241a1 | [
"MIT"
] | null | null | null | lib/kaffy/resource_form.ex | sulphur/kaffy | 72f07149f811925e247d119a0e7053d6a3e241a1 | [
"MIT"
] | null | null | null | defmodule Kaffy.ResourceForm do
use Phoenix.HTML
# Resolves the text used for a field's <label>: field options may override
# the default (the field atom itself) via :label.
def form_label_string({field, options}), do: Map.get(options, :label, field)
def form_label_string(field) when is_atom(field), do: field
# Builds a <label> tag for `field`, resolving any custom :label option
# through form_label_string/1.
def form_label(form, field) do
  label(form, form_label_string(field))
end
# Optional help text rendered under an input. Only configurable through the
# {field, options} form, so a bare atom field never has help text.
def form_help_text({_field, options}), do: Map.get(options, :help_text, nil)
def form_help_text(field) when is_atom(field), do: nil
# Renders an input for `field` against a bare resource map (no changeset).
#
# Precedence: an explicit :choices list renders a <select>; a :read
# permission renders the current value as a label instead of an input;
# otherwise the input is built from the (possibly overridden) field type.
def bare_form_field(resource, form, {field, options}) do
  # Options may arrive as nil; normalize so the Map.get calls below work.
  options = options || %{}
  type = Map.get(options, :type, Kaffy.ResourceSchema.field_type(resource[:schema], field))
  permission = Map.get(options, :permission, :write)
  choices = Map.get(options, :choices)

  cond do
    !is_nil(choices) ->
      select(form, field, choices, class: "custom-select")

    permission == :read ->
      content_tag(
        :div,
        label(form, field, Kaffy.ResourceSchema.kaffy_field_value(resource[:schema], field))
      )

    true ->
      build_html_input(resource[:schema], form, field, type, [])
  end
end
# Renders the form input for `field` backed by `changeset`.
#
# The {field, options} clause honors per-field admin options:
#   * :type    — overrides the schema-derived field type
#   * :rows    — textarea height (default 5)
#   * :create / :update — permission for the current action (:editable or
#     :readonly); which one applies depends on whether the record persists
#   * :choices — renders a <select> regardless of type
# :image and :html types render display-only output instead of an input.
def form_field(changeset, form, field, opts \\ [])

def form_field(changeset, form, {field, options}, opts) do
  options = options || %{}

  type =
    Map.get(options, :type, Kaffy.ResourceSchema.field_type(changeset.data.__struct__, field))

  opts =
    if type == :textarea do
      rows = Map.get(options, :rows, 5)
      Keyword.put(opts, :rows, rows)
    else
      opts
    end

  # A nil id means the record was never persisted, i.e. this is a create form.
  permission =
    case is_nil(changeset.data.id) do
      true -> Map.get(options, :create, :editable)
      false -> Map.get(options, :update, :editable)
    end

  choices = Map.get(options, :choices)

  cond do
    !is_nil(choices) ->
      select(form, field, choices, class: "custom-select")

    type == :image ->
      build_image_preview(changeset.data, options)

    type == :html ->
      build_custom_html(changeset.data, options)

    true ->
      build_html_input(changeset.data, form, field, type, opts, permission == :readonly)
  end
end

# Bare-atom field: no options, type comes straight from the schema.
def form_field(changeset, form, field, opts) do
  type = Kaffy.ResourceSchema.field_type(changeset.data.__struct__, field)
  build_html_input(changeset.data, form, field, type, opts)
end
# Display-only <img> preview for :image fields; the src comes from the
# field's :value / :value_fn option (see extract_value/2).
defp build_image_preview(data, opts) do
  url = extract_value(data, opts)
  content_tag :img, "", src: url, style: Map.get(opts, :inline_css)
end

# Injects raw HTML for :html fields. The value is emitted unescaped
# ({:safe, ...}), so it must come from trusted admin configuration only.
defp build_custom_html(data, opts) do
  value = extract_value(data, opts)
  {:safe, value}
end
# Pulls the display value for custom :image / :html fields from the field
# options: `:value_fn` (a function of the record) wins, then a literal
# `:value`; anything else is a configuration error (RuntimeError, same
# message as before — only the non-idiomatic `raise (...)` spacing, which
# `mix format` flags, was cleaned up).
defp extract_value(data, %{value_fn: fun}) when is_function(fun), do: fun.(data)
defp extract_value(_data, %{value: value}), do: value
defp extract_value(_data, _), do: raise(":value or :value_fn is missing")
# Central type -> HTML input dispatcher. `schema` is actually the data
# record (a struct); its module is derived below. `opts[:conn]` is popped
# out because only the association picker (text_or_assoc/5) needs it.
defp build_html_input(schema, form, field, type, opts, readonly \\ false) do
  data = schema
  {conn, opts} = Keyword.pop(opts, :conn)
  opts = Keyword.put(opts, :readonly, readonly)
  schema = schema.__struct__

  case type do
    # Embedded schema: render a nested card with one form-group per
    # embedded field, driven by inputs_for/3.
    {:embed, _} ->
      embed = Kaffy.ResourceSchema.embed_struct(schema, field)
      embed_fields = Kaffy.ResourceSchema.fields(embed)
      embed_changeset = Ecto.Changeset.change(Map.get(data, field) || embed.__struct__)

      inputs_for(form, field, fn fp ->
        [
          {:safe, ~s(<div class="card ml-3" style="padding:15px;">)},
          Enum.reduce(embed_fields, [], fn f, all ->
            content_tag :div, class: "form-group" do
              [
                [
                  form_label(fp, f),
                  form_field(embed_changeset, fp, f, class: "form-control")
                ]
                | all
              ]
            end
          end),
          {:safe, "</div>"}
        ]
      end)

    # id/binary_id: a plain input when the field is the record's own
    # primary key; otherwise it's a foreign key -> association picker.
    :id ->
      case Kaffy.ResourceSchema.primary_key(schema) == [field] do
        true -> text_input(form, field, opts)
        false -> text_or_assoc(conn, schema, form, field, opts)
      end

    :binary_id ->
      case Kaffy.ResourceSchema.primary_key(schema) == [field] do
        true -> text_input(form, field, opts)
        false -> text_or_assoc(conn, schema, form, field, opts)
      end

    :string ->
      text_input(form, field, opts)

    # "kaffy-editor" is the class the bundled rich-text JS hooks into.
    :richtext ->
      opts = Keyword.put(opts, :class, "kaffy-editor")
      textarea(form, field, opts)

    :textarea ->
      textarea(form, field, opts)

    :integer ->
      number_input(form, field, opts)

    # Floats/decimals render as text inputs — presumably to avoid browser
    # number-step validation quirks (TODO confirm intent).
    :float ->
      text_input(form, field, opts)

    :decimal ->
      text_input(form, field, opts)

    t when t in [:boolean, :boolean_checkbox] ->
      checkbox_opts = Keyword.put(opts, :class, "custom-control-input")
      label_opts = Keyword.put(opts, :class, "custom-control-label")

      [
        {:safe, ~s(<div class="custom-control custom-checkbox">)},
        checkbox(form, field, checkbox_opts),
        label(form, field, label_opts),
        {:safe, "</div>"}
      ]

    # Same markup as the checkbox but with Bootstrap's "custom-switch".
    :boolean_switch ->
      checkbox_opts = Keyword.put(opts, :class, "custom-control-input")
      label_opts = Keyword.put(opts, :class, "custom-control-label")

      [
        {:safe, ~s(<div class="custom-control custom-switch">)},
        checkbox(form, field, checkbox_opts),
        label(form, field, label_opts),
        {:safe, "</div>"}
      ]

    # Maps and arrays are edited as pretty-printed JSON in a textarea.
    :map ->
      value = Map.get(data, field, "")

      value =
        cond do
          is_map(value) -> Kaffy.Utils.json().encode!(value, escape: :html_safe, pretty: true)
          true -> value
        end

      textarea(form, field, [value: value, rows: 4, placeholder: "JSON Content"] ++ opts)

    {:array, _} ->
      value =
        data
        |> Map.get(field, "")
        |> Kaffy.Utils.json().encode!(escape: :html_safe, pretty: true)

      textarea(form, field, [value: value, rows: 4, placeholder: "JSON Content"] ++ opts)

    :file ->
      file_input(form, field, opts)

    :select ->
      select(form, field, opts)

    # All date/time flavors render flatpickr-backed inputs.
    :date ->
      flatpickr_date(form, field, opts)

    :time ->
      flatpickr_time(form, field, opts)

    :naive_datetime ->
      flatpickr_datetime(form, field, opts)

    :naive_datetime_usec ->
      flatpickr_datetime_usec(form, field, opts)

    :utc_datetime ->
      flatpickr_datetime(form, field, opts)

    :utc_datetime_usec ->
      flatpickr_datetime_usec(form, field, opts)

    # Unknown/custom types degrade to a plain text input.
    _ ->
      text_input(form, field, opts)
  end
end
# Convenience wrappers around flatpickr_generic/6: each supplies the wrapper
# CSS class (presumably keyed on by the front-end flatpickr init — confirm)
# and, for time/date, a distinct toggle icon.
defp flatpickr_time(form, field, opts) do
  flatpickr_generic(form, field, opts, "Select Date...", "flatpickr-wrap-time", "🕒")
end

defp flatpickr_date(form, field, opts) do
  flatpickr_generic(form, field, opts, "Select Date...", "flatpickr-wrap-date", "🗓️")
end

# Datetime variants use the default 📅 toggle icon.
defp flatpickr_datetime(form, field, opts) do
  flatpickr_generic(form, field, opts, "Select Datetime...", "flatpickr-wrap-datetime")
end

defp flatpickr_datetime_usec(form, field, opts) do
  flatpickr_generic(form, field, opts, "Select Datetime...", "flatpickr-wrap-datetime-usec")
end
# Renders a Bootstrap input-group hosting a flatpickr text input: a clear
# (❌) button, a toggle button showing `icon`, and the input itself.
#
# Fix: the original put :class to "flatpickr-input" and then immediately
# overwrote it with "form-control" on the next line, so the first put was
# dead code; only the surviving value is kept (rendered markup unchanged).
defp flatpickr_generic(form, field, opts, placeholder, fp_class, icon \\ "📅") do
  opts =
    opts
    |> Keyword.put(:class, "form-control")
    |> Keyword.put(:id, "inlineFormInputGroup")
    |> Keyword.put(:placeholder, placeholder)
    # data-input marks which element inside the wrap flatpickr attaches to.
    |> Keyword.put(:"data-input", "")

  [
    {:safe, ~s(
    <div class="input-group mb-2 flatpickr #{fp_class}">
      <div class="input-group-prepend">
        <div class="input-group-text" data-clear>❌</div>
      </div>
      <div class="input-group-prepend">
        <div class="input-group-text" data-toggle>#{icon}</div>
      </div>
    )},
    text_input(form, field, opts),
    {:safe, "</div>"}
  ]
end
# Foreign-key input. If `field` is the owner key of an association, render a
# picker for the associated records:
#   * more than 100 candidate rows — a raw number input plus a search link
#     into the target resource's index (the "pick" flow), avoiding a huge
#     <select>
#   * otherwise — a <select> over all rows, labeled by the most likely
#     human-readable string column (:name / :title, else the first string
#     field, else the id)
# Non-association fields fall through to a plain number input.
defp text_or_assoc(conn, schema, form, field, opts) do
  # Find the association whose owner (FK) column is exactly this field.
  actual_assoc =
    Enum.filter(Kaffy.ResourceSchema.associations(schema), fn a ->
      Kaffy.ResourceSchema.association(schema, a).owner_key == field
    end)
    |> Enum.at(0)

  # e.g. :user_id -> :user when an association was found.
  field_no_id =
    case actual_assoc do
      nil -> field
      _ -> Kaffy.ResourceSchema.association(schema, actual_assoc).field
    end

  case field_no_id in Kaffy.ResourceSchema.associations(schema) do
    true ->
      assoc = Kaffy.ResourceSchema.association_schema(schema, field_no_id)
      option_count = Kaffy.ResourceQuery.cached_total_count(assoc, true, assoc)

      case option_count > 100 do
        true ->
          target_context = Kaffy.Utils.get_context_for_schema(conn, assoc)
          target_resource = Kaffy.Utils.get_schema_key(conn, target_context, assoc)

          content_tag :div, class: "input-group" do
            [
              number_input(form, field,
                class: "form-control",
                id: field,
                aria_describedby: field
              ),
              content_tag :div, class: "input-group-append" do
                content_tag :span, class: "input-group-text", id: field do
                  # Search icon linking to the target index in "pick" mode;
                  # c/r carry the current context/resource for the round-trip.
                  link(content_tag(:i, "", class: "fas fa-search"),
                    to:
                      Kaffy.Utils.router().kaffy_resource_path(
                        conn,
                        :index,
                        target_context,
                        target_resource,
                        c: conn.params["context"],
                        r: conn.params["resource"],
                        pick: field
                      ),
                    id: "pick-raw-resource"
                  )
                end
              end
            ]
          end

        false ->
          options = Kaffy.Utils.repo().all(assoc)
          fields = Kaffy.ResourceSchema.fields(assoc)

          # Candidate label columns: native :string fields, or custom Ecto
          # types whose underlying type/0 is :string. (Note: the inner
          # `options` here shadows the records list bound above.)
          string_fields =
            Enum.filter(fields, fn {_f, options} ->
              options.type == :string or
                (Kaffy.Utils.is_module(options.type) and
                   Kernel.function_exported?(options.type, :type, 0) and
                   options.type.type == :string)
            end)

          popular_strings =
            string_fields
            |> Enum.filter(fn {f, _} -> f in [:name, :title] end)
            |> Enum.at(0)

          string_field =
            case is_nil(popular_strings) do
              true -> (Enum.at(string_fields, 0) || {:id}) |> elem(0)
              false -> elem(popular_strings, 0)
            end

          select(
            form,
            field,
            Enum.map(options, fn o -> {Map.get(o, string_field, "Resource ##{o.id}"), o.id} end),
            class: "custom-select"
          )
      end

    false ->
      number_input(form, field, opts)
  end
end
# Returns {error_message, css_class} for `field`: {nil, ""} when the field
# has no error, otherwise a humanized message plus "is-invalid" (Bootstrap's
# validation class).
def get_field_error(changeset, field) do
  changeset
  |> build_error_messages()
  |> Map.get(field)
  |> case do
    nil ->
      {nil, ""}

    # An embedded schema's errors arrive as a map — treated as no
    # top-level error for this field.
    %{} ->
      {nil, ""}

    messages ->
      error_msg =
        Kaffy.ResourceAdmin.humanize_term(field) <> " " <> Enum.join(messages, ", ") <> "!"

      {error_msg, "is-invalid"}
  end
end
# Expands a changeset's error templates into message strings, substituting
# each %{key} placeholder with its value (see build_changeset_value/1 for
# how non-string values are rendered).
defp build_error_messages(changeset) do
  Ecto.Changeset.traverse_errors(changeset, fn {msg, opts} ->
    Enum.reduce(opts, msg, fn {key, value}, acc ->
      String.replace(acc, "%{#{key}}", build_changeset_value(value))
    end)
  end)
end
# Renders an error-template interpolation value: tuples become a
# comma-separated list, everything else goes through to_string/1.
defp build_changeset_value(value) when is_tuple(value) do
  Enum.join(Tuple.to_list(value), ", ")
end

defp build_changeset_value(value), do: to_string(value)
# Top-level entry used by the form templates: renders the full form-group
# for one field — label, input, validation feedback and help text.
#
# If the field's type is itself a module exporting render_form (a custom
# Kaffy field type), rendering is delegated to it entirely.
def kaffy_input(conn, changeset, form, field, options) do
  ft = Kaffy.ResourceSchema.field_type(changeset.data.__struct__, field)

  case Kaffy.Utils.is_module(ft) && Keyword.has_key?(ft.__info__(:functions), :render_form) do
    true ->
      ft.render_form(conn, changeset, form, field, options)

    false ->
      {error_msg, error_class} = get_field_error(changeset, field)
      help_text = form_help_text({field, options})

      content_tag :div, class: "form-group #{error_class}" do
        # Booleans render their own label next to the checkbox/switch,
        # so no separate label is emitted for them.
        label_tag = if ft != :boolean, do: form_label(form, {field, options}), else: ""

        field_tag =
          form_field(changeset, form, {field, options},
            class: "form-control #{error_class}",
            conn: conn
          )

        # Invalid-feedback div plus the optional help text. (Variable name
        # "feeback" is a pre-existing typo, kept to avoid touching code.)
        field_feeback = [
          content_tag :div, class: "invalid-feedback" do
            error_msg
          end,
          content_tag :p, class: "help_text" do
            help_text
          end
        ]

        [label_tag, field_tag, field_feeback]
      end
  end
end
end
| 30.820144 | 99 | 0.565126 |
f7c5caabc4243abe8fa573364a91e5c0507d739b | 743 | ex | Elixir | lib/wabanex/imc.ex | vitorrsbarbosa/wabanex | b4e723dc3afa7c5cba20a587f90858b6276ac215 | [
"MIT"
] | null | null | null | lib/wabanex/imc.ex | vitorrsbarbosa/wabanex | b4e723dc3afa7c5cba20a587f90858b6276ac215 | [
"MIT"
] | null | null | null | lib/wabanex/imc.ex | vitorrsbarbosa/wabanex | b4e723dc3afa7c5cba20a587f90858b6276ac215 | [
"MIT"
] | null | null | null | defmodule Wabanex.IMC do
@moduledoc """
Um módulo é um agrupamento de funções
"""
# Reads the file named in the params map and parses its contents into IMC
# data (see handle_file/1 for the success/error shapes).
def calculate(%{"filename" => filename}) do
  handle_file(File.read(filename))
end
# Parses successfully-read file content into {:ok, %{name => {name, imc}}}
# data via parse_line/1.
#
# Fixes: `trim: true` drops empty lines (most text files end with a
# trailing newline — previously that blank line crashed parse_line/1 with
# a FunctionClauseError); the anonymous fn wrapper is replaced with the
# idiomatic function capture.
defp handle_file({:ok, content}) do
  data =
    content
    |> String.split("\n", trim: true)
    |> Enum.map(&parse_line/1)
    |> Enum.into(%{})

  {:ok, data}
end
# A failed File.read/1 is surfaced as a user-readable error tuple.
defp handle_file({:error, _reason}), do: {:error, "Error while opening file"}
# Parses one CSV line of the form "name,height,weight" into {name, imc}.
# NOTE(review): String.to_float/1 raises unless the number contains a
# decimal point ("70" fails, "70.0" works) — assumed input format; confirm.
defp parse_line(line) do
  line
  |> String.split(",")
  |> List.update_at(1, &String.to_float/1)
  |> List.update_at(2, &String.to_float/1)
  |> calculate_imc()
end

# IMC (BMI) = weight / height².
defp calculate_imc([name, height, weight]), do: {name, weight / (height * height)}
end
| 21.228571 | 84 | 0.597577 |
f7c5f3d16c2e753c6fbd2463e34df8734c4caf50 | 638 | ex | Elixir | lib/makeup/styles/html/pygments/igor.ex | rubysolo/makeup | e1c5d27e3f14c04904d74b94b6298bbc28a406c3 | [
"BSD-2-Clause"
] | null | null | null | lib/makeup/styles/html/pygments/igor.ex | rubysolo/makeup | e1c5d27e3f14c04904d74b94b6298bbc28a406c3 | [
"BSD-2-Clause"
] | null | null | null | lib/makeup/styles/html/pygments/igor.ex | rubysolo/makeup | e1c5d27e3f14c04904d74b94b6298bbc28a406c3 | [
"BSD-2-Clause"
] | null | null | null |
defmodule Makeup.Styles.HTML.IgorStyle do
@moduledoc false
require Makeup.Token.TokenTypes
alias Makeup.Token.TokenTypes, as: Tok

# Token type -> color/format directives for the "igor" highlighting theme.
@styles %{
  Tok.keyword => "#0000FF",
  Tok.name_class => "#007575",
  Tok.name_decorator => "#CC00A3",
  Tok.name_function => "#C34E00",
  Tok.string => "#009C00",
  Tok.comment => "italic #FF0000",
}

alias Makeup.Styles.HTML.Style

# Built once at compile time; style/0 below returns this struct.
@style_struct Style.make_style(
  short_name: "igor",
  long_name: "Igor Style",
  background_color: "#ffffff",
  highlight_color: "#ffffcc",
  styles: @styles)
# Returns the precomputed style struct.
# Fix: `@style_struct()` — a parenthesized module-attribute read — is
# deprecated syntax that warns on modern Elixir; the bare read is identical.
def style() do
  @style_struct
end
end | 21.266667 | 41 | 0.626959 |
f7c5fea645b31aaaecd4e885fae03e0ebea0086e | 933 | exs | Elixir | flatten-array/flatten_array_test.exs | ChrisWilding/exercism-elixir | ee966439b460f82ef9581611f71c2f626402d14a | [
"MIT"
] | null | null | null | flatten-array/flatten_array_test.exs | ChrisWilding/exercism-elixir | ee966439b460f82ef9581611f71c2f626402d14a | [
"MIT"
] | null | null | null | flatten-array/flatten_array_test.exs | ChrisWilding/exercism-elixir | ee966439b460f82ef9581611f71c2f626402d14a | [
"MIT"
] | null | null | null | if !System.get_env("EXERCISM_TEST_EXAMPLES") do
  # Load the student's solution (continuation of the `if
  # !System.get_env("EXERCISM_TEST_EXAMPLES")` guard opened above).
  Code.load_file("flatten_array.exs", __DIR__)
end

ExUnit.start()
ExUnit.configure(exclude: :pending, trace: true)

# Exercism "flatten-array" exercise suite: FlattenArray.flatten/1 must
# flatten arbitrarily nested lists and drop nil values.
defmodule FlattenArrayTest do
  use ExUnit.Case

  test "returns original list if there is nothing to flatten" do
    assert FlattenArray.flatten([1, 2, 3]) == [1, 2, 3]
  end

  test "flattens an empty nested list" do
    assert FlattenArray.flatten([[]]) == []
  end

  test "flattens a nested list" do
    assert FlattenArray.flatten([1, [2, [3], 4], 5, [6, [7, 8]]]) == [1, 2, 3, 4, 5, 6, 7, 8]
  end

  test "removes nil from list" do
    assert FlattenArray.flatten([1, nil, 2]) == [1, 2]
  end

  test "removes nil from a nested list" do
    assert FlattenArray.flatten([1, [2, nil, 4], 5]) == [1, 2, 4, 5]
  end

  test "returns an empty list if all values in nested list are nil" do
    assert FlattenArray.flatten([nil, [nil], [nil, [nil]]]) == []
  end
end
| 26.657143 | 93 | 0.647374 |
f7c6720eb2b71c03f2e2642a3b0d21df9599c80e | 604 | ex | Elixir | spec/support/element/trivial_pipeline.ex | mkaput/membrane-core | f65ae3d847f2c10f3ab20d0c7aa75b0faa274ec7 | [
"Apache-2.0"
] | null | null | null | spec/support/element/trivial_pipeline.ex | mkaput/membrane-core | f65ae3d847f2c10f3ab20d0c7aa75b0faa274ec7 | [
"Apache-2.0"
] | null | null | null | spec/support/element/trivial_pipeline.ex | mkaput/membrane-core | f65ae3d847f2c10f3ab20d0c7aa75b0faa274ec7 | [
"Apache-2.0"
] | null | null | null | defmodule Membrane.Support.Element.TrivialPipeline do
alias Membrane.Support.Element.{TrivialSource, TrivialFilter, TrivialSink}
use Membrane.Pipeline
@impl true
def handle_init(_) do
children = [
producer: TrivialSource,
filter: TrivialFilter,
consumer: TrivialSink
]
links = %{
{:producer, :source} => {:filter, :sink, pull_buffer: [preferred_size: 10]},
{:filter, :source} => {:consumer, :sink, pull_buffer: [preferred_size: 10]}
}
spec = %Pipeline.Spec{
children: children,
links: links
}
{{:ok, spec}, %{}}
end
end
| 23.230769 | 82 | 0.635762 |
f7c68b063d62c01003634bc98dcdf1b04371f48e | 295 | ex | Elixir | lib/helpdesk_assistant/responses/utter_help.ex | r26D/elixir-rasa-action-server | df41d36189f4ed737752343457dfe06d37a8e758 | [
"MIT"
] | 1 | 2021-04-19T18:05:10.000Z | 2021-04-19T18:05:10.000Z | lib/helpdesk_assistant/responses/utter_help.ex | r26D/elixir-rasa-action-server | df41d36189f4ed737752343457dfe06d37a8e758 | [
"MIT"
] | null | null | null | lib/helpdesk_assistant/responses/utter_help.ex | r26D/elixir-rasa-action-server | df41d36189f4ed737752343457dfe06d37a8e758 | [
"MIT"
] | null | null | null | defmodule HelpdeskAssistant.Responses.UtterHelp do
use RasaSDK.Responses.Response
# Canned bot reply for the "utter_help" response. The multiline string is
# user-facing runtime text and is preserved verbatim — including the
# pre-existing "a issue" typo, which must not be fixed in a doc-only pass.
def simple_text(), do: "I can help you open a service request ticket.
You can ask me things like \"Open an incident\", \"Help me reset my password\", or
\"I'm having a issue with my email.\""
end | 49.166667 | 88 | 0.715254 |
f7c6924c29ee40a8a4075371da156f0d301303fd | 10,032 | ex | Elixir | lib/aws/generated/timestream_write.ex | kw7oe/aws-elixir | 4ba60502dde270c83143822c9964018c7770bad7 | [
"Apache-2.0"
] | 341 | 2018-04-04T19:06:19.000Z | 2022-03-25T21:34:23.000Z | lib/aws/generated/timestream_write.ex | kw7oe/aws-elixir | 4ba60502dde270c83143822c9964018c7770bad7 | [
"Apache-2.0"
] | 82 | 2018-04-04T17:32:33.000Z | 2022-03-24T15:12:04.000Z | lib/aws/generated/timestream_write.ex | kw7oe/aws-elixir | 4ba60502dde270c83143822c9964018c7770bad7 | [
"Apache-2.0"
] | 76 | 2018-04-10T20:19:44.000Z | 2022-03-15T13:49:19.000Z | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.TimestreamWrite do
@moduledoc """
Amazon Timestream is a fast, scalable, fully managed time series database
service that makes it easy to store and analyze trillions of time series data
points per day.
With Timestream, you can easily store and analyze IoT sensor data to derive
insights from your IoT applications. You can analyze industrial telemetry to
streamline equipment management and maintenance. You can also store and analyze
log data and metrics to improve the performance and availability of your
applications. Timestream is built from the ground up to effectively ingest,
process, and store time series data. It organizes data to optimize query
processing. It automatically scales based on the volume of data ingested and on
the query volume to ensure you receive optimal performance while inserting and
querying data. As your data grows over time, Timestream’s adaptive query
processing engine spans across storage tiers to provide fast analysis while
reducing costs.
"""
alias AWS.Client
alias AWS.Request
# Machine-generated (see the module's "DO NOT EDIT" header): static
# wire-protocol configuration consumed by AWS.Request for signing and
# routing every call in this module.
def metadata do
  %AWS.ServiceMetadata{
    abbreviation: "Timestream Write",
    api_version: "2018-11-01",
    content_type: "application/x-amz-json-1.0",
    credential_scope: nil,
    endpoint_prefix: "ingest.timestream",
    global?: false,
    protocol: "json",
    service_id: "Timestream Write",
    signature_version: "v4",
    signing_name: "timestream",
    target_prefix: "Timestream_20181101"
  }
end
@doc """
Creates a new Timestream database.
If the KMS key is not specified, the database will be encrypted with a
Timestream managed KMS key located in your account. Refer to [AWS managed KMS keys](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-managed-cmk)
for more info. Service quotas apply. For more information, see [Access Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
in the Timestream Developer Guide.
"""
def create_database(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateDatabase", input, options)
end
@doc """
The CreateTable operation adds a new table to an existing database in your
account.
In an AWS account, table names must be at least unique within each Region if
they are in the same database. You may have identical table names in the same
Region if the tables are in seperate databases. While creating the table, you
must specify the table name, database name, and the retention properties.
Service quotas apply. For more information, see [Access Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
in the Timestream Developer Guide.
"""
def create_table(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateTable", input, options)
end
@doc """
Deletes a given Timestream database.
*This is an irreversible operation. After a database is deleted, the time series
data from its tables cannot be recovered.*
All tables in the database must be deleted first, or a ValidationException error
will be thrown.
Due to the nature of distributed retries, the operation can return either
success or a ResourceNotFoundException. Clients should consider them equivalent.
"""
def delete_database(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteDatabase", input, options)
end
@doc """
Deletes a given Timestream table.
This is an irreversible operation. After a Timestream database table is deleted,
the time series data stored in the table cannot be recovered.
Due to the nature of distributed retries, the operation can return either
success or a ResourceNotFoundException. Clients should consider them equivalent.
"""
def delete_table(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteTable", input, options)
end
@doc """
Returns information about the database, including the database name, time that
the database was created, and the total number of tables found within the
database.
Service quotas apply. For more information, see [Access Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
in the Timestream Developer Guide.
"""
def describe_database(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeDatabase", input, options)
end
@doc """
DescribeEndpoints returns a list of available endpoints to make Timestream API
calls against.
This API is available through both Write and Query.
Because Timestream’s SDKs are designed to transparently work with the service’s
architecture, including the management and mapping of the service endpoints, *it
is not recommended that you use this API unless*:
* Your application uses a programming language that does not yet
have SDK support
* You require better control over the client-side implementation
For detailed information on how to use DescribeEndpoints, see [The Endpoint Discovery Pattern and REST
APIs](https://docs.aws.amazon.com/timestream/latest/developerguide/Using-API.endpoint-discovery.html).
"""
def describe_endpoints(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeEndpoints", input, options)
end
@doc """
Returns information about the table, including the table name, database name,
retention duration of the memory store and the magnetic store.
Service quotas apply. For more information, see [Access Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
in the Timestream Developer Guide.
"""
def describe_table(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeTable", input, options)
end
@doc """
Returns a list of your Timestream databases.
Service quotas apply. For more information, see [Access Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
in the Timestream Developer Guide.
"""
def list_databases(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListDatabases", input, options)
end
@doc """
A list of tables, along with the name, status and retention properties of each
table.
"""
def list_tables(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTables", input, options)
end
@doc """
List all tags on a Timestream resource.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Associate a set of tags with a Timestream resource.
You can then activate these user-defined tags so that they appear on the Billing
and Cost Management console for cost allocation tracking.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Removes the association of tags from a Timestream resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Modifies the KMS key for an existing database.
While updating the database, you must specify the database name and the
identifier of the new KMS key to be used (`KmsKeyId`). If there are any
concurrent `UpdateDatabase` requests, first writer wins.
"""
def update_database(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateDatabase", input, options)
end
@doc """
Modifies the retention duration of the memory store and magnetic store for your
Timestream table.
Note that the change in retention duration takes effect immediately. For
example, if the retention period of the memory store was initially set to 2
hours and then changed to 24 hours, the memory store will be capable of holding
24 hours of data, but will be populated with 24 hours of data 22 hours after
this change was made. Timestream does not retrieve data from the magnetic store
to populate the memory store.
Service quotas apply. For more information, see [Access Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
in the Timestream Developer Guide.
"""
def update_table(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateTable", input, options)
end
@doc """
The WriteRecords operation enables you to write your time series data into
Timestream.
You can specify a single data point or a batch of data points to be inserted
into the system. Timestream offers you with a flexible schema that auto detects
the column names and data types for your Timestream tables based on the
dimension names and data types of the data points you specify when invoking
writes into the database. Timestream support eventual consistency read
semantics. This means that when you query data immediately after writing a batch
of data into Timestream, the query results might not reflect the results of a
recently completed write operation. The results may also include some stale
data. If you repeat the query request after a short time, the results should
return the latest data. Service quotas apply. For more information, see [Access Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
in the Timestream Developer Guide.
"""
def write_records(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "WriteRecords", input, options)
end
end
| 42.871795 | 170 | 0.745514 |
f7c69f646faa1c0123e4ecf6a5ac3c89943563b5 | 1,658 | ex | Elixir | lib/mix/lib/mix/shell/io.ex | jquadrin/elixir | 98746e08eaa2bf58c202e8500b6cf83ed2368cc0 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/shell/io.ex | jquadrin/elixir | 98746e08eaa2bf58c202e8500b6cf83ed2368cc0 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/shell/io.ex | jquadrin/elixir | 98746e08eaa2bf58c202e8500b6cf83ed2368cc0 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Shell.IO do
@moduledoc """
This is Mix's default shell.
It simply prints messages to stdio and stderr.
"""
@behaviour Mix.Shell
@doc """
Prints the currently running application if it
was not printed yet.
"""
def print_app do
  # `printable_app_name` is nil once the banner was already printed, so the
  # match-in-`if` both binds the name and acts as the print-once guard.
  if name = Mix.Shell.printable_app_name do
    IO.puts "==> #{name}"
  end
end

@doc """
Executes the given command and prints its output
to stdout as it comes.
"""
def cmd(command, opts \\ []) do
  # Callers can silence the "==> app" banner with `print_app: false`.
  print_app? = Keyword.get(opts, :print_app, true)
  Mix.Shell.cmd(command, fn data ->
    if print_app?, do: print_app()
    IO.write(data)
  end)
end
@doc """
Writes a message to the shell followed by new line.
"""
def info(message) do
  print_app
  # IO.ANSI.format/1 resolves color atoms, emitting codes only when enabled.
  IO.puts IO.ANSI.format message
end

@doc """
Writes an error message to the shell followed by new line.
"""
def error(message) do
  print_app
  # Errors go to stderr, rendered bright red (see red/1).
  IO.puts :stderr, IO.ANSI.format(red(message))
end

@doc """
Writes a message to the shell followed by prompting the user for
input. Input will be consumed until enter is pressed.
"""
def prompt(message) do
  print_app
  IO.gets message <> " "
end
@doc """
Receives a message and asks the user if they want to proceed.
The user must press enter, or type anything that matches the "yes"
regex `~r/^(Y(es)?)?$/i` — so an empty answer also counts as yes.
"""
def yes?(message) do
  print_app
  got_yes? IO.gets(message <> " [Yn] ")
end

# Plain enter, "y", "Y" or "yes" (any case) all count as yes.
defp got_yes?(answer) when is_binary(answer) do
  answer =~ ~r/^(Y(es)?)?$/i
end

# The io server may return :eof or :error
defp got_yes?(_), do: false
# Wraps a message in ANSI directives for bright red output
# (consumed by IO.ANSI.format/1 in error/1).
defp red(message), do: [:red, :bright, message]
end
| 21.25641 | 67 | 0.638118 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.