hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f7f2d34a59fa5dce753315c7c88ad18238ac0258 | 2,897 | ex | Elixir | lib/exchange_api_web/live/order_book_live.ex | realyarilabs/exchange_api | c7dd9af9356277a022b164675cc1622359af8a76 | [
"Apache-2.0"
] | 3 | 2020-08-10T10:09:26.000Z | 2020-08-28T08:41:36.000Z | lib/exchange_api_web/live/order_book_live.ex | realyarilabs/exchange_api | c7dd9af9356277a022b164675cc1622359af8a76 | [
"Apache-2.0"
] | 30 | 2020-08-17T10:38:24.000Z | 2022-02-28T07:06:42.000Z | lib/exchange_api_web/live/order_book_live.ex | realyarilabs/exchange_api | c7dd9af9356277a022b164675cc1622359af8a76 | [
"Apache-2.0"
] | 1 | 2020-09-17T13:08:47.000Z | 2020-09-17T13:08:47.000Z | defmodule ExchangeApiWeb.OrderBookLive do
@moduledoc false
use ExchangeApiWeb, :live_view
alias ExchangeApiWeb.Ticker
# LiveView entry point: mounts the order book for `ticker` and, once the
# websocket is connected, schedules a `:tick` message every second so the
# displayed market data refreshes continuously.
def mount(%{"ticker" => ticker}, _session, socket) do
  if connected?(socket), do: :timer.send_interval(1000, self(), :tick)

  {:ok, assign(socket, [ticker: ticker] ++ market_assigns(ticker))}
end

# Periodic refresh: re-fetches every order book statistic for the mounted
# ticker and updates the socket assigns (the :ticker assign never changes
# after mount, so it is not re-assigned here).
def handle_info(:tick, socket) do
  {:noreply, assign(socket, market_assigns(socket.assigns.ticker))}
end

# Fetches all order book statistics shown by the view and returns them as a
# keyword list of assigns. Shared by mount/3 and handle_info/2, which
# previously duplicated this fetching code line-for-line.
defp market_assigns(ticker) do
  {:ok, tick} = Ticker.get_ticker(ticker)
  {:ok, open_orders} = Exchange.open_orders(tick)
  {:ok, last_price_sell} = Exchange.last_price(tick, :sell)
  {:ok, last_price_buy} = Exchange.last_price(tick, :buy)
  {:ok, last_size_sell} = Exchange.last_size(tick, :sell)
  {:ok, last_size_buy} = Exchange.last_size(tick, :buy)
  {:ok, spread} = Exchange.spread(tick)
  {:ok, highest_ask_volume} = Exchange.highest_ask_volume(tick)
  {:ok, lowest_ask_price} = Exchange.lowest_ask_price(tick)
  {:ok, highest_bid_volume} = Exchange.highest_bid_volume(tick)
  {:ok, highest_bid_price} = Exchange.highest_bid_price(tick)

  sell_orders = Enum.filter(open_orders, &(&1.side == :sell))
  buy_orders = Enum.filter(open_orders, &(&1.side == :buy))

  [
    last_price_sell: last_price_sell,
    last_price_buy: last_price_buy,
    last_size_sell: last_size_sell,
    last_size_buy: last_size_buy,
    sell_orders: sell_orders,
    buy_orders: buy_orders,
    spread: spread,
    highest_ask_volume: highest_ask_volume,
    lowest_ask_price: lowest_ask_price,
    highest_bid_volume: highest_bid_volume,
    highest_bid_price: highest_bid_price
  ]
end
end
| 39.684932 | 81 | 0.698309 |
f7f34b92c7b7677ed0e3856ea8e2bea65a24cef5 | 205 | ex | Elixir | lib/oli_web/plugs/set_live_csrf.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 45 | 2020-04-17T15:40:27.000Z | 2022-03-25T00:13:30.000Z | lib/oli_web/plugs/set_live_csrf.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 944 | 2020-02-13T02:37:01.000Z | 2022-03-31T17:50:07.000Z | lib/oli_web/plugs/set_live_csrf.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 23 | 2020-07-28T03:36:13.000Z | 2022-03-17T14:29:02.000Z | defmodule OliWeb.SetLiveCSRF do
import Plug.Conn, only: [put_session: 3]
# Plug init/1 callback: this plug takes no options, so pass through nil.
def init(_opts), do: nil
# Plug call/2 callback: stores the current CSRF token in the session under
# :csrf_token — presumably so LiveView connections can read it (module name
# suggests this; confirm against the socket/layout that consumes it).
def call(conn, _opts), do: put_session(conn, :csrf_token, Phoenix.Controller.get_csrf_token())
end
| 25.625 | 96 | 0.741463 |
f7f36ed85a229318d15e4b1e0d2ab55edc1a927e | 1,075 | exs | Elixir | mix.exs | acu-online/canvas | 594e435abe8907097a3a66d25627e96cb940b07a | [
"MIT"
] | null | null | null | mix.exs | acu-online/canvas | 594e435abe8907097a3a66d25627e96cb940b07a | [
"MIT"
] | null | null | null | mix.exs | acu-online/canvas | 594e435abe8907097a3a66d25627e96cb940b07a | [
"MIT"
] | null | null | null | defmodule Canvas.MixProject do
use Mix.Project
# Mix project definition for the :canvas package (Canvas LMS API client).
# Package metadata lives in package/0 and dependencies in deps/0.
def project do
[
app: :canvas,
version: "0.0.1",
elixir: "~> 1.10",
description: "Elixir client for the Canvas LMS API",
start_permanent: Mix.env() == :prod,
package: package(),
deps: deps()
]
end
# OTP application configuration ("mix help compile.app"): beyond the
# defaults, only the :logger application needs to be started.
def application do
  [extra_applications: [:logger]]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# HTTP client and JSON codec used for the Canvas REST API.
{:httpoison, "~> 1.5"},
{:poison, "~> 4.0"},
# Query-string encoding for request parameters.
{:uri_query, "~> 0.1.1"},
# Test/dev tooling: HTTP cassette recording, docs, static analysis.
{:exvcr, "~> 0.10", only: :test},
{:ex_doc, "~> 0.19", only: :dev, runtime: false},
{:dialyxir, "~> 1.0.0-rc.6", only: [:dev], runtime: false}
]
end
# Hex package metadata (files to publish, maintainers, license, links).
defp package do
[
files: ["lib", "mix.exs", "*.md"],
maintainers: ["John Kaczmarek"],
licenses: ["MIT"],
links: %{
"GitHub" => "https://github.com/acu-dev/canvas",
"API docs" => "https://canvas.instructure.com/doc/api/index.html"
}
]
end
end
| 22.87234 | 73 | 0.531163 |
f7f39d86d035dfc71ac3198bc0993b728666c91f | 372 | ex | Elixir | mysite/web/views/error_view.ex | palm86/Your-first-Phoenix-app-tutorial | e7de041e141254da6a731aead3a180fb9bd20c79 | [
"MIT"
] | null | null | null | mysite/web/views/error_view.ex | palm86/Your-first-Phoenix-app-tutorial | e7de041e141254da6a731aead3a180fb9bd20c79 | [
"MIT"
] | null | null | null | mysite/web/views/error_view.ex | palm86/Your-first-Phoenix-app-tutorial | e7de041e141254da6a731aead3a180fb9bd20c79 | [
"MIT"
] | null | null | null | defmodule Mysite.ErrorView do
use Mysite.Web, :view
# Static error pages rendered when a request fails.
def render("404.html", _assigns), do: "Page not found"

def render("500.html", _assigns), do: "Internal server error"

# Fallback invoked when no render clause matches or the requested template
# does not exist: degrade to the generic 500 response.
def template_not_found(_template, assigns) do
  render("500.html", assigns)
end
end
| 20.666667 | 47 | 0.696237 |
f7f3c93d7fcf2f0913ec3534c3d34eb14525130a | 10,702 | exs | Elixir | test/mongooseice/udp/allocate_test.exs | glassechidna/MongooseICE | c2ea99f47460fd7293b51eaa72fbce122a60affe | [
"Apache-2.0"
] | null | null | null | test/mongooseice/udp/allocate_test.exs | glassechidna/MongooseICE | c2ea99f47460fd7293b51eaa72fbce122a60affe | [
"Apache-2.0"
] | null | null | null | test/mongooseice/udp/allocate_test.exs | glassechidna/MongooseICE | c2ea99f47460fd7293b51eaa72fbce122a60affe | [
"Apache-2.0"
] | null | null | null | defmodule MongooseICE.UDP.AllocateTest do
use ExUnit.Case
use Helper.Macros
alias Helper.UDP
alias Jerboa.Params
alias Jerboa.Format
alias Jerboa.Format.Body.Attribute.{XORMappedAddress, Lifetime,
XORRelayedAddress, ErrorCode,
RequestedTransport, EvenPort,
ReservationToken, Lifetime}
require Integer
# Basic STUN/TURN Allocate request handling over a single IPv4 client socket.
describe "allocate request" do
setup do
{:ok, [udp: UDP.setup_connection([], :ipv4)]}
end
# A request with no REQUESTED-TRANSPORT attribute must be rejected with 400.
test "fails without RequestedTransport attribute", ctx do
udp = ctx.udp
id = Params.generate_id()
req = UDP.allocate_request(id, [])
resp = no_auth(UDP.communicate(udp, 0, req))
params = Format.decode!(resp)
assert %Params{class: :failure,
method: :allocate,
identifier: ^id,
attributes: [error]} = params
assert %ErrorCode{code: 400} = error
end
# EVEN-PORT and RESERVATION-TOKEN are mutually exclusive -> 400 Bad Request.
test "fails if EvenPort and ReservationToken are supplied", ctx do
udp = ctx.udp
id = Params.generate_id()
req = UDP.allocate_request(id, [
%RequestedTransport{protocol: :udp},
%EvenPort{},
%ReservationToken{value: "12345678"}
])
resp = no_auth(UDP.communicate(udp, 0, req))
params = Format.decode!(resp)
assert %Params{class: :failure,
method: :allocate,
identifier: ^id,
attributes: [error]} = params
assert %ErrorCode{code: 400} = error
end
# Successful allocation carries exactly LIFETIME, XOR-MAPPED-ADDRESS and
# XOR-RELAYED-ADDRESS; the relay port must differ from the server's own port.
test "returns response with IPv4 XOR relayed address attribute", ctx do
udp = ctx.udp
%{server_address: server_address, client_address: client_address} = udp
client_port = UDP.client_port(udp, 0)
id = Params.generate_id()
req = UDP.allocate_request(id)
resp = no_auth(UDP.communicate(udp, 0, req))
params = Format.decode!(resp)
assert %Params{class: :success,
method: :allocate,
identifier: ^id,
attributes: attrs} = params
[lifetime, mapped, relayed] = Enum.sort(attrs)
assert %Lifetime{duration: 600} = lifetime
assert %XORMappedAddress{address: ^client_address,
port: ^client_port,
family: :ipv4} = mapped
assert %XORRelayedAddress{address: ^server_address,
port: relayed_port,
family: :ipv4} = relayed
assert relayed_port != udp.server_port
end
# Re-allocating from the same 5-tuple with a new transaction id -> 437
# (allocation mismatch).
test "returns error after second allocation with different id", ctx do
udp = ctx.udp
id1 = Params.generate_id()
id2 = Params.generate_id()
req1 = UDP.allocate_request(id1)
req2 = UDP.allocate_request(id2)
resp1 = no_auth(UDP.communicate(udp, 0, req1))
params1 = Format.decode!(resp1)
assert %Params{class: :success,
method: :allocate,
identifier: ^id1} = params1
resp2 = no_auth(UDP.communicate(udp, 0, req2))
params2 = Format.decode!(resp2)
assert %Params{class: :failure,
method: :allocate,
identifier: ^id2,
attributes: [error]} = params2
assert %ErrorCode{code: 437} = error
end
# Retransmission with the same transaction id is idempotent and succeeds.
test "returns success after second allocation with the same id", ctx do
udp = ctx.udp
id = Params.generate_id()
req = UDP.allocate_request(id)
resp1 = no_auth(UDP.communicate(udp, 0, req))
params1 = Format.decode!(resp1)
assert %Params{class: :success,
method: :allocate,
identifier: ^id} = params1
resp2 = no_auth(UDP.communicate(udp, 0, req))
params2 = Format.decode!(resp2)
assert %Params{class: :success,
method: :allocate,
identifier: ^id,
attributes: attrs} = params2
assert 3 = length(attrs)
end
end
# EVEN-PORT attribute handling: even relay ports and next-port reservation.
describe "allocate request with EVEN-PORT attribute" do
# Repeated allocations with EVEN-PORT must always yield an even relay port.
test "allocates an even port" do
addr = {127, 0, 0, 1}
for _ <- 1..10 do
udp = UDP.connect(addr, addr, 1)
id = Params.generate_id()
req = UDP.allocate_request(id, [
%RequestedTransport{protocol: :udp},
%EvenPort{}
])
resp = no_auth(UDP.communicate(udp, 0, req))
params = Format.decode!(resp)
assert %Params{class: :success,
method: :allocate,
identifier: ^id} = params
%XORRelayedAddress{port: relay_port} = Params.get_attr(params, XORRelayedAddress)
assert Integer.is_even(relay_port)
UDP.close(udp)
end
end
test "reserves a higher port if requested" do
## given a TURN server
addr = {127, 0, 0, 1}
## when allocating a UDP relay address with an even port
## and reserving the next port
udp1 = UDP.connect(addr, addr, 1)
on_exit fn -> UDP.close(udp1) end
params1 = UDP.allocate(udp1, attributes: [
%RequestedTransport{protocol: :udp},
%EvenPort{reserved?: true}
])
%XORRelayedAddress{port: relay_port1} = Params.get_attr(params1, XORRelayedAddress)
reservation_token = Params.get_attr(params1, ReservationToken)
## then the next allocation with a RESERVATION-TOKEN
## allocates a relay address with the reserved port
udp2 = UDP.connect(addr, addr, 1)
on_exit fn -> UDP.close(udp2) end
params2 = UDP.allocate(udp2, attributes: [reservation_token])
%XORRelayedAddress{port: relay_port2} = Params.get_attr(params2, XORRelayedAddress)
assert Integer.is_even(relay_port1)
assert relay_port2 == relay_port1 + 1
end
end
# Allocation lifetime: expiry is simulated by mocking MongooseICE.Time to a
# point past the allocation's timeout, then poking the server with traffic.
describe "allocation" do
import Mock
setup ctx do
{:ok, [udp: UDP.setup_connection(ctx)]}
end
test "expires after timeout", ctx do
## given an existing allocation
client_id = 0
UDP.allocate(ctx.udp)
## when its timeout is reached
mref = Helper.Allocation.monitor_owner(ctx)
now = MongooseICE.Time.system_time(:second)
future = now + 10_000
with_mock MongooseICE.Time, [system_time: fn(:second) -> future end] do
## send indication to trigger timeout
:ok = UDP.send(ctx.udp, client_id, UDP.binding_indication(Params.generate_id()))
## then the allocation is deleted
assert_receive {:DOWN, ^mref, :process, _pid, _info}, 3_000
assert called MongooseICE.Time.system_time(:second)
end
end
end
# Reservation lifetime: a reserved port (EVEN-PORT reserved?: true) must stop
# being redeemable once its timer fires or its parent allocation goes away.
describe "reservation" do
import Mock
test "expires after timeout", _ctx do
## Set reservation timeout to 1 second
with_mock MongooseICE.TURN.Reservation, [:passthrough], [default_timeout: fn() -> 1 end] do
## given a TURN server
addr = {127, 0, 0, 1}
## given the allocation
udp1 = UDP.connect(addr, addr, 1)
on_exit fn -> UDP.close(udp1) end
params1 = UDP.allocate(udp1, attributes: [
%RequestedTransport{protocol: :udp},
%EvenPort{reserved?: true}
])
reservation_token = Params.get_attr(params1, ReservationToken)
## when reservation lifetime ends
Process.sleep(1500)
## then the reservation expires
udp2 = UDP.connect(addr, addr, 1)
on_exit fn -> UDP.close(udp2) end
id = Params.generate_id()
req = UDP.allocate_request(id, [
reservation_token,
%RequestedTransport{protocol: :udp}
])
resp = no_auth(UDP.communicate(udp2, 0, req))
params = Format.decode!(resp)
assert %Params{class: :failure,
method: :allocate,
identifier: ^id,
attributes: [error]} = params
assert %ErrorCode{name: :insufficient_capacity} = error
end
end
test "expires if original allocation is deleted" do
## given a TURN server
addr = {127, 0, 0, 1}
## given the allocation
udp1 = UDP.connect(addr, addr, 1)
on_exit fn -> UDP.close(udp1) end
params1 = UDP.allocate(udp1, attributes: [
%RequestedTransport{protocol: :udp},
%EvenPort{reserved?: true}
])
reservation_token = Params.get_attr(params1, ReservationToken)
## when the reservation is manually removed
UDP.refresh(udp1, [%Lifetime{duration: 0}])
## when cleanups have finished
Process.sleep(100)
## then the reservation expires
udp2 = UDP.connect(addr, addr, 1)
on_exit fn -> UDP.close(udp2) end
id = Params.generate_id()
req = UDP.allocate_request(id, [
reservation_token,
%RequestedTransport{protocol: :udp}
])
resp = no_auth(UDP.communicate(udp2, 0, req))
params = Format.decode!(resp)
assert %Params{class: :failure,
method: :allocate,
identifier: ^id,
attributes: [error]} = params
assert %ErrorCode{name: :insufficient_capacity} = error
end
test "expires if original allocation expires" do
## given a TURN server
addr = {127, 0, 0, 1}
## given the allocation
udp1 = UDP.connect(addr, addr, 1)
on_exit fn -> UDP.close(udp1) end
params1 = UDP.allocate(udp1, attributes: [
%RequestedTransport{protocol: :udp},
%EvenPort{reserved?: true}
])
reservation_token = Params.get_attr(params1, ReservationToken)
## when the allocation timeouts
now = MongooseICE.Time.system_time(:second)
future = now + 10_000
with_mock MongooseICE.Time, [system_time: fn(:second) -> future end] do
## send indication to trigger timeout
:ok = UDP.send(udp1, 0, UDP.binding_indication(Params.generate_id()))
assert eventually called MongooseICE.Time.system_time(:second)
end
## when cleanups have finished
Process.sleep(100)
## then the reservation expires
udp2 = UDP.connect(addr, addr, 1)
on_exit fn -> UDP.close(udp2) end
id = Params.generate_id()
req = UDP.allocate_request(id, [
reservation_token,
%RequestedTransport{protocol: :udp}
])
resp = no_auth(UDP.communicate(udp2, 0, req))
params = Format.decode!(resp)
assert %Params{class: :failure,
method: :allocate,
identifier: ^id,
attributes: [error]} = params
assert %ErrorCode{name: :insufficient_capacity} = error
end
end
end
| 33.236025 | 97 | 0.590264 |
f7f420938c65c14390195443e249b31e3f443a43 | 1,000 | ex | Elixir | apps/concierge_site/lib/controllers/digest_feedback_controller.ex | mbta/alerts_concierge | d8e643445ef06f80ca273f2914c6959daea146f6 | [
"MIT"
] | null | null | null | apps/concierge_site/lib/controllers/digest_feedback_controller.ex | mbta/alerts_concierge | d8e643445ef06f80ca273f2914c6959daea146f6 | [
"MIT"
] | 21 | 2021-03-12T17:05:30.000Z | 2022-02-16T21:48:35.000Z | apps/concierge_site/lib/controllers/digest_feedback_controller.ex | mbta/alerts_concierge | d8e643445ef06f80ca273f2914c6959daea146f6 | [
"MIT"
] | 1 | 2021-12-09T15:09:53.000Z | 2021-12-09T15:09:53.000Z | defmodule ConciergeSite.DigestFeedbackController do
use ConciergeSite.Web, :controller
alias ConciergeSite.DigestFeedback
alias DigestFeedback.DigestRating, as: DigestRating
alias DigestFeedback.DigestRatingReason, as: DigestRatingReason
# Records a digest rating submitted by the user. A "yes" rating only needs a
# thank-you page; any other rating is shown the follow-up form asking for a
# reason. Payloads that fail to parse render the error page.
def feedback(conn, params) do
  case DigestFeedback.parse_digest_rating(params) do
    {:ok, %DigestRating{rating: "yes"} = digest_rating} ->
      DigestFeedback.log_digest_rating(digest_rating)
      render(conn, "thanks.html")

    {:ok, %DigestRating{} = digest_rating} ->
      DigestFeedback.log_digest_rating(digest_rating)
      render(conn, "form.html")

    {:error, _error} ->
      render(conn, "error.html")
  end
end
# Records the free-form reason accompanying a negative digest rating and
# replies with a JSON status; invalid payloads get a JSON error response.
def new(conn, params) do
case DigestFeedback.parse_digest_rating_reason(params) do
{:ok, %DigestRatingReason{} = digest_rating_reason} ->
DigestFeedback.log_digest_rating_reason(digest_rating_reason)
json(conn, %{status: "ok"})
{:error, _error} ->
json(conn, %{status: "error", error: "invalid input"})
end
end
end
| 33.333333 | 75 | 0.699 |
f7f440efeb84f169b68f6b68dea7c2cbfb6b796f | 200 | exs | Elixir | pop_kube/test/pop_kube_web/controllers/page_controller_test.exs | pastleo/k8s-challenge-2021 | 83e4403344292e1de53ef901cf2c3457bbeb24dd | [
"MIT"
] | 1 | 2022-01-10T14:55:55.000Z | 2022-01-10T14:55:55.000Z | pop_kube/test/pop_kube_web/controllers/page_controller_test.exs | pastleo/k8s-challenge-2021 | 83e4403344292e1de53ef901cf2c3457bbeb24dd | [
"MIT"
] | null | null | null | pop_kube/test/pop_kube_web/controllers/page_controller_test.exs | pastleo/k8s-challenge-2021 | 83e4403344292e1de53ef901cf2c3457bbeb24dd | [
"MIT"
] | null | null | null | defmodule PopKubeWeb.PageControllerTest do
use PopKubeWeb.ConnCase
# Smoke test: the root page renders with HTTP 200 and the default greeting.
test "GET /", %{conn: conn} do
conn = get(conn, "/")
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 22.222222 | 60 | 0.68 |
f7f458f8c8327f30b60a54e21b5b6491a660c723 | 7,164 | exs | Elixir | test/absinthe/schema/notation/experimental/import_sdl_test.exs | RadekMolenda/absinthe | 9ca4d391c76df701174b3b306a9dec021c74008e | [
"MIT"
] | null | null | null | test/absinthe/schema/notation/experimental/import_sdl_test.exs | RadekMolenda/absinthe | 9ca4d391c76df701174b3b306a9dec021c74008e | [
"MIT"
] | null | null | null | test/absinthe/schema/notation/experimental/import_sdl_test.exs | RadekMolenda/absinthe | 9ca4d391c76df701174b3b306a9dec021c74008e | [
"MIT"
] | null | null | null | defmodule Absinthe.Schema.Notation.Experimental.ImportSdlTest do
use Absinthe.Case
import ExperimentalNotationHelpers
@moduletag :experimental
@moduletag :sdl
defmodule WithFeatureDirective do
use Absinthe.Schema.Prototype
directive :feature do
arg :name, non_null(:string)
on [:interface]
end
end
defmodule Definition do
use Absinthe.Schema
@prototype_schema WithFeatureDirective
# Embedded SDL
import_sdl """
directive @foo(name: String!) on SCALAR | OBJECT
directive @bar(name: String!) on SCALAR | OBJECT
type Query {
"A list of posts"
posts(filter: PostFilter, reverse: Boolean): [Post]
admin: User!
droppedField: String
}
type Comment {
author: User!
subject: Post!
order: Int
deprecatedField: String @deprecated
deprecatedFieldWithReason: String @deprecated(reason: "Reason")
}
enum Category {
NEWS
OPINION
}
enum PostState {
SUBMITTED
ACCEPTED
REJECTED
}
interface Named {
name: String!
}
interface Titled @feature(name: "bar") {
title: String!
}
scalar B
union SearchResult = Post | User
union Content = Post | Comment
"""
# Read SDL from file manually at compile-time
import_sdl File.read!("test/support/fixtures/import_sdl_binary_fn.graphql")
# Read from file at compile time (with support for automatic recompilation)
import_sdl path: "test/support/fixtures/import_sdl_path_option.graphql"
import_sdl path: Path.join("test/support", "fixtures/import_sdl_path_option_fn.graphql")
def get_posts(_, _, _) do
posts = [
%{title: "Foo", body: "A body.", author: %{name: "Bruce"}},
%{title: "Bar", body: "A body.", author: %{name: "Ben"}}
]
{:ok, posts}
end
def upcase_title(post, _, _) do
{:ok, Map.get(post, :title) |> String.upcase()}
end
def hydrate(%{identifier: :admin}, [%{identifier: :query} | _]) do
{:description, "The admin"}
end
def hydrate(%{identifier: :filter}, [%{identifier: :posts} | _]) do
{:description, "A filter argument"}
end
def hydrate(%{identifier: :posts}, [%{identifier: :query} | _]) do
{:resolve, &__MODULE__.get_posts/3}
end
def hydrate(%Absinthe.Blueprint{}, _) do
%{
query: %{
posts: %{
reverse: {:description, "Just reverse the list, if you want"}
}
},
post: %{
upcased_title: [
{:description, "The title, but upcased"},
{:resolve, &__MODULE__.upcase_title/3}
]
}
}
end
def hydrate(_node, _ancestors) do
[]
end
end
describe "custom prototype schema" do
test "is set" do
assert Definition.__absinthe_prototype_schema__() == WithFeatureDirective
end
end
describe "locations" do
test "have evaluated file values" do
Absinthe.Blueprint.prewalk(Definition.__absinthe_blueprint__(), nil, fn
%{__reference__: %{location: %{file: file}}} = node, _ ->
assert is_binary(file)
{node, nil}
node, _ ->
{node, nil}
end)
end
end
describe "directives" do
test "can be defined" do
assert %{name: "foo", identifier: :foo, locations: [:object, :scalar]} =
lookup_compiled_directive(Definition, :foo)
assert %{name: "bar", identifier: :bar, locations: [:object, :scalar]} =
lookup_compiled_directive(Definition, :bar)
end
end
describe "deprecations" do
test "can be defined without a reason" do
object = lookup_compiled_type(Definition, :comment)
assert %{deprecation: %{}} = object.fields.deprecated_field
end
test "can be defined with a reason" do
object = lookup_compiled_type(Definition, :comment)
assert %{deprecation: %{reason: "Reason"}} = object.fields.deprecated_field_with_reason
end
end
describe "query root type" do
test "is defined" do
assert %{name: "Query", identifier: :query} = lookup_type(Definition, :query)
end
test "defines fields" do
assert %{name: "posts"} = lookup_field(Definition, :query, :posts)
end
end
describe "non-root type" do
test "is defined" do
assert %{name: "Post", identifier: :post} = lookup_type(Definition, :post)
end
test "defines fields" do
assert %{name: "title"} = lookup_field(Definition, :post, :title)
assert %{name: "body"} = lookup_field(Definition, :post, :body)
end
end
describe "descriptions" do
test "work on objects" do
assert %{description: "A submitted post"} = lookup_type(Definition, :post)
end
test "work on fields" do
assert %{description: "A list of posts"} = lookup_field(Definition, :query, :posts)
end
test "work on fields, defined deeply" do
assert %{description: "The title, but upcased"} =
lookup_compiled_field(Definition, :post, :upcased_title)
end
test "work on arguments, defined deeply" do
assert %{description: "Just reverse the list, if you want"} =
lookup_compiled_argument(Definition, :query, :posts, :reverse)
end
test "can be multiline" do
assert %{description: "The post author\n(is a user)"} =
lookup_field(Definition, :post, :author)
end
test "can be added by hydrating a field" do
assert %{description: "The admin"} = lookup_compiled_field(Definition, :query, :admin)
end
test "can be added by hydrating an argument" do
field = lookup_compiled_field(Definition, :query, :posts)
assert %{description: "A filter argument"} = field.args.filter
end
end
describe "resolve" do
test "work on fields, defined deeply" do
assert %{middleware: mw} = lookup_compiled_field(Definition, :post, :upcased_title)
assert length(mw) > 0
end
end
describe "multiple invocations" do
test "can add definitions" do
assert %{name: "User", identifier: :user} = lookup_type(Definition, :user)
end
end
@query """
{ admin { name } }
"""
describe "execution with root_value" do
test "works" do
assert {:ok, %{data: %{"admin" => %{"name" => "Bruce"}}}} =
Absinthe.run(@query, Definition, root_value: %{admin: %{name: "Bruce"}})
end
end
@query """
{ posts { title } }
"""
describe "execution with hydration-defined resolvers" do
test "works" do
assert {:ok, %{data: %{"posts" => [%{"title" => "Foo"}, %{"title" => "Bar"}]}}} =
Absinthe.run(@query, Definition)
end
end
@tag :pending
@query """
{ posts { upcasedTitle } }
"""
describe "execution with deeply hydration-defined resolvers" do
test "works" do
assert {:ok,
%{data: %{"posts" => [%{"upcasedTitle" => "FOO"}, %{"upcasedTitle" => "BAR"}]}}} =
Absinthe.run(@query, Definition)
end
end
describe "Absinthe.Schema.used_types/1" do
test "works" do
assert Absinthe.Schema.used_types(Definition)
end
end
end
| 26.63197 | 96 | 0.61502 |
f7f4700a1748d5c4d755632033b2bd259e8dae6e | 89 | ex | Elixir | lib/planner/web/views/todo_list_view.ex | sprql/planner | 9f42b34dc511bfe1668b7092d0a68924b9dc9501 | [
"MIT"
] | null | null | null | lib/planner/web/views/todo_list_view.ex | sprql/planner | 9f42b34dc511bfe1668b7092d0a68924b9dc9501 | [
"MIT"
] | null | null | null | lib/planner/web/views/todo_list_view.ex | sprql/planner | 9f42b34dc511bfe1668b7092d0a68924b9dc9501 | [
"MIT"
] | null | null | null | defmodule Planner.Web.TodoListView do
use Planner.Web, :view
alias Planner.Todo
end
| 14.833333 | 37 | 0.775281 |
f7f4932f36ca7b8a01c394fa8c05b3a3a75247ed | 950 | exs | Elixir | mix.exs | kacperduras/pixelconversionserver | e8a170411395ce9d787b20d0bde775c8babe8b6a | [
"MIT"
] | 1 | 2021-06-26T14:18:35.000Z | 2021-06-26T14:18:35.000Z | mix.exs | mypolitics/pixelconversionserver | e8a170411395ce9d787b20d0bde775c8babe8b6a | [
"MIT"
] | null | null | null | mix.exs | mypolitics/pixelconversionserver | e8a170411395ce9d787b20d0bde775c8babe8b6a | [
"MIT"
] | 2 | 2021-06-20T23:29:35.000Z | 2021-06-20T23:52:12.000Z | defmodule PixelConversionServer.MixProject do
use Mix.Project
# Mix project definition: the app builds as an escript (see escript/0) with
# dependencies declared in deps/0.
def project do
[
app: :pixelconversionserver,
version: "1.0.1",
elixir: "~> 1.11",
start_permanent: Mix.env() == :prod,
deps: deps(),
escript: escript()
]
end
# OTP application spec. When the VM's plain arguments contain
# 'escript.build' (i.e. "mix escript.build" is running) the :mod entry is
# omitted — presumably because the escript entry point starts the app itself;
# otherwise the PixelConversionServer callback module is declared as usual.
def application do
  apps = [:logger, :plug_cowboy, :poison, :httpoison, :quantum, :cachex, :decimal, :rollbax]

  if Enum.any?(:init.get_plain_arguments(), &(&1 == 'escript.build')) do
    [applications: apps]
  else
    [applications: apps, mod: {PixelConversionServer, []}]
  end
end
# Escript configuration: the module whose main/1 runs when the built
# binary is executed.
def escript, do: [main_module: PixelConversionServer]
# Runtime dependencies: HTTP server/client, JSON, scheduling, caching,
# decimals and error reporting.
defp deps do
[
{:plug_cowboy, "~> 2.0"},
{:poison, "~> 3.1"},
{:httpoison, "~> 1.7"},
{:quantum, "~> 3.0"},
{:cachex, "~> 3.3"},
{:decimal, "~> 2.0"},
{:rollbax, "~> 0.11.0"}
]
end
end
| 21.590909 | 102 | 0.544211 |
f7f49ae52d2e265fa6c1e4f28b121f75e417ef1d | 131 | ex | Elixir | lib/inventory_management/repo.ex | web2solutions/inventory_management | 5ade39c983e344caa53db5fe4eb0684ba08b5dd1 | [
"MIT"
] | null | null | null | lib/inventory_management/repo.ex | web2solutions/inventory_management | 5ade39c983e344caa53db5fe4eb0684ba08b5dd1 | [
"MIT"
] | null | null | null | lib/inventory_management/repo.ex | web2solutions/inventory_management | 5ade39c983e344caa53db5fe4eb0684ba08b5dd1 | [
"MIT"
] | null | null | null | defmodule InventoryManagement.Repo do
# Ecto repository backed by PostgreSQL; connection settings are read from
# the :inventory_management application environment.
use Ecto.Repo,
otp_app: :inventory_management,
adapter: Ecto.Adapters.Postgres
end
| 21.833333 | 37 | 0.778626 |
f7f4b4cc60873167acc657f9ad7e8e34e93b3e48 | 928 | exs | Elixir | test/rolodex/config_test.exs | hauleth/rolodex | 405749d2e845a2c4259b12ebc266680039aa1cef | [
"MIT"
] | null | null | null | test/rolodex/config_test.exs | hauleth/rolodex | 405749d2e845a2c4259b12ebc266680039aa1cef | [
"MIT"
] | null | null | null | test/rolodex/config_test.exs | hauleth/rolodex | 405749d2e845a2c4259b12ebc266680039aa1cef | [
"MIT"
] | null | null | null | defmodule Rolodex.ConfigTest do
use ExUnit.Case
alias Rolodex.{Config, PipelineConfig, WriterConfig}
# Config.new/1 must normalize nested keyword options into their dedicated
# config structs so that struct defaults apply.
describe "#new/1" do
test "It parses nested writer config into a struct to set defaults" do
default_config = Config.new()
default_writer_config = WriterConfig.new()
assert match?(%Config{writer: ^default_writer_config}, default_config)
config = Config.new(writer: [file_name: "testing.json"])
writer_config = WriterConfig.new(file_name: "testing.json")
assert match?(%Config{writer: ^writer_config}, config)
end
# Each entry under :pipelines becomes a PipelineConfig struct.
test "It parses pipeline configs into structs to set defaults" do
config =
Config.new(
pipelines: [
api: [
body: [
id: :uuid
]
]
]
)
assert match?(%Config{pipelines: %{api: %PipelineConfig{body: %{id: :uuid}}}}, config)
end
end
end
| 26.514286 | 92 | 0.607759 |
f7f4be04760e95cfc7b9fe69e5d55b5f4045dc3f | 8,366 | ex | Elixir | lib/livebook_cli/server.ex | oo6/livebook | 0e059f4f840a56c122266a62cc8fdb3b97920efc | [
"Apache-2.0"
] | null | null | null | lib/livebook_cli/server.ex | oo6/livebook | 0e059f4f840a56c122266a62cc8fdb3b97920efc | [
"Apache-2.0"
] | null | null | null | lib/livebook_cli/server.ex | oo6/livebook | 0e059f4f840a56c122266a62cc8fdb3b97920efc | [
"Apache-2.0"
] | null | null | null | defmodule LivebookCLI.Server do
@moduledoc false
@behaviour LivebookCLI.Task
# Recompile this module whenever README.md changes, since part of the README
# is embedded into the usage text below.
@external_resource "README.md"
# At compile time, extract the section of the README delimited by the two
# "<!-- Environment variables -->" markers (the middle of the three splits).
[_, environment_variables, _] =
"README.md"
|> File.read!()
|> String.split("<!-- Environment variables -->")
@environment_variables String.trim(environment_variables)
# LivebookCLI.Task callback: returns the help text printed for
# "livebook server --help". The environment-variables section is spliced in
# from the README at compile time (see @environment_variables above).
@impl true
def usage() do
"""
Usage: livebook server [options] [open-command]
An optional open-command can be given as argument. It will open
up a browser window according these rules:
* If the open-command is "new", the browser window will point
to a new notebook
* If the open-command is a URL, the notebook at the given URL
will be imported
The open-command runs after the server is started. If a server is
already running, the browser window will point to the server
currently running.
## Available options
--cookie Sets a cookie for the app distributed node
--data-path The directory to store Livebook configuration,
defaults to "livebook" under the default user data directory
--default-runtime Sets the runtime type that is used by default when none is started
explicitly for the given notebook, defaults to standalone
Supported options:
* standalone - Elixir standalone
* mix[:PATH] - Mix standalone
* attached:NODE:COOKIE - Attached
* embedded - Embedded
--home The home path for the Livebook instance
--ip The ip address to start the web application on, defaults to 127.0.0.1
Must be a valid IPv4 or IPv6 address
--name Set a name for the app distributed node
--no-token Disable token authentication, enabled by default
If LIVEBOOK_PASSWORD is set, it takes precedence over token auth
--open Open browser window pointing to the application
-p, --port The port to start the web application on, defaults to 8080
--sname Set a short name for the app distributed node
The --help option can be given to print this notice.
## Environment variables
#{@environment_variables}
## Examples
Starts a server:
livebook server
Starts a server and opens up a browser at a new notebook:
livebook server new
Starts a server and imports the notebook at the given URL:
livebook server https://example.com/my-notebook.livemd
"""
end
# LivebookCLI.Task callback: parses CLI options into application config,
# then either reports an already-running Livebook, errors out when another
# app occupies the port, or boots the :livebook application and blocks.
@impl true
def call(args) do
{opts, extra_args} = args_to_options(args)
config_entries = opts_to_config(opts, [])
put_config_entries(config_entries)
# The endpoint port was just configured from the CLI options above.
port = Application.get_env(:livebook, LivebookWeb.Endpoint)[:http][:port]
base_url = "http://localhost:#{port}"
case check_endpoint_availability(base_url) do
:livebook_running ->
IO.puts("Livebook already running on #{base_url}")
open_from_options(base_url, opts, extra_args)
:taken ->
print_error(
"Another application is already running on port #{port}." <>
" Either ensure this port is free or specify a different port using the --port option"
)
:available ->
# We configure the endpoint with `server: true`,
# so it's gonna start listening
case Application.ensure_all_started(:livebook) do
{:ok, _} ->
open_from_options(LivebookWeb.Endpoint.access_url(), opts, extra_args)
# Keep the CLI process alive forever; the server runs until killed.
Process.sleep(:infinity)
{:error, error} ->
print_error("Livebook failed to start with reason: #{inspect(error)}")
end
end
end
# Overrides the current application environment with the given
# {app, key, value} entries. Existing env for each app is loaded lazily
# the first time the app appears, and repeated keys are deeply merged
# (when their values are keyword lists) via Config.Reader.merge/2.
defp put_config_entries(config_entries) do
merged_env =
Enum.reduce(config_entries, [], fn {app, key, value}, acc ->
acc
|> Keyword.put_new_lazy(app, fn -> Application.get_all_env(app) end)
|> Config.Reader.merge([{app, [{key, value}]}])
end)

Application.put_all_env(merged_env, persistent: true)
end
# Probes base_url's /health endpoint and classifies the port:
#   :livebook_running - a Livebook instance answered the health check
#   :taken            - something responded, but not a Livebook
#   :available        - nothing is listening (request errored)
defp check_endpoint_availability(base_url) do
# :inets is required for the HTTP request below.
Application.ensure_all_started(:inets)
health_url = append_path(base_url, "/health")
case Livebook.Utils.HTTP.request(:get, health_url) do
{:ok, status, _headers, body} ->
# Only a 200 whose JSON body identifies itself as "livebook"
# counts as a running Livebook; any other response means the
# port is occupied by something else.
with 200 <- status,
{:ok, body} <- Jason.decode(body),
%{"application" => "livebook"} <- body do
:livebook_running
else
_ -> :taken
end
{:error, _error} ->
:available
end
end
# Handles the optional positional open-command after the server is up.
# No open-command: open a browser at the root only when --open was given.
defp open_from_options(base_url, opts, []) do
if opts[:open] do
Livebook.Utils.browser_open(base_url)
end
end
# "new": open a browser at the new-notebook page.
defp open_from_options(base_url, _opts, ["new"]) do
base_url
|> append_path("/explore/notebooks/new")
|> Livebook.Utils.browser_open()
end
# A single URL: open the notebook-import page for that URL.
defp open_from_options(base_url, _opts, [url]) do
base_url
|> Livebook.Utils.notebook_import_url(url)
|> Livebook.Utils.browser_open()
end
# More than one positional argument is an error.
defp open_from_options(_base_url, _opts, _extra_args) do
print_error(
"Too many arguments entered. Ensure only one argument is used to specify the file path and all other arguments are preceded by the relevant switch"
)
end
# Recognized CLI switches and their OptionParser value types; anything
# not listed here makes OptionParser.parse!/2 raise.
@switches [
data_path: :string,
cookie: :string,
default_runtime: :string,
ip: :string,
name: :string,
open: :boolean,
port: :integer,
home: :string,
sname: :string,
token: :boolean
]
# Single-letter aliases for the switches above.
@aliases [
p: :port
]
# Parses raw CLI args into {parsed_switches, positional_args}, raising
# on unknown switches (OptionParser.parse!/2) and on mutually exclusive
# option combinations (validate_options!/1).
defp args_to_options(args) do
{parsed, positional} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
validate_options!(parsed)
{parsed, positional}
end
# Raises when --name and --sname are both supplied; the distributed
# node can only have one kind of name.
defp validate_options!(opts) do
name_given? = Keyword.has_key?(opts, :name)
sname_given? = Keyword.has_key?(opts, :sname)

if name_given? and sname_given? do
raise "the provided --sname and --name options are mutually exclusive, please specify only one of them"
end
end
# Recursively folds the parsed switch list into {app, key, value}
# config entries consumed by put_config_entries/1.
defp opts_to_config([], config), do: config
# --no-token only disables auth when token auth is the active mode;
# per usage/0, LIVEBOOK_PASSWORD takes precedence over token auth.
defp opts_to_config([{:token, false} | opts], config) do
if Livebook.Config.auth_mode() == :token do
opts_to_config(opts, [{:livebook, :authentication_mode, :disabled} | config])
else
opts_to_config(opts, config)
end
end
defp opts_to_config([{:port, port} | opts], config) do
opts_to_config(opts, [{:livebook, LivebookWeb.Endpoint, http: [port: port]} | config])
end
defp opts_to_config([{:ip, ip} | opts], config) do
ip = Livebook.Config.ip!("--ip", ip)
opts_to_config(opts, [{:livebook, LivebookWeb.Endpoint, http: [ip: ip]} | config])
end
defp opts_to_config([{:home, home} | opts], config) do
home = Livebook.Config.writable_dir!("--home", home)
opts_to_config(opts, [{:livebook, :home, home} | config])
end
# Node names and cookies must be atoms for Erlang distribution.
# String.to_atom/1 on operator-supplied CLI values is acceptable here
# (bounded, trusted input), unlike on arbitrary external input.
defp opts_to_config([{:sname, sname} | opts], config) do
sname = String.to_atom(sname)
opts_to_config(opts, [{:livebook, :node, {:shortnames, sname}} | config])
end
defp opts_to_config([{:name, name} | opts], config) do
name = String.to_atom(name)
opts_to_config(opts, [{:livebook, :node, {:longnames, name}} | config])
end
defp opts_to_config([{:cookie, cookie} | opts], config) do
cookie = String.to_atom(cookie)
opts_to_config(opts, [{:livebook, :cookie, cookie} | config])
end
defp opts_to_config([{:default_runtime, default_runtime} | opts], config) do
default_runtime = Livebook.Config.default_runtime!("--default-runtime", default_runtime)
opts_to_config(opts, [{:livebook, :default_runtime, default_runtime} | config])
end
defp opts_to_config([{:data_path, path} | opts], config) do
data_path = Livebook.Config.writable_dir!("--data-path", path)
opts_to_config(opts, [{:livebook, :data_path, data_path} | config])
end
# Any other switch (e.g. --open, handled later) contributes no config.
defp opts_to_config([_opt | opts], config), do: opts_to_config(opts, config)
# Appends `path` to the path component of `url`, treating a missing
# path (nil) as the empty string.
defp append_path(url, path) do
uri = URI.parse(url)
URI.to_string(%{uri | path: (uri.path || "") <> path})
end
# Prints `message` to stdout in red via ANSI escape formatting.
defp print_error(message) do
[:red, message]
|> IO.ANSI.format()
|> IO.puts()
end
end
| 32.301158 | 153 | 0.639254 |
f7f4d44ce7e67f3e398d17845527bbf312951b0b | 2,298 | ex | Elixir | lib/example_16_web/telemetry.ex | pzingg/phoenix_16_example | 7f4160de837229101e18a4e1cc97a00f4af5aaa7 | [
"MIT"
] | null | null | null | lib/example_16_web/telemetry.ex | pzingg/phoenix_16_example | 7f4160de837229101e18a4e1cc97a00f4af5aaa7 | [
"MIT"
] | null | null | null | lib/example_16_web/telemetry.ex | pzingg/phoenix_16_example | 7f4160de837229101e18a4e1cc97a00f4af5aaa7 | [
"MIT"
] | null | null | null | defmodule Example16Web.Telemetry do
use Supervisor
import Telemetry.Metrics
# Starts the telemetry supervisor, registered under this module's name.
def start_link(arg) do
Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
end
@impl true
# Supervisor callback: supervises a :telemetry_poller that runs
# periodic_measurements/0 every 10 seconds.
def init(_arg) do
children = [
# Telemetry poller will execute the given period measurements
# every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
{:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
# Add reporters as children of your supervision tree.
# {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
]
Supervisor.init(children, strategy: :one_for_one)
end
# Telemetry.Metrics definitions consumed by reporters
# (e.g. Phoenix LiveDashboard); all durations are converted from
# native time units to milliseconds.
def metrics do
[
# Phoenix Metrics
summary("phoenix.endpoint.stop.duration",
unit: {:native, :millisecond}
),
summary("phoenix.router_dispatch.stop.duration",
tags: [:route],
unit: {:native, :millisecond}
),
# Database Metrics
summary("example_16.repo.query.total_time",
unit: {:native, :millisecond},
description: "The sum of the other measurements"
),
summary("example_16.repo.query.decode_time",
unit: {:native, :millisecond},
description: "The time spent decoding the data received from the database"
),
summary("example_16.repo.query.query_time",
unit: {:native, :millisecond},
description: "The time spent executing the query"
),
summary("example_16.repo.query.queue_time",
unit: {:native, :millisecond},
description: "The time spent waiting for a database connection"
),
summary("example_16.repo.query.idle_time",
unit: {:native, :millisecond},
description:
"The time the connection spent waiting before being checked out for the query"
),
# VM Metrics
summary("vm.memory.total", unit: {:byte, :kilobyte}),
summary("vm.total_run_queue_lengths.total"),
summary("vm.total_run_queue_lengths.cpu"),
summary("vm.total_run_queue_lengths.io")
]
end
# MFAs invoked by the :telemetry_poller on each period; currently empty.
defp periodic_measurements do
[
# A module, function and arguments to be invoked periodically.
# This function must call :telemetry.execute/3 and a metric must be added above.
# {Example16Web, :count_users, []}
]
end
end
| 31.916667 | 88 | 0.656223 |
f7f4dbf921bdc9be7b57420633ac75ae2506070b | 1,586 | ex | Elixir | plugins/ucc_chat/lib/ucc_chat/robot/adapters/ucx_chat.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | plugins/ucc_chat/lib/ucc_chat/robot/adapters/ucx_chat.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | plugins/ucc_chat/lib/ucc_chat/robot/adapters/ucx_chat.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | defmodule UccChat.Robot.Adapters.UccChat do
use Hedwig.Adapter
alias UccChat.Robot.Adapters.UccChat.{Connection}
@doc false
# Adapter init: starts the backing Connection process, then immediately
# notifies this adapter that it is connected (handled in handle_info/2).
def init({robot, opts}) do
{:ok, conn} = Connection.start(opts)
# Kernel.send(self(), :connected)
# {:ok, %{conn: conn, opts: opts, robot: robot}}
Kernel.send(self(), :connected)
{:ok, %{conn: conn, opts: opts, robot: robot}}
end
# Synchronously fetches the adapter's internal state (see handle_call/3).
def status(pid), do: GenServer.call(pid, :status)
@doc false
# Forwards an outgoing message to the connection process unchanged.
def handle_cast({:send, msg}, %{conn: conn} = state) do
Kernel.send(conn, {:reply, msg})
{:noreply, state}
end
@doc false
# Replies to a message; the "user: text" mention prefix is deliberately
# disabled (see the commented-out line) so only the plain text is sent.
def handle_cast({:reply, %{user: _user, text: text} = msg}, %{conn: conn} = state) do
# Kernel.send(conn, {:reply, %{msg | text: "#{user}: #{text}"}})
Kernel.send(conn, {:reply, %{msg | text: "#{text}"}})
{:noreply, state}
end
@doc false
# Emotes are forwarded to the connection the same way as sends.
def handle_cast({:emote, msg}, %{conn: conn} = state) do
Kernel.send(conn, {:reply, msg})
{:noreply, state}
end
@doc false
# Returns the full adapter state to the caller (see status/1).
def handle_call(:status, _from, state) do
{:reply, state, state}
end
@doc false
# Inbound chat message from the connection: wrap it in a Hedwig.Message
# and hand it to the robot for command matching.
def handle_info({:message, %{"text" => text, "user" => user, "channel" => channel}}, %{robot: robot} = state) do
msg = %Hedwig.Message{
ref: make_ref(),
robot: robot,
text: text,
type: "chat",
room: channel,
user: %Hedwig.User{id: user.id, name: user.name}
}
Hedwig.Robot.handle_in(robot, msg)
{:noreply, state}
end
# Sent by init/1 above; notifies the robot that the adapter is up.
def handle_info(:connected, %{robot: robot} = state) do
:ok = Hedwig.Robot.handle_connect(robot)
{:noreply, state}
end
end
| 25.580645 | 114 | 0.603405 |
f7f4edf4d0f5502248a4062c420c82b8493a45e8 | 845 | ex | Elixir | apps/ins_web/lib/ins_web/application.ex | ODYLIGHT/ins_umbrella | 40534551b18030b4621f882f33b0416ab60ba02a | [
"MIT"
] | null | null | null | apps/ins_web/lib/ins_web/application.ex | ODYLIGHT/ins_umbrella | 40534551b18030b4621f882f33b0416ab60ba02a | [
"MIT"
] | null | null | null | apps/ins_web/lib/ins_web/application.ex | ODYLIGHT/ins_umbrella | 40534551b18030b4621f882f33b0416ab60ba02a | [
"MIT"
] | null | null | null | defmodule InsWeb.Application do
use Application
# Application callback: boots the supervision tree with the Phoenix
# endpoint as its only supervised child.
# NOTE(review): Supervisor.Spec (import below) and supervisor/2 are
# deprecated in newer Elixir versions — confirm the project's Elixir
# version before modernizing to plain child specs.
def start(_type, _args) do
import Supervisor.Spec
# Define workers and child supervisors to be supervised
children = [
# Start the endpoint when the application starts
supervisor(InsWeb.Endpoint, []),
# Start your own worker by calling: InsWeb.Worker.start_link(arg1, arg2, arg3)
# worker(InsWeb.Worker, [arg1, arg2, arg3]),
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: InsWeb.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated (e.g. during hot code upgrades).
def config_change(changed, _new, removed) do
InsWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 30.178571 | 84 | 0.712426 |
f7f4fd2ee32e581d2462edec494985a0a16f1dcf | 4,040 | exs | Elixir | integration_test/cases/query_test.exs | carl-al/wallaby | a4c9811902370b15db17fc62d451035ee7464eea | [
"MIT"
] | null | null | null | integration_test/cases/query_test.exs | carl-al/wallaby | a4c9811902370b15db17fc62d451035ee7464eea | [
"MIT"
] | null | null | null | integration_test/cases/query_test.exs | carl-al/wallaby | a4c9811902370b15db17fc62d451035ee7464eea | [
"MIT"
] | null | null | null | defmodule Wallaby.Integration.QueryTest do
use Wallaby.Integration.SessionCase, async: true
# Smoke test: a basic CSS query against the index page returns a result.
test "the driver can execute queries", %{session: session} do
elements =
session
|> Browser.visit("/")
|> Browser.find(Query.css("#child"))
assert elements != "Failure"
end
# count, text and visibility filters must all apply simultaneously.
test "disregards elements that don't match all filters", %{session: session} do
elements =
session
|> Browser.visit("/page_1.html")
|> Browser.find(Query.css(".conflicting", count: 2, text: "Visible", visible: true))
assert Enum.count(elements) == 2
end
describe "filtering queries by visibility" do
# Invisible elements are excluded unless visible: false is given.
test "finds elements that are invisible", %{session: session} do
assert_raise Wallaby.QueryError, fn ->
session
|> Browser.visit("/page_1.html")
|> Browser.find(Query.css(".invisible-elements", count: 3))
end
elements =
session
|> Browser.visit("/page_1.html")
|> Browser.find(Query.css(".invisible-elements", count: 3, visible: false))
assert Enum.count(elements) == 3
end
# count: :any tolerates a mix of visible and invisible matches.
test "doesn't error if the count is 'any' and some elements are visible", %{session: session} do
element =
session
|> Browser.visit("/page_1.html")
|> Browser.find(Query.css("#same-selectors-with-different-visibilities"))
|> Browser.find(Query.css("span", text: "Visible", count: :any))
assert Enum.count(element) == 2
end
end
# count: must match the exact number of elements found, else it raises.
test "queries can check the number of elements", %{session: session} do
assert_raise Wallaby.QueryError, fn ->
session
|> Browser.visit("/page_1.html")
|> Browser.find(Query.css(".user"))
end
elements =
session
|> Browser.visit("/page_1.html")
|> Browser.find(Query.css(".user", count: 5))
assert Enum.count(elements) == 5
end
# at: picks a single element out of a multi-element result set.
test "queries can select one element from a list", %{session: session} do
element =
session
|> Browser.visit("/page_1.html")
|> Browser.find(Query.css(".user", count: 5, at: 1))
assert Element.text(element) == "Grace H."
end
# at: values outside the result range raise — below the range...
test "queries can not select an element off the start of the list", %{session: session} do
assert_raise Wallaby.QueryError, fn ->
session
|> Browser.visit("/page_1.html")
|> Browser.find(Query.css(".user", count: 5, at: -1))
end
end
# ...and at/above the count.
test "queries can not select an element off the end of the list", %{session: session} do
assert_raise Wallaby.QueryError, fn ->
session
|> Browser.visit("/page_1.html")
|> Browser.find(Query.css(".user", count: 5, at: 5))
end
end
# text: filters matches by their text content.
test "queries can specify element text", %{session: session} do
assert_raise Wallaby.QueryError, fn ->
session
|> Browser.visit("/page_1.html")
|> Browser.find(Query.css(".user", text: "Some fake text"))
end
element =
session
|> Browser.visit("/page_1.html")
|> Browser.find(Query.css(".user", text: "Chris K."))
assert element
end
# Combining text: with visible: false is rejected by query validation.
test "trying to set a text when visible is false throws an error", %{session: session} do
assert_raise Wallaby.QueryError, fn ->
session
|> Browser.find(Query.css(".some-css", text: "test", visible: false))
end
end
# find/2 retries the query while the page loads content asynchronously.
test "queries can be retried", %{session: session} do
element =
session
|> Browser.visit("/wait.html")
|> Browser.find(Query.css(".main"))
assert element
elements =
session
|> Browser.find(Query.css(".orange", count: 5))
assert Enum.count(elements) == 5
end
# Query.text/1 locates an element by its text content alone.
test "queries can find an element by only text", %{session: session} do
element =
session
|> Browser.visit("/page_1.html")
|> Browser.find(Query.text("Chris K."))
assert element
end
# all/2 returns [] instead of raising when nothing matches.
test "all returns an empty list if nothing is found", %{session: session} do
elements =
session
|> Browser.visit("/page_1.html")
|> Browser.all(Query.css(".not_there"))
assert Enum.count(elements) == 0
end
end
| 28.055556 | 100 | 0.619554 |
f7f4fdf6d29bd20fe3c8b3da5f8fa8f8c4fffd7a | 61 | ex | Elixir | web_finngen_r8/lib/risteys_web/views/home_view.ex | vincent-octo/risteys | 5bb1e70b78988770048b91b42fad025faf98d84a | [
"MIT"
] | null | null | null | web_finngen_r8/lib/risteys_web/views/home_view.ex | vincent-octo/risteys | 5bb1e70b78988770048b91b42fad025faf98d84a | [
"MIT"
] | null | null | null | web_finngen_r8/lib/risteys_web/views/home_view.ex | vincent-octo/risteys | 5bb1e70b78988770048b91b42fad025faf98d84a | [
"MIT"
] | null | null | null | defmodule RisteysWeb.HomeView do
# Pulls in the shared view helpers defined by the RisteysWeb macro.
use RisteysWeb, :view
end
| 15.25 | 32 | 0.803279 |
f7f508082a3b09cad3a6c36afeee41895b0a9265 | 1,997 | ex | Elixir | lib/serum/project_info.ex | dragsubil/Serum | a465c48b388ef1e6d69ee6e8793f2869035b0520 | [
"MIT"
] | null | null | null | lib/serum/project_info.ex | dragsubil/Serum | a465c48b388ef1e6d69ee6e8793f2869035b0520 | [
"MIT"
] | null | null | null | lib/serum/project_info.ex | dragsubil/Serum | a465c48b388ef1e6d69ee6e8793f2869035b0520 | [
"MIT"
] | null | null | null | defmodule Serum.ProjectInfo do
@moduledoc """
This module defines a struct for storing Serum project metadata.
"""
import Serum.Util
# String keys accepted from the input map by new/1; any other key in
# the input is silently ignored.
@accepted_keys [
"site_name", "site_description", "base_url", "author", "author_email",
"date_format", "preview_length", "list_title_all", "list_title_tag",
"pagination", "posts_per_page"
]
# Defaults used when a key is absent (or dropped by validation).
defstruct [
site_name: "", site_description: "", base_url: "", author: "",
author_email: "", date_format: "{YYYY}-{0M}-{0D}", preview_length: 200,
list_title_all: "All Posts", list_title_tag: "Posts Tagged ~s",
pagination: false, posts_per_page: 5
]
@type t :: %Serum.ProjectInfo{}
@doc "A helper function for creating a new ProjectInfo struct."
@spec new(map) :: t
def new(map) do
default = %Serum.ProjectInfo{}
# Validate user-supplied format strings first; invalid ones are
# removed so the struct defaults apply instead.
map_checked =
map |> check_date_format() |> check_list_title_format()
# Keep only whitelisted keys; String.to_atom/1 is safe here because
# membership in @accepted_keys bounds the atoms created.
map_new =
for {k, v} <- map_checked, k in @accepted_keys, into: %{} do
{String.to_atom(k), v}
end
Map.merge default, map_new
end
@spec check_date_format(map) :: map
# Validates the optional "date_format" Timex format string; on failure
# it warns and drops the key so the struct default is used.
defp check_date_format(map) do
case map["date_format"] do
nil -> map
fmt when is_binary(fmt) ->
case Timex.validate_format fmt do
:ok -> map
{:error, message} ->
warn "Invalid date format string `date_format`:"
warn "  " <> message
warn "The default format string will be used instead."
Map.delete map, "date_format"
end
end
end
@spec check_list_title_format(map) :: map
# Validates the optional "list_title_tag" format string by probing it
# with :io_lib.format/2; an ArgumentError means the format is invalid,
# so the key is dropped (with a warning) and the default applies.
defp check_list_title_format(map) do
try do
case map["list_title_tag"] do
nil -> map
fmt when is_binary(fmt) ->
:io_lib.format(fmt, ["test"])
map
end
rescue
_e in ArgumentError ->
warn "Invalid post list title format string `list_title_tag`."
warn "The default format string will be used instead."
Map.delete map, "list_title_tag"
end
end
end
| 27.736111 | 75 | 0.620931 |
f7f52067824da9fe60474b07c90779c2bf23c83c | 4,817 | exs | Elixir | test/trento/application/usecases/hosts_test.exs | trento-project/web | 3260b30c781bffbbb0e5205cd650966c4026b9ac | [
"Apache-2.0"
] | 1 | 2022-03-22T16:59:34.000Z | 2022-03-22T16:59:34.000Z | test/trento/application/usecases/hosts_test.exs | trento-project/web | 3260b30c781bffbbb0e5205cd650966c4026b9ac | [
"Apache-2.0"
] | 24 | 2022-03-22T16:45:25.000Z | 2022-03-31T13:00:02.000Z | test/trento/application/usecases/hosts_test.exs | trento-project/web | 3260b30c781bffbbb0e5205cd650966c4026b9ac | [
"Apache-2.0"
] | 1 | 2022-03-30T14:16:16.000Z | 2022-03-30T14:16:16.000Z | defmodule Trento.HostsTest do
# Integration tests for the Trento.Hosts context (DB-backed via DataCase).
use ExUnit.Case
use Trento.DataCase
import Trento.Factory
alias Trento.Hosts
alias Trento.Repo
alias Trento.SlesSubscriptionReadModel
@moduletag :integration
describe "SLES Subscriptions" do
# With no read models persisted the subscription count is zero.
test "No SLES4SAP Subscriptions detected" do
assert 0 = Repo.all(SlesSubscriptionReadModel) |> length
assert 0 = Hosts.get_all_sles_subscriptions()
end
# Only "SLES_SAP" identifiers are counted; other modules are ignored
# even though they are stored (12 rows persisted, 6 counted).
test "Detects the correct number of SLES4SAP Subscriptions" do
0..5
|> Enum.map(fn _ ->
insert(:sles_subscription, identifier: "SLES_SAP")
insert(:sles_subscription, identifier: "sle-module-server-applications")
end)
assert 12 = SlesSubscriptionReadModel |> Repo.all() |> length()
assert 6 = Hosts.get_all_sles_subscriptions()
end
end
describe "Connection settings" do
# The settings map combines host data (ssh_address, hostname) with the
# stored connection user and the computed default user.
test "Returns connection settings map" do
host_id = Faker.UUID.v4()
host = insert(:host, id: host_id, ssh_address: "192.168.1.1")
insert(:host_connection_settings, id: host_id, user: "root")
assert %{
host_id: host_id,
ssh_address: "192.168.1.1",
user: "root",
default_user: "root",
hostname: host.hostname
} ==
Hosts.get_connection_settings(host_id)
end
end
describe "Connection Settings Management for the Hosts of a Cluster" do
# Each test gets a fresh cluster with two hosts, A-01 and B-01.
setup do
cluster_id = Faker.UUID.v4()
insert(:cluster, id: cluster_id)
%{
cluster_id: cluster_id,
hosts: [
insert(:host, hostname: "A-01", cluster_id: cluster_id),
insert(:host, hostname: "B-01", cluster_id: cluster_id)
]
}
end
# Without explicit settings, hosts report default_user "root" and no
# per-host user.
test "should retrieve connection settings for a given cluster", %{
cluster_id: cluster_id,
hosts: [
%{id: a_host_id, hostname: a_hostname, cluster_id: cluster_id},
%{id: another_host_id, hostname: another_hostname, cluster_id: cluster_id}
]
} do
settings = Hosts.get_all_connection_settings_by_cluster_id(cluster_id)
assert [
%{
host_id: ^a_host_id,
hostname: ^a_hostname,
default_user: "root",
user: nil
},
%{
host_id: ^another_host_id,
hostname: ^another_hostname,
default_user: "root",
user: nil
}
] = settings
end
# On Azure, the default user comes from provider_data.admin_username.
test "should retrieve default connection user for a specific cloud platform" do
cluster_id = Faker.UUID.v4()
insert(:cluster, id: cluster_id)
%{id: a_host_id, hostname: a_hostname} =
insert(
:host,
hostname: "A-01",
cluster_id: cluster_id,
provider: :azure,
provider_data: %{admin_username: "adminuser123"}
)
%{id: another_host_id, hostname: another_hostname} =
insert(
:host,
hostname: "B-01",
cluster_id: cluster_id,
provider: :azure,
provider_data: %{admin_username: "adminuser345"}
)
settings = Hosts.get_all_connection_settings_by_cluster_id(cluster_id)
assert [
%{
host_id: ^a_host_id,
hostname: ^a_hostname,
default_user: "adminuser123",
user: nil
},
%{
host_id: ^another_host_id,
hostname: ^another_hostname,
default_user: "adminuser345",
user: nil
}
] = settings
end
# Saved per-host users are returned on subsequent reads.
test "should apply desired connection settings for the hosts of a given cluster", %{
cluster_id: cluster_id,
hosts: [
%{id: a_host_id, hostname: a_hostname, cluster_id: cluster_id},
%{id: another_host_id, hostname: another_hostname, cluster_id: cluster_id}
]
} do
connection_user = "luke"
new_settings = [
%{
host_id: a_host_id,
user: connection_user,
default_user: "root"
},
%{
host_id: another_host_id,
user: connection_user,
default_user: "root"
}
]
:ok = Hosts.save_hosts_connection_settings(new_settings)
stored_settings = Hosts.get_all_connection_settings_by_cluster_id(cluster_id)
assert [
%{
host_id: ^a_host_id,
hostname: ^a_hostname,
user: ^connection_user
},
%{
host_id: ^another_host_id,
hostname: ^another_hostname,
user: ^connection_user
}
] = stored_settings
end
end
end
| 28.335294 | 88 | 0.556778 |
f7f52459cad6d78a4c1add65add16f3a9b0624d2 | 23,618 | ex | Elixir | lib/elixir/lib/process.ex | mattmatters/elixir | e0d1c2e4cae0277e69fec086b92d82f13d2aa033 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/process.ex | mattmatters/elixir | e0d1c2e4cae0277e69fec086b92d82f13d2aa033 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/process.ex | mattmatters/elixir | e0d1c2e4cae0277e69fec086b92d82f13d2aa033 | [
"Apache-2.0"
] | null | null | null | defmodule Process do
@moduledoc """
Conveniences for working with processes and the process dictionary.
Besides the functions available in this module, the `Kernel` module
exposes and auto-imports some basic functionality related to processes
available through the following functions:
* `Kernel.spawn/1` and `Kernel.spawn/3`
* `Kernel.spawn_link/1` and `Kernel.spawn_link/3`
* `Kernel.spawn_monitor/1` and `Kernel.spawn_monitor/3`
* `Kernel.self/0`
* `Kernel.send/2`
While this module provides low-level conveniences to work with processes,
developers typically use abstractions such as `Agent`, `GenServer`,
`Registry`, `Supervisor` and `Task` for building their systems and
resort to this module for gathering information, trapping exits, links
and monitoring.
"""
@typedoc """
A process destination.
A remote or local PID, a local port, a locally registered name, or a tuple in
the form of `{registered_name, node}` for a registered name at another node.
"""
@type dest :: pid | port | (registered_name :: atom) | {registered_name :: atom, node}
@doc """
Tells whether the given process is alive on the local node.
If the process identified by `pid` is alive (that is, it's not exiting and has
not exited yet) than this function returns `true`. Otherwise, it returns
`false`.
`pid` must refer to a process running on the local node or `ArgumentError` is raised.
Inlined by the compiler.
"""
@spec alive?(pid) :: boolean
defdelegate alive?(pid), to: :erlang, as: :is_process_alive
@doc """
Returns all key-value pairs in the process dictionary.
Inlined by the compiler.
"""
@spec get() :: [{term, term}]
defdelegate get(), to: :erlang
@doc """
Returns the value for the given `key` in the process dictionary,
or `default` if `key` is not set.
## Examples
# Assuming :locale was not set
iex> Process.get(:locale, "pt")
"pt"
iex> Process.put(:locale, "fr")
nil
iex> Process.get(:locale, "pt")
"fr"
"""
@spec get(term, default :: term) :: term
def get(key, default \\ nil) do
# :erlang.get/1 returns :undefined for missing keys; translate that
# into the caller-provided default.
value = :erlang.get(key)
if value == :undefined, do: default, else: value
end
@doc """
Returns all keys in the process dictionary.
Inlined by the compiler.
## Examples
# Assuming :locale was not set
iex> :locale in Process.get_keys()
false
iex> Process.put(:locale, "pt")
nil
iex> :locale in Process.get_keys()
true
"""
@spec get_keys() :: [term]
defdelegate get_keys(), to: :erlang
@doc """
Returns all keys in the process dictionary that have the given `value`.
Inlined by the compiler.
"""
@spec get_keys(term) :: [term]
defdelegate get_keys(value), to: :erlang
@doc """
Stores the given `key`-`value` pair in the process dictionary.
The return value of this function is the value that was previously stored
under `key`, or `nil` in case no value was stored under `key`.
## Examples
# Assuming :locale was not set
iex> Process.put(:locale, "en")
nil
iex> Process.put(:locale, "fr")
"en"
"""
@spec put(term, term) :: term | nil
def put(key, value) do
# :erlang.put/2 returns the previous value, or :undefined when the key
# was unset; nillify/1 maps :undefined to nil (per the @doc contract).
nillify(:erlang.put(key, value))
end
@doc """
Deletes the given `key` from the process dictionary.
Returns the value that was under `key` in the process dictionary,
or `nil` if `key` was not stored in the process dictionary.
## Examples
iex> Process.put(:comments, ["comment", "other comment"])
iex> Process.delete(:comments)
["comment", "other comment"]
iex> Process.delete(:comments)
nil
"""
@spec delete(term) :: term | nil
def delete(key) do
# :erlang.erase/1 returns the removed value or :undefined; nillify/1
# maps :undefined to nil (per the @doc contract).
nillify(:erlang.erase(key))
end
@doc """
Sends an exit signal with the given `reason` to `pid`.
The following behaviour applies if `reason` is any term except `:normal`
or `:kill`:
1. If `pid` is not trapping exits, `pid` will exit with the given
`reason`.
2. If `pid` is trapping exits, the exit signal is transformed into a
message `{:EXIT, from, reason}` and delivered to the message queue
of `pid`.
If `reason` is the atom `:normal`, `pid` will not exit (unless `pid` is
the calling process, in which case it will exit with the reason `:normal`).
If it is trapping exits, the exit signal is transformed into a message
`{:EXIT, from, :normal}` and delivered to its message queue.
If `reason` is the atom `:kill`, that is if `Process.exit(pid, :kill)` is called,
an untrappable exit signal is sent to `pid` which will unconditionally exit
with reason `:killed`.
Inlined by the compiler.
## Examples
Process.exit(pid, :kill)
#=> true
"""
@spec exit(pid, term) :: true
defdelegate exit(pid, reason), to: :erlang
@doc """
Sleeps the current process for the given `timeout`.
`timeout` is either the number of milliseconds to sleep as an
integer or the atom `:infinity`. When `:infinity` is given,
the current process will sleep forever, and not
consume or reply to messages.
**Use this function with extreme care**. For almost all situations
where you would use `sleep/1` in Elixir, there is likely a
more correct, faster and precise way of achieving the same with
message passing.
For example, if you are waiting for a process to perform some
action, it is better to communicate the progress of such action
with messages.
In other words, **do not**:
Task.start_link(fn ->
do_something()
...
end)
# Wait until work is done
Process.sleep(2000)
But **do**:
parent = self()
Task.start_link(fn ->
do_something()
send(parent, :work_is_done)
...
end)
receive do
:work_is_done -> :ok
after
# Optional timeout
30_000 -> :timeout
end
For cases like the one above, `Task.async/1` and `Task.await/2` are
preferred.
Similarly, if you are waiting for a process to terminate,
monitor that process instead of sleeping. **Do not**:
Task.start_link(fn ->
...
end)
# Wait until task terminates
Process.sleep(2000)
Instead **do**:
{:ok, pid} =
Task.start_link(fn ->
...
end)
ref = Process.monitor(pid)
receive do
{:DOWN, ^ref, _, _, _} -> :task_is_down
after
# Optional timeout
30_000 -> :timeout
end
"""
@spec sleep(timeout) :: :ok
def sleep(timeout)
when is_integer(timeout) and timeout >= 0
when timeout == :infinity do
# A receive with no clauses and only an `after` blocks for `timeout`
# without consuming any mailbox messages (see @doc above).
receive after: (timeout -> :ok)
end
@doc """
Sends a message to the given `dest`.
`dest` may be a remote or local PID, a local port, a locally
registered name, or a tuple in the form of `{registered_name, node}` for a
registered name at another node.
Inlined by the compiler.
## Options
* `:noconnect` - when used, if sending the message would require an
auto-connection to another node the message is not sent and `:noconnect` is
returned.
* `:nosuspend` - when used, if sending the message would cause the sender to
be suspended the message is not sent and `:nosuspend` is returned.
Otherwise the message is sent and `:ok` is returned.
## Examples
iex> Process.send({:name, :node_that_does_not_exist}, :hi, [:noconnect])
:noconnect
"""
@spec send(dest, msg, [option]) :: :ok | :noconnect | :nosuspend
when dest: dest(),
msg: any,
option: :noconnect | :nosuspend
defdelegate send(dest, msg, options), to: :erlang
@doc """
Sends `msg` to `dest` after `time` milliseconds.
If `dest` is a PID, it must be the PID of a local process, dead or alive.
If `dest` is an atom, it must be the name of a registered process
which is looked up at the time of delivery. No error is produced if the name does
not refer to a process.
The message is not sent immediately. Therefore, `dest` can receive other messages
in-between even when `time` is `0`.
This function returns a timer reference, which can be read with `read_timer/1`
or canceled with `cancel_timer/1`.
The timer will be automatically canceled if the given `dest` is a PID
which is not alive or when the given PID exits. Note that timers will not be
automatically canceled when `dest` is an atom (as the atom resolution is done
on delivery).
Inlined by the compiler.
## Options
* `:abs` - (boolean) when `false`, `time` is treated as relative to the
current monotonic time. When `true`, `time` is the absolute value of the
Erlang monotonic time at which `msg` should be delivered to `dest`.
To read more about Erlang monotonic time and other time-related concepts,
look at the documentation for the `System` module. Defaults to `false`.
## Examples
timer_ref = Process.send_after(pid, :hi, 1000)
"""
@spec send_after(pid | atom, term, non_neg_integer, [option]) :: reference
when option: {:abs, boolean}
def send_after(dest, msg, time, opts \\ []) do
# Argument order differs from Erlang: :erlang.send_after/4 takes the
# time first, while this API takes the destination first.
:erlang.send_after(time, dest, msg, opts)
end
@doc """
Cancels a timer returned by `send_after/3`.

When the result is an integer, it represents the time in milliseconds
left until the timer would have expired.

When the result is `false`, a timer corresponding to `timer_ref` could not be
found. This can happen either because the timer expired, because it has
already been canceled, or because `timer_ref` never corresponded to a timer.

Even if the timer had expired and the message was sent, this function does not
tell you if the timeout message has arrived at its destination yet.

Inlined by the compiler.

## Options

  * `:async` - (boolean) when `false`, the request for cancellation is
    synchronous. When `true`, the request for cancellation is asynchronous,
    meaning that the request to cancel the timer is issued and `:ok` is
    returned right away. Defaults to `false`.

  * `:info` - (boolean) whether to return information about the timer being
    cancelled. When the `:async` option is `false` and `:info` is `true`, then
    either an integer or `false` (like described above) is returned. If
    `:async` is `false` and `:info` is `false`, `:ok` is returned. If `:async`
    is `true` and `:info` is `true`, a message in the form `{:cancel_timer,
    timer_ref, result}` (where `result` is an integer or `false` like
    described above) is sent to the caller of this function when the
    cancellation has been performed. If `:async` is `true` and `:info` is
    `false`, no message is sent. Defaults to `true`.
"""
@spec cancel_timer(reference, options) :: non_neg_integer | false | :ok
      when options: [async: boolean, info: boolean]
# Pure delegation: :erlang.cancel_timer/2 already has the (ref, opts) shape.
defdelegate cancel_timer(timer_ref, options \\ []), to: :erlang

@doc """
Reads a timer created by `send_after/3`.

When the result is an integer, it represents the time in milliseconds
left until the timer will expire.

When the result is `false`, a timer corresponding to `timer_ref` could not be
found. This can be either because the timer expired, because it has already
been canceled, or because `timer_ref` never corresponded to a timer.

Even if the timer had expired and the message was sent, this function does not
tell you if the timeout message has arrived at its destination yet.

Inlined by the compiler.
"""
@spec read_timer(reference) :: non_neg_integer | false
defdelegate read_timer(timer_ref), to: :erlang
# Options accepted by spawn/2 and spawn/4 (a subset of :erlang.spawn_opt/4's).
@type spawn_opt ::
        :link
        | :monitor
        | {:priority, :low | :normal | :high}
        | {:fullsweep_after, non_neg_integer}
        | {:min_heap_size, non_neg_integer}
        | {:min_bin_vheap_size, non_neg_integer}
@type spawn_opts :: [spawn_opt]

@doc """
Spawns the given function according to the given options.

The result depends on the given options. In particular,
if `:monitor` is given as an option, it will return a tuple
containing the PID and the monitoring reference, otherwise
just the spawned process PID.

More options are available; for the comprehensive list of available options
check `:erlang.spawn_opt/4`.

Inlined by the compiler.

## Examples

    Process.spawn(fn -> 1 + 2 end, [:monitor])
    #=> {#PID<0.93.0>, #Reference<0.18808174.1939079169.202418>}
    Process.spawn(fn -> 1 + 2 end, [:link])
    #=> #PID<0.95.0>

"""
@spec spawn((() -> any), spawn_opts) :: pid | {pid, reference}
defdelegate spawn(fun, opts), to: :erlang, as: :spawn_opt

@doc """
Spawns the given function `fun` from module `mod`, passing the given `args`
according to the given options.

The result depends on the given options. In particular,
if `:monitor` is given as an option, it will return a tuple
containing the PID and the monitoring reference, otherwise
just the spawned process PID.

It also accepts extra options, for the list of available options
check `:erlang.spawn_opt/4`.

Inlined by the compiler.
"""
@spec spawn(module, atom, list, spawn_opts) :: pid | {pid, reference}
defdelegate spawn(mod, fun, args, opts), to: :erlang, as: :spawn_opt
@doc """
Starts monitoring the given `item` from the calling process.

Once the monitored process dies, a message is delivered to the
monitoring process in the shape of:

    {:DOWN, ref, :process, object, reason}

where:

  * `ref` is a monitor reference returned by this function;
  * `object` is either a `pid` of the monitored process (if monitoring
    a PID) or `{name, node}` (if monitoring a remote or local name);
  * `reason` is the exit reason.

If the process is already dead when calling `Process.monitor/1`, a
`:DOWN` message is delivered immediately.

See [the need for monitoring](http://elixir-lang.org/getting-started/mix-otp/genserver.html#the-need-for-monitoring)
for an example. See `:erlang.monitor/2` for more info.

Inlined by the compiler.

## Examples

    pid = spawn(fn -> 1 + 2 end)
    #=> #PID<0.118.0>
    Process.monitor(pid)
    #=> #Reference<0.906660723.3006791681.40191>
    Process.exit(pid, :kill)
    #=> true
    receive do
      msg -> msg
    end
    #=> {:DOWN, #Reference<0.906660723.3006791681.40191>, :process, #PID<0.118.0>, :noproc}

"""
@spec monitor(pid | {name, node} | name) :: reference when name: atom
# Only :process monitors are exposed here (not :port or :time_offset).
def monitor(item) do
  :erlang.monitor(:process, item)
end

@doc """
Demonitors the monitor identified by the given `reference`.

If `monitor_ref` is a reference which the calling process
obtained by calling `monitor/1`, that monitoring is turned off.
If the monitoring is already turned off, nothing happens.

See `:erlang.demonitor/2` for more info.

Inlined by the compiler.

## Examples

    pid = spawn(fn -> 1 + 2 end)
    ref = Process.monitor(pid)
    Process.demonitor(ref)
    #=> true

"""
@spec demonitor(reference, options :: [:flush | :info]) :: boolean
defdelegate demonitor(monitor_ref, options \\ []), to: :erlang
@doc """
Returns a list of PIDs corresponding to all the
processes currently existing on the local node.

Note that if a process is exiting, it is considered to exist but not be
alive. This means that for such process, `alive?/1` will return `false` but
its PID will be part of the list of PIDs returned by this function.

See `:erlang.processes/0` for more info.

Inlined by the compiler.

## Examples

    Process.list()
    #=> [#PID<0.0.0>, #PID<0.1.0>, #PID<0.2.0>, #PID<0.3.0>, ...]

"""
@spec list() :: [pid]
defdelegate list(), to: :erlang, as: :processes

@doc """
Creates a link between the calling process and the given item (process or
port).

Links are bidirectional. Linked processes can be unlinked by using `unlink/1`.

If such a link exists already, this function does nothing since there can only
be one link between two given processes. If a process tries to create a link
to itself, nothing will happen.

When two processes are linked, each one receives exit signals from the other
(see also `exit/2`). Let's assume `pid1` and `pid2` are linked. If `pid2`
exits with a reason other than `:normal` (which is also the exit reason used
when a process finishes its job) and `pid1` is not trapping exits (see
`flag/2`), then `pid1` will exit with the same reason as `pid2` and in turn
emit an exit signal to all its other linked processes. The behaviour when
`pid1` is trapping exits is described in `exit/2`.

See `:erlang.link/1` for more info.

Inlined by the compiler.
"""
@spec link(pid | port) :: true
defdelegate link(pid_or_port), to: :erlang

@doc """
Removes the link between the calling process and the given item (process or
port).

If there is no such link, this function does nothing. If `pid_or_port` does
not exist, this function does not produce any errors and simply does nothing.

The return value of this function is always `true`.

See `:erlang.unlink/1` for more info.

Inlined by the compiler.
"""
@spec unlink(pid | port) :: true
defdelegate unlink(pid_or_port), to: :erlang
@doc """
Registers the given `pid_or_port` under the given `name`.

`name` must be an atom and can then be used instead of the
PID/port identifier when sending messages with `Kernel.send/2`.

`register/2` will fail with `ArgumentError` in any of the following cases:

  * the PID/Port is not existing locally and alive
  * the name is already registered
  * the `pid_or_port` is already registered under a different `name`

The following names are reserved and cannot be assigned to
processes nor ports:

  * `nil`
  * `false`
  * `true`
  * `:undefined`

## Examples

    Process.register(self(), :test)
    #=> true
    send(:test, :hello)
    #=> :hello
    send(:wrong_name, :hello)
    #=> ** (ArgumentError) argument error

"""
@spec register(pid | port, atom) :: true
# Reserved names are rejected in the guard; :erlang.register/2 raises
# :badarg for the remaining failure modes, which is rewritten below into
# a descriptive ArgumentError. The node/1 guard on the first catch clause
# distinguishes the "remote pid" case from the local-failure cases.
def register(pid_or_port, name)
    when is_atom(name) and name not in [nil, false, true, :undefined] do
  :erlang.register(name, pid_or_port)
catch
  :error, :badarg when node(pid_or_port) != node() ->
    message = "could not register #{inspect(pid_or_port)} because it belongs to another node"
    :erlang.error(ArgumentError.exception(message), [pid_or_port, name])

  :error, :badarg ->
    message =
      "could not register #{inspect(pid_or_port)} with " <>
        "name #{inspect(name)} because it is not alive, the name is already " <>
        "taken, or it has already been given another name"

    :erlang.error(ArgumentError.exception(message), [pid_or_port, name])
end
@doc """
Removes the registered `name`, associated with a PID
or a port identifier.

Fails with `ArgumentError` if the name is not registered
to any PID or port.

Inlined by the compiler.

## Examples

    Process.register(self(), :test)
    #=> true
    Process.unregister(:test)
    #=> true
    Process.unregister(:wrong_name)
    #=> ** (ArgumentError) argument error

"""
@spec unregister(atom) :: true
defdelegate unregister(name), to: :erlang

@doc """
Returns the PID or port identifier registered under `name` or `nil` if the
name is not registered.

See `:erlang.whereis/1` for more info.

## Examples

    Process.register(self(), :test)
    Process.whereis(:test)
    #=> #PID<0.84.0>
    Process.whereis(:wrong_name)
    #=> nil

"""
@spec whereis(atom) :: pid | port | nil
# :erlang.whereis/1 returns :undefined for unknown names; normalize to nil.
def whereis(name) do
  nillify(:erlang.whereis(name))
end
@doc """
Returns the PID of the group leader for the calling process.

Inlined by the compiler.

## Examples

    Process.group_leader()
    #=> #PID<0.53.0>

"""
@spec group_leader() :: pid
defdelegate group_leader(), to: :erlang

@doc """
Sets the group leader of the given `pid` to `leader`.

Typically, this is used when a process started from a certain shell should
have a group leader other than `:init`.

Inlined by the compiler.
"""
@spec group_leader(pid, leader :: pid) :: true
# :erlang.group_leader/2 takes (leader, pid); the Elixir API flips the
# argument order so the subject process comes first.
def group_leader(pid, leader) do
  :erlang.group_leader(leader, pid)
end

@doc """
Returns a list of names which have been registered using `register/2`.

Inlined by the compiler.

## Examples

    Process.register(self(), :test)
    Process.registered()
    #=> [:test, :elixir_config, :inet_db, ...]

"""
@spec registered() :: [atom]
defdelegate registered(), to: :erlang
# Private spec helpers for flag/2 below.
@typep heap_size ::
         non_neg_integer
         | %{size: non_neg_integer, kill: boolean, error_logger: boolean}
@typep priority_level :: :low | :normal | :high | :max

@doc """
Sets the given `flag` to `value` for the calling process.

Returns the old value of `flag`.

See `:erlang.process_flag/2` for more info.

Inlined by the compiler.
"""
# One @spec clause per supported flag keeps Dialyzer precise about the
# flag/value pairing.
@spec flag(:error_handler, module) :: module
@spec flag(:max_heap_size, heap_size) :: heap_size
@spec flag(:message_queue_data, :erlang.message_queue_data()) :: :erlang.message_queue_data()
@spec flag(:min_bin_vheap_size, non_neg_integer) :: non_neg_integer
@spec flag(:min_heap_size, non_neg_integer) :: non_neg_integer
@spec flag(:monitor_nodes, term) :: term
@spec flag({:monitor_nodes, term()}, term) :: term
@spec flag(:priority, priority_level) :: priority_level
@spec flag(:save_calls, 0..10000) :: 0..10000
@spec flag(:sensitive, boolean) :: boolean
@spec flag(:trap_exit, boolean) :: boolean
defdelegate flag(flag, value), to: :erlang, as: :process_flag

@doc """
Sets the given `flag` to `value` for the given process `pid`.

Returns the old value of `flag`.

It raises `ArgumentError` if `pid` is not a local process.

The allowed values for `flag` are only a subset of those allowed in `flag/2`,
namely `:save_calls`.

See `:erlang.process_flag/3` for more info.

Inlined by the compiler.
"""
@spec flag(pid, :save_calls, 0..10000) :: 0..10000
defdelegate flag(pid, flag, value), to: :erlang, as: :process_flag
@doc """
Returns information about the process identified by `pid`, or returns `nil` if the process
is not alive.

Use this only for debugging information.

See `:erlang.process_info/1` for more info.
"""
@spec info(pid) :: keyword | nil
# :erlang.process_info/1 returns :undefined for dead processes; nillify/1
# converts that marker to nil.
def info(pid) do
  nillify(:erlang.process_info(pid))
end

@doc """
Returns information about the process identified by `pid`,
or returns `nil` if the process is not alive.

See `:erlang.process_info/2` for more info.
"""
@spec info(pid, atom | [atom]) :: {atom, term} | [{atom, term}] | nil
def info(pid, spec)

# :registered_name is special-cased: for an alive but *unregistered*
# process, :erlang.process_info/2 returns a bare [], which is normalized
# here to the {key, value} tuple shape the other specs produce.
def info(pid, :registered_name) do
  case :erlang.process_info(pid, :registered_name) do
    :undefined -> nil
    [] -> {:registered_name, []}
    other -> other
  end
end

def info(pid, spec) when is_atom(spec) or is_list(spec) do
  nillify(:erlang.process_info(pid, spec))
end
@doc """
Puts the calling process into a "hibernation" state.

The calling process is put into a waiting state
where its memory allocation has been reduced as much as possible,
which is useful if the process does not expect to receive any messages
in the near future.

See `:erlang.hibernate/3` for more info.

Inlined by the compiler.
"""
@spec hibernate(module, atom, list) :: no_return
defdelegate hibernate(mod, fun_name, args), to: :erlang

# Internal helper: maps :undefined (Erlang's "absent" marker) to nil.
# Inlined for the hot info/whereis paths.
@compile {:inline, nillify: 1}
defp nillify(:undefined), do: nil
defp nillify(other), do: other
end
| 29.934094 | 118 | 0.666737 |
f7f5260a246168ebd643024c7c0e19e443814022 | 380 | exs | Elixir | test/covid19_questionnaire_web/controllers/token_controller/create_test.exs | betagouv/covid19-algorithme-orientation-elixir | 7d99c0b79551438bd763ae4293b495096bc8d9ad | [
"MIT"
] | 3 | 2020-04-08T19:15:22.000Z | 2020-05-24T22:37:54.000Z | test/covid19_questionnaire_web/controllers/token_controller/create_test.exs | betagouv/covid19-algorithme-orientation-elixir | 7d99c0b79551438bd763ae4293b495096bc8d9ad | [
"MIT"
] | 10 | 2020-04-05T17:31:49.000Z | 2020-06-10T11:09:17.000Z | test/covid19_questionnaire_web/controllers/token_controller/create_test.exs | betagouv/covid19-algorithme-orientation-elixir | 7d99c0b79551438bd763ae4293b495096bc8d9ad | [
"MIT"
] | null | null | null | defmodule Covid19QuestionnaireWeb.TokenController.CreateTest do
use Covid19QuestionnaireWeb.ConnCase, async: true
# Exercises POST /token: expects an HTTP 201 whose JSON body validates
# against the "TokenResponse" OpenAPI schema and carries a castable UUID
# under data.uuid (Ecto.UUID.cast/1 returns a truthy {:ok, _} on success).
test "création d'un token", %{conn: conn, spec: spec} do
  body =
    conn
    |> post("/token")
    |> response(201)
    |> Jason.decode!()

  assert_schema(body, "TokenResponse", spec)
  assert body["data"]["uuid"] |> Ecto.UUID.cast()
end
end
| 25.333333 | 63 | 0.655263 |
f7f5285a111b688c34fa9fe8672d3e224605a409 | 660 | ex | Elixir | web/controllers/session_controller.ex | ajrob27/elixir-practice-rumbl | 55573bea782a0d4f56fbaf2995bee985fa0fe3be | [
"MIT"
] | 1 | 2016-09-19T01:31:35.000Z | 2016-09-19T01:31:35.000Z | web/controllers/session_controller.ex | ajrob27/elixir-practice-rumbl | 55573bea782a0d4f56fbaf2995bee985fa0fe3be | [
"MIT"
] | null | null | null | web/controllers/session_controller.ex | ajrob27/elixir-practice-rumbl | 55573bea782a0d4f56fbaf2995bee985fa0fe3be | [
"MIT"
] | null | null | null | defmodule Rumbl.SessionController do
use Rumbl.Web, :controller
# GET handler: renders the login form.
def new(conn, _) do
  render conn, "new.html"
end
# POST handler: attempts to log the user in with the submitted credentials.
# On success, sets a welcome flash and redirects home; on failure,
# re-renders the login form with an error flash.
def create(conn, %{"session" => %{"username" => user, "password" => pass}}) do
  login = Rumbl.Auth.login_by_username_and_pass(conn, user, pass, repo: Repo)

  case login do
    {:ok, conn} ->
      conn
      |> put_flash(:info, "Welcome back!")
      |> redirect(to: page_path(conn, :index))

    {:error, _reason, conn} ->
      conn
      |> put_flash(:error, "Invalid username/password combination")
      |> render("new.html")
  end
end
end
| 28.695652 | 70 | 0.524242 |
f7f564183e19a83a0478f97758e927a6d7001219 | 534 | ex | Elixir | exercises/concept/boutique-inventory/lib/boutique_inventory.ex | kwchang0831/elixir | 1b21ae1ca610de97db79e76db890503ba75ce466 | [
"MIT"
] | null | null | null | exercises/concept/boutique-inventory/lib/boutique_inventory.ex | kwchang0831/elixir | 1b21ae1ca610de97db79e76db890503ba75ce466 | [
"MIT"
] | null | null | null | exercises/concept/boutique-inventory/lib/boutique_inventory.ex | kwchang0831/elixir | 1b21ae1ca610de97db79e76db890503ba75ce466 | [
"MIT"
] | null | null | null | defmodule BoutiqueInventory do
def sort_by_price(inventory) do
# Please implement the sort_by_price/1 function
end
def with_missing_price(inventory) do
# Please implement the with_missing_price/1 function
end
def update_names(inventory, old_word, new_word) do
# Please implement the update_names/3 function
end
def increase_quantity(item, count) do
# Please implement the increase_quantity/2 function
end
def total_quantity(item) do
# Please implement the total_quantity/1 function
end
end
| 24.272727 | 56 | 0.76779 |
f7f5770ec2145721b1478ab1c39044630c5de2fb | 121 | ex | Elixir | lib/karibuex/msg/response.ex | yanovitchsky/karibu_ex | 85349c6dcda74c90db7fda8b9164680c6103eb49 | [
"MIT"
] | null | null | null | lib/karibuex/msg/response.ex | yanovitchsky/karibu_ex | 85349c6dcda74c90db7fda8b9164680c6103eb49 | [
"MIT"
] | null | null | null | lib/karibuex/msg/response.ex | yanovitchsky/karibu_ex | 85349c6dcda74c90db7fda8b9164680c6103eb49 | [
"MIT"
] | null | null | null | defmodule Karibuex.Msg.Response do
def encode(id, error, result) do
Msgpax.pack!([1, id, error, result])
end
end
| 20.166667 | 40 | 0.694215 |
f7f57af64fa1c3a0d4884f169afd0023b4fea985 | 7,741 | ex | Elixir | lib/ucx_ucc/ucc_model.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | lib/ucx_ucc/ucc_model.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | lib/ucx_ucc/ucc_model.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | defmodule UccModel do
@moduledoc """
Model abstraction for UcxUcc.
"""
defmacro __using__(opts) do
quote do
opts = unquote(opts)
@repo opts[:repo] || UcxUcc.Repo
@schema opts[:schema] || raise(":schema option required")
@type id :: integer | String.t
import Ecto.Query, warn: false
@doc """
Create a default #{@schema} struct.
"""
@spec new() :: Struct.t
def new, do: %@schema{}
@doc """
Create a #{@schema} with the provided options.
"""
@spec new(Keyword.t) :: Struct.t
def new(opts), do: struct(new(), opts)
@doc """
Return the schema module.
"""
@spec schema() :: Module.t
def schema, do: @schema
@doc """
Returns an `%Ecto.Changeset{}` for tracking #{@schema} changes.
"""
@spec change(Struct.t, Keyword.t) :: Ecto.Changeset.t
def change(%@schema{} = schema, attrs) do
@schema.changeset(schema, attrs)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking #{@schema} changes.
"""
@spec change(Struct.t) :: Ecto.Changeset.t
def change(%@schema{} = schema) do
@schema.changeset(schema)
end
@spec change(Keyword.t) :: Ecto.Changeset.t
def change(attrs) when is_map(attrs) or is_list(attrs) do
@schema.changeset(%@schema{}, attrs)
end
@doc """
Get a list of #{@schema}'s.
## Options'
* `preload: list`
"""
@spec list(Keword.t) :: [Struct.t]
def list(opts \\ []) do
if preload = opts[:preload] do
@schema
|> preload(^preload)
|> order_by(asc: :inserted_at)
|> @repo.all
else
@repo.all @schema
end
end
@doc """
Get a list of #{@schema},s given a list of field value pairs.
## Preload
Pass a list of preloads with the `:preload` key.
## Examples
#{@schema}.list_by field1: value1, field2: field2, preload: [:association]
"""
@spec list_by(Keyword.t) :: List.t
def list_by(opts) do
{preload, opts} = Keyword.pop(opts, :preload, [])
opts
|> Enum.reduce(@schema, fn {k, v}, query ->
where(query, [b], field(b, ^k) == ^v)
end)
|> preload(^preload)
|> order_by(asc: :inserted_at)
|> @repo.all
end
@doc """
Get a single #{@schema}.
## Preload
Pass a list of preloads with the `:preload` key.
"""
@spec get(id, Keyword.t) :: Struct.t
def get(id, opts \\ []) do
if preload = opts[:preload] do
@repo.one from s in @schema, where: s.id == ^id, preload: ^preload
else
@repo.get @schema, id, opts
end
end
@spec get!(id, Keyword.t) :: Struct.t
def get!(id, opts \\ []) do
if preload = opts[:preload] do
@repo.one! from s in @schema, where: s.id == ^id, preload: ^preload
else
@repo.get! @schema, id, opts
end
end
@spec get_by(Keyword.t) :: Struct.t
def get_by(opts) do
if preload = opts[:preload] do
# TODO: Fix this with a single query
@schema
|> @repo.get_by(Keyword.delete(opts, :preload))
|> @repo.preload(preload)
else
@repo.get_by @schema, opts
end
end
@spec get_by!(Keyword.t) :: Struct.t
def get_by!(opts) do
if preload = opts[:preload] do
@schema
|> @repo.get_by(Keyword.delete(opts, :preload))
|> @repo.preload(preload)
else
@repo.get_by! @schema, opts
end
end
@spec create(Ecto.Changeset.t | Keyword.t | Map.t) :: {:ok, Struct.t} |
{:error, Ecto.Changeset.t}
def create(changeset_or_attrs \\ %{})
def create(%Ecto.Changeset{} = changeset) do
@repo.insert changeset
end
def create(attrs) do
create change(attrs)
end
def create!(changeset_or_attrs \\ %{})
@spec create!(Ecto.Changeset.t) :: Struct.t | no_return
def create!(%Ecto.Changeset{} = changeset) do
@repo.insert! changeset
end
@spec create!(Keyword.t) :: Struct.t | no_return
def create!(attrs) do
create! change(attrs)
end
@spec update(Ecto.Changeset.t) :: {:ok, Struct.t} |
{:error, Ecto.Changeset.t}
def update(%Ecto.Changeset{} = changeset) do
@repo.update changeset
end
@spec update(Struct.t, Keyword.t) :: {:ok, Struct.t} |
{:error, Ecto.Changeset.t}
def update(%@schema{} = schema, attrs) do
schema
|> change(attrs)
|> update
end
@spec update!(Ecto.Changeset.t) :: Struct.t | no_return
def update!(%Ecto.Changeset{} = changeset) do
@repo.update! changeset
end
@spec update!(Struct.t, Keyword.t) :: Struct.t | no_return
def update!(%@schema{} = schema, attrs) do
schema
|> change(attrs)
|> update!
end
@spec delete(Struct.t) :: {:ok, Struct.t} |
{:error, Ecto.Changeset.t}
def delete(%@schema{} = schema) do
delete change(schema)
end
@doc """
Delete the #{@schema} given by an `Ecto.Changeset`.
"""
@spec delete(Ecto.Changeset.t) :: {:ok, Struct.t} |
{:error, Ecto.Changeset.t}
def delete(%Ecto.Changeset{} = changeset) do
@repo.delete changeset
end
@doc """
Delete the #{@schema} given by an id.
"""
@spec delete(id) :: {:ok, Struct.t} |
{:error, Ecto.Changeset.t}
def delete(id) do
delete get(id)
end
@doc """
Delete the #{@schema} given a the struct, or raise an exception.
"""
@spec delete!(Struct.t) :: Struct.t | no_return
def delete!(%@schema{} = schema) do
delete! change(schema)
end
@doc """
Delete the #{@schema} given a changeset, or raise an exception.
"""
@spec delete!(Ecto.Changeset.t) :: {:ok, Struct.t} |
{:error, Ecto.Changeset.t}
def delete!(%Ecto.Changeset{} = changeset) do
@repo.delete! changeset
end
@doc """
Delete the given #{@schema} by id, or raise an exception.
"""
@spec delete!(id) :: Struct.t | no_return
def delete!(id) do
delete! get(id)
end
@doc """
Delete all #{@schema}'s.
"""
# @spec delete_all() :: any
def delete_all do
@repo.delete_all @schema
end
@doc """
Get the first #{@schema} ordered by creation date
"""
@spec first() :: Struct.t | nil
def first do
@schema
|> order_by(asc: :inserted_at)
|> first
|> @repo.one
end
@doc """
Get the last #{@schema} ordered by creation date
"""
@spec last() :: Struct.t | nil
def last do
@schema
|> order_by(asc: :inserted_at)
|> last
|> @repo.one
end
@doc """
Preload a #{@schema}.
"""
def preload_schema(schema, preload) do
@repo.preload schema, preload
end
defoverridable [
delete: 1, delete!: 1, update: 1, update: 2, update!: 1,
update!: 2, create: 1, create!: 1, get_by: 1, get_by!: 1,
get: 2, get!: 2, list: 0, change: 2, change: 1, delete_all: 0,
preload_schema: 2
]
end
end
end
| 26.419795 | 86 | 0.509107 |
f7f58494ea2ed12b17d1c870e8027a00e90f919d | 1,488 | exs | Elixir | test/wobserver2/util/helper_test.exs | aruki-delivery/wobserver | 7db6b219b405defc1e28bd86836f9a90eed235b6 | [
"MIT"
] | null | null | null | test/wobserver2/util/helper_test.exs | aruki-delivery/wobserver | 7db6b219b405defc1e28bd86836f9a90eed235b6 | [
"MIT"
] | null | null | null | test/wobserver2/util/helper_test.exs | aruki-delivery/wobserver | 7db6b219b405defc1e28bd86836f9a90eed235b6 | [
"MIT"
] | null | null | null | defmodule Wobserver2.Util.HelperTest do
use ExUnit.Case
alias Wobserver2.Util.Helper
alias Wobserver2.Util.Process
describe "string_to_module" do
test "returns module name without dots" do
assert Helper.string_to_module("Logger") == Logger
end
test "returns module name with dots" do
assert Helper.string_to_module("Logger.Supervisor") == Logger.Supervisor
end
test "returns atom" do
assert Helper.string_to_module("atom") == :atom
end
test "returns atom with spaces" do
assert Helper.string_to_module("has spaces") == :"has spaces"
end
end
describe "JSON implementations" do
test "PID" do
pid = Process.pid(33)
encoder = Jason.encode!(pid)
assert encoder.encode(pid, []) == [34, ["#PID<0.33.0>"], 34]
end
test "Port" do
port = :erlang.ports() |> List.first()
encoder = Jason.encode!(port)
assert encoder.encode(port, []) == [34, ["#Port<0.0>"], 34]
end
end
describe "format_function" do
test "nil" do
assert Helper.format_function(nil) == nil
end
test "with function typle" do
assert Helper.format_function({Logger, :log, 2}) == "Elixir.Logger.log/2"
end
test "returns function atom" do
assert Helper.format_function(:format_function) == "format_function"
end
end
describe "parallel_map" do
test "maps" do
assert Helper.parallel_map([1, 2, 3], fn x -> x * 2 end) == [2, 4, 6]
end
end
end
| 24.393443 | 79 | 0.639785 |
f7f5be77fb2c9284a7629ee5b8fe6e9af086f76f | 368 | exs | Elixir | exercises/sum-of-multiples/example.exs | darktef/elixir-exercism | bcaae351486b1405f0a01cd33b4d39555546298e | [
"MIT"
] | 1 | 2021-08-16T20:24:14.000Z | 2021-08-16T20:24:14.000Z | exercises/sum-of-multiples/example.exs | Triangle-Elixir/xelixir | 08d23bf47f57799f286567cb26f635291de2fde5 | [
"MIT"
] | null | null | null | exercises/sum-of-multiples/example.exs | Triangle-Elixir/xelixir | 08d23bf47f57799f286567cb26f635291de2fde5 | [
"MIT"
] | null | null | null | defmodule SumOfMultiples do
@doc """
Adds up all numbers from 1 to a given end number that are multiples of the factors provided.

The end number (`limit`) is exclusive. A factor of `0` contributes no
multiples.
"""
@spec to(non_neg_integer, [non_neg_integer]) :: non_neg_integer
# Guard clause: for limit <= 1 there are no candidates. Without it,
# `1..limit-1` builds a *descending* range (e.g. 1..0), so to(1, [1])
# would wrongly include 1.
def to(limit, _factors) when limit <= 1, do: 0

def to(limit, factors) do
  1..(limit - 1)
  |> Enum.filter(fn n -> Enum.any?(factors, &multiple?(n, &1)) end)
  |> Enum.sum()
end

# 0 has no positive multiples; the dedicated clause also avoids the
# ArithmeticError that rem(n, 0) would raise.
defp multiple?(_n, 0), do: false
defp multiple?(n, factor), do: rem(n, factor) == 0
end
| 28.307692 | 94 | 0.652174 |
f7f5d169aad9e8e844bdc37c92507e4d0ea29545 | 583 | exs | Elixir | apps/bookmarker/test/views/error_view_test.exs | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | 4 | 2019-10-04T16:11:15.000Z | 2021-08-18T21:00:13.000Z | apps/bookmarker/test/views/error_view_test.exs | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | 5 | 2020-03-16T23:52:25.000Z | 2021-09-03T16:52:17.000Z | apps/bookmarker/test/views/error_view_test.exs | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | null | null | null | defmodule Bookmarker.ErrorViewTest do
use Bookmarker.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(Bookmarker.ErrorView, "404.html", []) ==
"Page not found"
end
test "render 500.html" do
assert render_to_string(Bookmarker.ErrorView, "500.html", []) ==
"Internal server error"
end
test "render any other" do
assert render_to_string(Bookmarker.ErrorView, "505.html", []) ==
"Internal server error"
end
end
| 26.5 | 68 | 0.684391 |
f7f62b19ebef0b5991e8b3397002f8a1efa72ea4 | 3,609 | ex | Elixir | lib/ex_aws/request.ex | bettyblocks/ex_aws | 0c9b43b5a1afbfb5fb26131be7f80f69de4431d1 | [
"MIT",
"Unlicense"
] | null | null | null | lib/ex_aws/request.ex | bettyblocks/ex_aws | 0c9b43b5a1afbfb5fb26131be7f80f69de4431d1 | [
"MIT",
"Unlicense"
] | null | null | null | lib/ex_aws/request.ex | bettyblocks/ex_aws | 0c9b43b5a1afbfb5fb26131be7f80f69de4431d1 | [
"MIT",
"Unlicense"
] | 1 | 2021-01-22T12:16:23.000Z | 2021-01-22T12:16:23.000Z | defmodule ExAws.Request do
require Logger
@moduledoc """
Makes requests to AWS.
"""
@type http_status :: pos_integer
@type success_content :: %{body: binary, headers: [{binary, binary}]}
@type success_t :: {:ok, success_content}
@type error_t :: {:error, {:http_error, http_status, binary}}
@type response_t :: success_t | error_t
def request(http_method, url, data, headers, config, service) do
body = case data do
[] -> "{}"
d when is_binary(d) -> d
_ -> config[:json_codec].encode!(data)
end
request_and_retry(http_method, url, service, config, headers, body, {:attempt, 1})
end
def request_and_retry(_method, _url, _service, _config, _headers, _req_body, {:error, reason}), do: {:error, reason}
def request_and_retry(method, url, service, config, headers, req_body, {:attempt, attempt}) do
full_headers =
if service == :s3 && config[:s3_auth_version] == "2" do
ExAws.Auth.headers_v2(method, url, service, config, headers, req_body)
else
ExAws.Auth.headers(method, url, service, config, headers, req_body)
end
if config[:debug_requests] do
Logger.debug("Request URL: #{inspect url}")
Logger.debug("Request HEADERS: #{inspect full_headers}")
Logger.debug("Request BODY: #{inspect req_body}")
end
case config[:http_client].request(method, url, req_body, full_headers, Map.get(config, :http_opts, [])) do
{:ok, response = %{status_code: status}} when status in 200..299 ->
{:ok, response}
{:ok, %{status_code: status} = resp} when status in 400..499 ->
case client_error(resp, config[:json_codec]) do
{:retry, reason} ->
request_and_retry(method, url, service, config, headers, req_body, attempt_again?(attempt, reason, config))
{:error, reason} -> {:error, reason}
end
{:ok, %{status_code: status, body: body}} when status >= 500 ->
reason = {:http_error, status, body}
request_and_retry(method, url, service, config, headers, req_body, attempt_again?(attempt, reason, config))
{:error, %{reason: reason}} ->
Logger.warn("ExAws: HTTP ERROR: #{inspect reason}")
request_and_retry(method, url, service, config, headers, req_body, attempt_again?(attempt, reason, config))
end
end
def client_error(%{status_code: status, body: body} = error, json_codec) do
case json_codec.decode(body) do
{:ok, %{"__type" => error_type, "message" => message} = err} ->
error_type
|> String.split("#")
|> case do
[_, type] -> handle_aws_error(type, message)
_ -> {:error, {:http_error, status, err}}
end
_ -> {:error, {:http_error, status, error}}
end
end
def client_error(%{status_code: status} = error, _) do
{:error, {:http_error, status, error}}
end
def handle_aws_error("ProvisionedThroughputExceededException" = type, message) do
{:retry, {type, message}}
end
def handle_aws_error("ThrottlingException" = type, message) do
{:retry, {type, message}}
end
def handle_aws_error(type, message) do
{:error, {type, message}}
end
def attempt_again?(attempt, reason, config) do
if attempt >= config[:retries][:max_attempts] do
{:error, reason}
else
attempt |> backoff(config)
{:attempt, attempt + 1}
end
end
def backoff(attempt, config) do
(config[:retries][:base_backoff_in_ms] * :math.pow(2, attempt))
|> min(config[:retries][:max_backoff_in_ms])
|> trunc
|> :rand.uniform
|> :timer.sleep
end
end
| 35.382353 | 119 | 0.635633 |
f7f6455dbea175bb76b0349dc8ddc22c36d2f46b | 621 | ex | Elixir | clients/kratos/elixir/lib/ory/model/submit_self_service_verification_flow_body.ex | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | clients/kratos/elixir/lib/ory/model/submit_self_service_verification_flow_body.ex | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | clients/kratos/elixir/lib/ory/model/submit_self_service_verification_flow_body.ex | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule Ory.Model.SubmitSelfServiceVerificationFlowBody do
@moduledoc """
nolint:deadcode,unused
"""
@derive [Poison.Encoder]
defstruct [
:"csrf_token",
:"email",
:"method"
]
@type t :: %__MODULE__{
:"csrf_token" => String.t | nil,
:"email" => String.t,
:"method" => String.t
}
end
defimpl Poison.Decoder, for: Ory.Model.SubmitSelfServiceVerificationFlowBody do
def decode(value, _options) do
value
end
end
| 20.7 | 91 | 0.681159 |
f7f64b7d6ac7d646992b8bef5e081be65c00f715 | 5,356 | ex | Elixir | lib/documents_design/accounts/accounts.ex | documents-org/documents.design-el | 6976254e175232afe5e913c29b04c13a86d2e9a2 | [
"MIT"
] | null | null | null | lib/documents_design/accounts/accounts.ex | documents-org/documents.design-el | 6976254e175232afe5e913c29b04c13a86d2e9a2 | [
"MIT"
] | null | null | null | lib/documents_design/accounts/accounts.ex | documents-org/documents.design-el | 6976254e175232afe5e913c29b04c13a86d2e9a2 | [
"MIT"
] | null | null | null | defmodule DocumentsDesign.Accounts do
@moduledoc """
The Accounts context.
"""
import Ecto.Query, warn: false
alias DocumentsDesign.Repo
alias DocumentsDesign.Accounts.User
@doc """
Returns the list of users.

## Examples

    iex> list_users()
    [%User{}, ...]

"""
def list_users do
  Repo.all(User)
end

@doc """
Gets a single user.

Raises `Ecto.NoResultsError` if the User does not exist.

## Examples

    iex> get_user!(123)
    %User{}

    iex> get_user!(456)
    ** (Ecto.NoResultsError)

"""
def get_user!(id), do: Repo.get!(User, id)

@doc """
Get an user by id, without throwing.
"""
def get_user(id), do: Repo.get(User, id)
@doc """
Creates a user.

## Examples

    iex> create_user(%{field: value})
    {:ok, %User{}}

    iex> create_user(%{field: bad_value})
    {:error, %Ecto.Changeset{}}

"""
def create_user(attrs \\ %{}) do
  # Attrs are first enriched with tokens and a hashed password.
  %User{}
  |> User.changeset(prepare_new_user_attrs(attrs))
  |> Repo.insert()
end
@doc """
Prepares an user for insertion when it comes from a register/reset form.
"""
def prepare_new_user_attrs(%{"password" => p} = attrs) do
attrs
|> Map.put("verify_token", DocumentsDesign.Utilities.random_token())
|> Map.put("reset_token", DocumentsDesign.Utilities.random_token())
|> Map.put("password", hash_password(p))
end
@doc """
Tries to set "verified" status to 1 when an user verifies their email.
"""
def verify_email(email, token) do
user = Repo.get_by(User, %{email: email, verify_token: token})
if user do
user
|> Ecto.Changeset.cast(%{verified: true}, [:verified])
|> Repo.update()
else
mystery_error_message()
end
end
@doc """
Tries to authentificate an user.
"""
def auth_user(email, password) do
user = Repo.get_by(User, %{email: email})
if user do
if verify_password(user, password) do
{:ok, user}
else
mystery_error_message()
end
else
Comeonin.Argon2.dummy_checkpw()
mystery_error_message()
end
end
@doc """
Verifies the password for a given user.
"""
def verify_password(user, password) do
Comeonin.Argon2.checkpw(password, user.password)
end
@doc """
Hashes a password using argon2, wraps Comeonin.
"""
def hash_password(password) do
password |> Comeonin.Argon2.hashpwsalt()
end
@doc """
Updates a user.
## Examples
iex> update_user(user, %{field: new_value})
{:ok, %User{}}
iex> update_user(user, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_user(%User{} = user, attrs) do
user
|> User.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a User.
## Examples
iex> delete_user(user)
{:ok, %User{}}
iex> delete_user(user)
{:error, %Ecto.Changeset{}}
"""
def delete_user(%User{} = user) do
Repo.delete(user)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking user changes.
## Examples
iex> change_user(user)
%Ecto.Changeset{source: %User{}}
"""
def change_user(%User{} = user) do
User.changeset(user, %{})
end
@doc """
Do we have at least an user created ?
"""
def has_user do
Repo.one(from u in "users", select: count()) > 0
end
@doc """
Launches a password reset by assigning a new reset token,
and setting the reset date to "now".
"""
def start_reset_password(email) do
user = Repo.get_by(User, %{email: email})
if user do
user
|> Ecto.Changeset.cast(
%{
reset_token: DocumentsDesign.Utilities.random_token(),
reset_date: NaiveDateTime.utc_now()
},
[:reset_date, :reset_token]
)
|> Repo.update()
{:ok, Repo.get_by(User, %{email: email})}
else
mystery_error_message()
end
end
@doc """
Does a password reset, checking email,
reset date, reset token, then sets password.
"""
def do_reset_password(email, token, password) do
user = Repo.get_by(User, %{email: email, reset_token: token})
IO.inspect(user)
if user do
case check_reset_date(user) do
{:ok, user} ->
update_password(user, password)
{:error, _} ->
mystery_error_message()
end
else
mystery_error_message()
end
end
@doc """
Checks an user's reset password date for expiration (was it in the last 24 hours ?)
"""
def check_reset_date(user) do
if user.reset_date do
case NaiveDateTime.compare(
NaiveDateTime.utc_now(),
NaiveDateTime.add(user.reset_date, 60 * 60 * 24)
) do
:lt -> {:ok, user}
_ -> mystery_error_message()
end
else
mystery_error_message()
end
end
@doc """
Updates password, randomizes reset token, randomizes verify token.
"""
def update_password(user, password) do
user
|> User.changeset(prepare_new_user_attrs(%{"password" => password}))
|> Repo.update()
end
@doc """
Standard and identical error message for every auth operation :
You don't want to communicate that an user exists, or doesn't,
or give too much reason on why auth/reset/verification failed.
This prevents enumeration.
"""
def mystery_error_message, do: {:error, "Bad Credentials"}
end
| 21.16996 | 85 | 0.612584 |
f7f64d44a26b202823f31f5596f10c3206b76090 | 1,744 | ex | Elixir | lib/harald/hci/event.ex | mattludwigs/harald | 82e67a71d9940d8572fd217eaf29575e81533151 | [
"MIT"
] | null | null | null | lib/harald/hci/event.ex | mattludwigs/harald | 82e67a71d9940d8572fd217eaf29575e81533151 | [
"MIT"
] | null | null | null | lib/harald/hci/event.ex | mattludwigs/harald | 82e67a71d9940d8572fd217eaf29575e81533151 | [
"MIT"
] | null | null | null | defmodule Harald.HCI.Event do
@moduledoc """
Serialization module for HCI Events.
> The HCI Event Packet is used by the Controller to notify the Host when events occur.
Reference: Version 5.0, Vol 2, Part E, 5.4.4
"""
alias Harald.HCI.Event.{InquiryComplete, LEMeta}
alias Harald.Serializable
@behaviour Serializable
@event_modules [InquiryComplete, LEMeta]
@typedoc """
> Each event is assigned a 1-Octet event code used to uniquely identify different types of
> events.
Reference: Version 5.0, Vol 2, Part E, 5.4.4
"""
@type event_code :: pos_integer()
@type t :: struct()
@type serialize_ret :: {:ok, binary()} | LEMeta.serialize_ret()
@type deserialize_ret :: {:ok, t() | [t()]} | {:error, binary()}
@doc """
A list of modules representing implemented events.
"""
def event_modules, do: @event_modules
@doc """
HCI packet indicator for HCI Event Packet.
Reference: Version 5.0, Vol 5, Part A, 2
"""
def indicator, do: 4
@impl Serializable
def serialize(event)
Enum.each(@event_modules, fn module ->
def serialize(%unquote(module){} = event) do
{:ok, bin} = unquote(module).serialize(event)
{:ok, <<unquote(module).event_code(), byte_size(bin), bin::binary>>}
end
end)
def serialize(event), do: {:error, {:bad_event, event}}
@impl Serializable
def deserialize(binary)
Enum.each(@event_modules, fn module ->
def deserialize(<<unquote(module.event_code()), length, event_parameters::binary>> = bin) do
if length == byte_size(event_parameters) do
unquote(module).deserialize(event_parameters)
else
{:error, bin}
end
end
end)
def deserialize(bin) when is_binary(bin), do: {:error, bin}
end
| 24.914286 | 96 | 0.666284 |
f7f65e6dfb42aee501fbfb48c5ac7f18468a01ef | 544 | exs | Elixir | test/north/scope_test.exs | camcaine/north | d1193d3cdab219a2b91136cf2c4938f30d0a9c86 | [
"MIT"
] | null | null | null | test/north/scope_test.exs | camcaine/north | d1193d3cdab219a2b91136cf2c4938f30d0a9c86 | [
"MIT"
] | null | null | null | test/north/scope_test.exs | camcaine/north | d1193d3cdab219a2b91136cf2c4938f30d0a9c86 | [
"MIT"
] | null | null | null | defmodule North.ScopeTest do
use ExUnit.Case, async: true
import North.Scope
describe "match/3" do
test "matching with wildcards" do
matcher = North.Scope.Wildcard
assert [[], []] = match(matcher, [], [])
assert [~w(foo), []] = match(matcher, ~w(foo), ~w(foo))
assert [[], ~w(foo)] = match(matcher, ~w(bar), ~w(foo))
assert [~w(foo bar), ~w(baz)] = match(matcher, ~w(foo bar), ~w(foo bar baz))
assert [~w(foo bar), ~w(baz)] = match(matcher, ~w(foo bar qux), ~w(foo bar baz))
end
end
end
| 27.2 | 86 | 0.575368 |
f7f6a115070e095c79d118ca6f1f2a75390c5507 | 325 | ex | Elixir | apps/admin/lib/admin/views/post_view.ex | glv/revista | 00ecb0780c62a5525155a773b959b169e0e0500d | [
"MIT"
] | 17 | 2019-01-31T18:33:09.000Z | 2022-01-18T12:38:49.000Z | apps/admin/lib/admin/views/post_view.ex | glv/revista | 00ecb0780c62a5525155a773b959b169e0e0500d | [
"MIT"
] | null | null | null | apps/admin/lib/admin/views/post_view.ex | glv/revista | 00ecb0780c62a5525155a773b959b169e0e0500d | [
"MIT"
] | 4 | 2018-11-10T01:56:17.000Z | 2020-06-09T21:10:41.000Z | defmodule Admin.PostView do
use Admin, :view
def post_body_html(conn) do
if conn.assigns[:post], do: conn.assigns[:post].body
end
def post_cancel_path(conn) do
if conn.assigns[:post] do
Routes.post_path(conn, :show, conn.assigns[:post])
else
Routes.post_path(conn, :index)
end
end
end
| 20.3125 | 56 | 0.673846 |
f7f7025c85e626d1c7bb7c46e93d1c73e8a7999a | 79 | ex | Elixir | apps/financial_system_web/lib/financial_system_web/views/layout_view.ex | juniornelson123/tech-challenge-stone | e27b767514bf42a5ade5228de56c3c7ea38459d7 | [
"MIT"
] | null | null | null | apps/financial_system_web/lib/financial_system_web/views/layout_view.ex | juniornelson123/tech-challenge-stone | e27b767514bf42a5ade5228de56c3c7ea38459d7 | [
"MIT"
] | 2 | 2021-03-10T03:19:32.000Z | 2021-09-02T04:33:17.000Z | apps/financial_system_web/lib/financial_system_web/views/layout_view.ex | juniornelson123/tech-challenge-stone | e27b767514bf42a5ade5228de56c3c7ea38459d7 | [
"MIT"
] | null | null | null | defmodule FinancialSystemWeb.LayoutView do
use FinancialSystemWeb, :view
end
| 19.75 | 42 | 0.848101 |
f7f7037927e823eb56b514c7587206ffee3d5263 | 4,948 | ex | Elixir | clients/policy_simulator/lib/google_api/policy_simulator/v1/model/google_cloud_policysimulator_v1_binding_explanation.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/policy_simulator/lib/google_api/policy_simulator/v1/model/google_cloud_policysimulator_v1_binding_explanation.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/policy_simulator/lib/google_api/policy_simulator/v1/model/google_cloud_policysimulator_v1_binding_explanation.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1BindingExplanation do
  @moduledoc """
  Details about how a binding in a policy affects a principal's ability to use a permission.

  ## Attributes

  *   `access` (*type:* `String.t`, *default:* `nil`) - Required. Indicates whether _this binding_ provides the specified permission to the specified principal for the specified resource. This field does _not_ indicate whether the principal actually has the permission for the resource. There might be another binding that overrides this binding. To determine whether the principal actually has the permission, use the `access` field in the TroubleshootIamPolicyResponse.
  *   `condition` (*type:* `GoogleApi.PolicySimulator.V1.Model.GoogleTypeExpr.t`, *default:* `nil`) - A condition expression that prevents this binding from granting access unless the expression evaluates to `true`. To learn about IAM Conditions, see https://cloud.google.com/iam/docs/conditions-overview.
  *   `memberships` (*type:* `%{optional(String.t) => GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1BindingExplanationAnnotatedMembership.t}`, *default:* `nil`) - Indicates whether each principal in the binding includes the principal specified in the request, either directly or indirectly. Each key identifies a principal in the binding, and each value indicates whether the principal in the binding includes the principal in the request. For example, suppose that a binding includes the following principals: * `user:[email protected]` * `group:[email protected]` The principal in the replayed access tuple is `user:[email protected]`. This user is a principal of the group `group:[email protected]`. For the first principal in the binding, the key is `user:[email protected]`, and the `membership` field in the value is set to `MEMBERSHIP_NOT_INCLUDED`. For the second principal in the binding, the key is `group:[email protected]`, and the `membership` field in the value is set to `MEMBERSHIP_INCLUDED`.
  *   `relevance` (*type:* `String.t`, *default:* `nil`) - The relevance of this binding to the overall determination for the entire policy.
  *   `role` (*type:* `String.t`, *default:* `nil`) - The role that this binding grants. For example, `roles/compute.serviceAgent`. For a complete list of predefined IAM roles, as well as the permissions in each role, see https://cloud.google.com/iam/help/roles/reference.
  *   `rolePermission` (*type:* `String.t`, *default:* `nil`) - Indicates whether the role granted by this binding contains the specified permission.
  *   `rolePermissionRelevance` (*type:* `String.t`, *default:* `nil`) - The relevance of the permission's existence, or nonexistence, in the role to the overall determination for the entire policy.
  """

  # Gax.ModelBase supplies the field/2 macro and the JSON (de)serialization
  # plumbing used by the Poison protocol implementations below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :access => String.t() | nil,
          :condition => GoogleApi.PolicySimulator.V1.Model.GoogleTypeExpr.t() | nil,
          :memberships =>
            %{
              optional(String.t()) =>
                GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1BindingExplanationAnnotatedMembership.t()
            }
            | nil,
          :relevance => String.t() | nil,
          :role => String.t() | nil,
          :rolePermission => String.t() | nil,
          :rolePermissionRelevance => String.t() | nil
        }

  field(:access)
  field(:condition, as: GoogleApi.PolicySimulator.V1.Model.GoogleTypeExpr)

  # Map field: each value decodes into an AnnotatedMembership model.
  field(:memberships,
    as:
      GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1BindingExplanationAnnotatedMembership,
    type: :map
  )

  field(:relevance)
  field(:role)
  field(:rolePermission)
  field(:rolePermissionRelevance)
end
# Delegates JSON decoding to the decode/2 helper generated by Gax.ModelBase,
# which handles the nested model fields declared with `as:`.
defimpl Poison.Decoder,
  for: GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1BindingExplanation do
  def decode(value, options) do
    GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1BindingExplanation.decode(
      value,
      options
    )
  end
end
# Encoding is uniform across generated models, so it is delegated to the
# shared Gax.ModelBase encoder.
defimpl Poison.Encoder,
  for: GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1BindingExplanation do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 61.08642 | 1,041 | 0.743937 |
f7f72265b71a070c6e062926d70b716ccd209efa | 393 | ex | Elixir | lib/ssdp/supervisor.ex | rosetta-home/ssdp | 80bd4b75868d2605be5d497b5b9f36ac051ae459 | [
"Apache-2.0"
] | 1 | 2020-03-22T23:36:34.000Z | 2020-03-22T23:36:34.000Z | lib/ssdp/supervisor.ex | rosetta-home/ssdp | 80bd4b75868d2605be5d497b5b9f36ac051ae459 | [
"Apache-2.0"
] | null | null | null | lib/ssdp/supervisor.ex | rosetta-home/ssdp | 80bd4b75868d2605be5d497b5b9f36ac051ae459 | [
"Apache-2.0"
] | null | null | null | defmodule SSDP.Supervisor do
use Supervisor
def start_link do
Supervisor.start_link(__MODULE__, :ok, name: __MODULE__)
end
def init(:ok) do
children = [
supervisor(Registry, [:duplicate, SSDP.Registry, []]),
supervisor(Task.Supervisor, [[name: SSDP.TaskSupervisor]]),
worker(SSDP.Client, []),
]
supervise(children, strategy: :one_for_one)
end
end
| 23.117647 | 65 | 0.671756 |
f7f7279968f4b1e7ee5a679d5c34545cb223803d | 405 | exs | Elixir | test/fish_web/views/error_view_test.exs | wdiechmann/fish | b63fe109bbfc1cbe515ac31f9adcd9b57c6b21c8 | [
"MIT"
] | 1 | 2021-02-09T23:49:40.000Z | 2021-02-09T23:49:40.000Z | test/fish_web/views/error_view_test.exs | wdiechmann/fish | b63fe109bbfc1cbe515ac31f9adcd9b57c6b21c8 | [
"MIT"
] | null | null | null | test/fish_web/views/error_view_test.exs | wdiechmann/fish | b63fe109bbfc1cbe515ac31f9adcd9b57c6b21c8 | [
"MIT"
] | null | null | null | defmodule FishWeb.ErrorViewTest do
use FishWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(FishWeb.ErrorView, "404.html", []) == "Not Found"
end
test "renders 500.html" do
assert render_to_string(FishWeb.ErrorView, "500.html", []) == "Internal Server Error"
end
end
| 27 | 89 | 0.725926 |
f7f748968e27d66382fd097aa07d37d7d2ceb2d1 | 98 | exs | Elixir | pattern_matching.exs | ariarijp/elixir-crash-course | f63502a2bb59e3a3e1ce6c62c595fbf75c0462c0 | [
"MIT"
] | 1 | 2016-09-22T03:47:46.000Z | 2016-09-22T03:47:46.000Z | pattern_matching.exs | ariarijp/elixir-crash-course | f63502a2bb59e3a3e1ce6c62c595fbf75c0462c0 | [
"MIT"
] | null | null | null | pattern_matching.exs | ariarijp/elixir-crash-course | f63502a2bb59e3a3e1ce6c62c595fbf75c0462c0 | [
"MIT"
] | null | null | null | def loop_through([h|t]) do
IO.inspect h
loop_through t
end
def loop_through([]) do
:ok
end
| 10.888889 | 26 | 0.683673 |
f7f758177eafec98bfb7f2e2d4ab7d27c2d7104b | 963 | ex | Elixir | lib/media_server_web/live/home_live/index.ex | midarrlabs/midarr-server | f12c6347e41a96517bbb5ed1ad12b65d10b8d30a | [
"MIT"
] | 538 | 2022-02-02T21:46:52.000Z | 2022-03-29T20:50:34.000Z | lib/media_server_web/live/home_live/index.ex | midarrlabs/midarr-server | f12c6347e41a96517bbb5ed1ad12b65d10b8d30a | [
"MIT"
] | 48 | 2022-02-03T11:46:09.000Z | 2022-03-31T04:44:53.000Z | lib/media_server_web/live/home_live/index.ex | midarrlabs/midarr-server | f12c6347e41a96517bbb5ed1ad12b65d10b8d30a | [
"MIT"
] | 15 | 2022-02-03T05:55:14.000Z | 2022-02-28T11:09:03.000Z | defmodule MediaServerWeb.HomeLive.Index do
use MediaServerWeb, :live_view
alias MediaServer.Repo
alias MediaServer.Accounts
alias MediaServerWeb.Repositories.Movies
alias MediaServerWeb.Repositories.Series
@impl true
def mount(_params, session, socket) do
{
:ok,
socket
|> assign(page_title: "Home")
|> assign(
:current_user,
Accounts.get_user_by_session_token(session["user_token"])
|> Repo.preload(:movie_continues)
|> Repo.preload(:episode_continues)
)
}
end
@impl true
def handle_params(_params, _url, socket) do
{
:noreply,
socket
|> assign(:latest_movies, Movies.get_latest(7))
|> assign(:latest_series, Series.get_latest(6))
|> assign(:movie_continues, socket.assigns.current_user.movie_continues |> Enum.take(4))
|> assign(:episode_continues, socket.assigns.current_user.episode_continues |> Enum.take(4))
}
end
end
| 26.75 | 98 | 0.676012 |
f7f75828ffb258dfdfee4e8d42acbb58c8f83de7 | 527 | exs | Elixir | priv/repo/migrations/20201201180222_create_transactions.exs | murilosrg/banking-api | 731a150d06d605958b53bfd27c4a1f6033527847 | [
"MIT"
] | null | null | null | priv/repo/migrations/20201201180222_create_transactions.exs | murilosrg/banking-api | 731a150d06d605958b53bfd27c4a1f6033527847 | [
"MIT"
] | null | null | null | priv/repo/migrations/20201201180222_create_transactions.exs | murilosrg/banking-api | 731a150d06d605958b53bfd27c4a1f6033527847 | [
"MIT"
] | null | null | null | defmodule Banking.Repo.Migrations.CreateTransactions do
use Ecto.Migration
def change do
create table(:transactions, primary_key: false) do
add :id, :uuid, primary_key: true
add :amount, :decimal, precision: 15, scale: 2, null: false
add :account_to, :uuid
add :type, :integer, null: false
add :account_from, references(:accounts, on_delete: :nothing, type: :uuid, null: false),
null: false
timestamps()
end
create index(:transactions, [:account_from])
end
end
| 26.35 | 94 | 0.669829 |
f7f75d50ce7977731a21fb0b00e691ef7913956b | 386 | ex | Elixir | web/views/error_view.ex | jwdotjs/battlestation | 9552cf2af11f5d5420ce90947ca58d9cca2f7c4a | [
"MIT"
] | null | null | null | web/views/error_view.ex | jwdotjs/battlestation | 9552cf2af11f5d5420ce90947ca58d9cca2f7c4a | [
"MIT"
] | null | null | null | web/views/error_view.ex | jwdotjs/battlestation | 9552cf2af11f5d5420ce90947ca58d9cca2f7c4a | [
"MIT"
] | null | null | null | defmodule Battlestation.ErrorView do
use Battlestation.Web, :view
def render("404.html", _assigns) do
"Page not found"
end
def render("500.html", _assigns) do
"Internal server error"
end
# In case no render clause matches or no
# template is found, let's render it as 500
def template_not_found(_template, assigns) do
render "500.html", assigns
end
end
| 21.444444 | 47 | 0.707254 |
f7f75f6c7bb4ca7c44f02e33777bc7a659052dec | 1,326 | ex | Elixir | lib/coherence_assent/router.ex | Schultzer/coherence_assent | 5adf518d7760641e686ca2c588def0e8f62ea093 | [
"MIT",
"Unlicense"
] | 22 | 2017-09-15T17:52:31.000Z | 2018-10-07T02:36:27.000Z | lib/coherence_assent/router.ex | Schultzer/coherence_assent | 5adf518d7760641e686ca2c588def0e8f62ea093 | [
"MIT",
"Unlicense"
] | 15 | 2017-11-01T15:39:37.000Z | 2019-03-11T18:02:04.000Z | lib/coherence_assent/router.ex | Schultzer/coherence_assent | 5adf518d7760641e686ca2c588def0e8f62ea093 | [
"MIT",
"Unlicense"
] | 9 | 2017-09-18T20:48:06.000Z | 2018-12-05T15:24:24.000Z | defmodule CoherenceAssent.Router do
@moduledoc """
Handles routing for CoherenceAssent.
## Usage
Configure `lib/my_project/web/router.ex` the following way:
defmodule MyProject.Router do
use MyProjectWeb, :router
use CoherenceAssent.Router
scope "/", MyProjectWeb do
pipe_through :browser
coherence_assent_routes
end
...
end
"""
defmacro __using__(_opts \\ []) do
quote do
import unquote(__MODULE__)
end
end
@doc """
CoherenceAssent router macro.
Use this macro to define the CoherenceAssent routes.
## Examples:
scope "/" do
coherence_assent_routes
end
"""
defmacro coherence_assent_routes(options \\ %{}) do
quote location: :keep do
options = Map.merge(%{scope: "auth"}, unquote(Macro.escape(options)))
scope "/#{options[:scope]}", as: "coherence_assent" do
get "/:provider", CoherenceAssent.AuthController, :index
get "/:provider/callback", CoherenceAssent.AuthController, :callback
delete "/:provider", CoherenceAssent.AuthController, :delete
get "/:provider/new", CoherenceAssent.RegistrationController, :add_login_field
post "/:provider/create", CoherenceAssent.RegistrationController, :create
end
end
end
end
| 25.5 | 86 | 0.657617 |
f7f7770a1a0316fd907ba756a7ca96be7fa15ce0 | 4,106 | ex | Elixir | lib/mastani_server/statistics/delegates/contribute.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | 1 | 2019-05-07T15:03:54.000Z | 2019-05-07T15:03:54.000Z | lib/mastani_server/statistics/delegates/contribute.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | lib/mastani_server/statistics/delegates/contribute.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | defmodule MastaniServer.Statistics.Delegate.Contribute do
import Ecto.Query, warn: false
import Helper.Utils
import ShortMaps
alias MastaniServer.Repo
alias MastaniServer.Accounts.User
alias MastaniServer.CMS.Community
alias MastaniServer.Statistics.{UserContribute, CommunityContribute}
alias Helper.{ORM, QueryBuilder}
@community_contribute_days get_config(:general, :community_contribute_days)
@user_contribute_months get_config(:general, :user_contribute_months)
def make_contribute(%Community{id: id}) do
today = Timex.today() |> Date.to_iso8601()
with {:ok, contribute} <- ORM.find_by(CommunityContribute, community_id: id, date: today) do
contribute |> inc_contribute_count(:community) |> done
else
{:error, _} ->
CommunityContribute |> ORM.create(%{community_id: id, date: today, count: 1})
end
end
def make_contribute(%User{id: id}) do
today = Timex.today() |> Date.to_iso8601()
with {:ok, contribute} <- ORM.find_by(UserContribute, user_id: id, date: today) do
contribute |> inc_contribute_count(:user) |> done
else
{:error, _} ->
UserContribute |> ORM.create(%{user_id: id, date: today, count: 1})
end
end
@doc """
Returns the list of user_contribute by latest 6 months.
"""
def list_contributes(%User{id: id}) do
user_id = integerfy(id)
"user_contributes"
|> where([c], c.user_id == ^user_id)
|> QueryBuilder.recent_inserted(months: @user_contribute_months)
|> select([c], %{date: c.date, count: c.count})
|> Repo.all()
|> to_contributes_map()
|> done
end
def list_contributes(%Community{id: id}) do
%Community{id: id}
|> get_contributes()
|> to_counts_digest(days: @community_contribute_days)
|> done
end
def list_contributes_digest(%Community{id: id}) do
%Community{id: id}
|> get_contributes()
|> to_counts_digest(days: @community_contribute_days)
|> done
end
defp get_contributes(%Community{id: id}) do
community_id = integerfy(id)
"community_contributes"
|> where([c], c.community_id == ^community_id)
|> QueryBuilder.recent_inserted(days: @community_contribute_days)
|> select([c], %{date: c.date, count: c.count})
|> Repo.all()
|> to_contribute_records()
end
defp to_contributes_map(data) do
end_date = Timex.today()
start_date = Timex.shift(Timex.today(), months: -6)
total_count = Enum.reduce(data, 0, &(&1.count + &2))
records = data
~m(start_date end_date total_count records)a
end
defp to_contribute_records(data) do
data
|> Enum.map(fn %{count: count, date: date} ->
%{
date: date,
count: count
}
end)
end
# 返回 count 数组,方便前端绘图
# example:
# from: [0,0,0,0,0,0]
# to: [0,30,3,8,0,0]
# 如果 7 天都有 count, 不用计算直接 map 返回
defp to_counts_digest(record, days: count) do
case length(record) == @community_contribute_days + 1 do
true ->
Enum.map(record, & &1.count)
false ->
today = Timex.today() |> Date.to_erl()
return_count = abs(count) + 1
enmpty_tuple = return_count |> repeat(0) |> List.to_tuple()
results =
Enum.reduce(record, enmpty_tuple, fn record, acc ->
diff = Timex.diff(Timex.to_date(record.date), today, :days)
index = diff + abs(count)
put_elem(acc, index, record.count)
end)
results |> Tuple.to_list()
end
end
defp inc_contribute_count(contribute, :community) do
CommunityContribute
|> where([c], c.community_id == ^contribute.community_id and c.date == ^contribute.date)
|> do_inc_count(contribute)
end
defp inc_contribute_count(contribute, :user) do
UserContribute
|> where([c], c.user_id == ^contribute.user_id and c.date == ^contribute.date)
|> do_inc_count(contribute)
end
defp do_inc_count(query, contribute, count \\ 1) do
{1, [result]} =
Repo.update_all(
from(p in query, select: p.count),
inc: [count: count]
)
put_in(contribute.count, result)
end
end
| 28.317241 | 96 | 0.649537 |
f7f77eebbd16474cb0b5878c9de8700bec57c571 | 4,989 | exs | Elixir | test/logger_file_backend_test.exs | MisterToolbox/logger_file_backend | ee10e27a5aa792ed927cfcbaff603935c9d6e8bf | [
"MIT"
] | null | null | null | test/logger_file_backend_test.exs | MisterToolbox/logger_file_backend | ee10e27a5aa792ed927cfcbaff603935c9d6e8bf | [
"MIT"
] | null | null | null | test/logger_file_backend_test.exs | MisterToolbox/logger_file_backend | ee10e27a5aa792ed927cfcbaff603935c9d6e8bf | [
"MIT"
] | null | null | null | defmodule LoggerFileBackendTest do
use ExUnit.Case, async: false
require Logger
@backend {LoggerFileBackend, :test}
import LoggerFileBackend, only: [prune: 1, metadata_matches?: 2]
# add the backend here instead of `config/test.exs` due to issue 2649
Logger.add_backend @backend
setup do
config [path: "test/logs/test.log", level: :debug]
on_exit fn ->
path() && File.rm_rf!(Path.dirname(path()))
end
end
test "does not crash if path isn't set" do
config path: nil
Logger.debug "foo"
assert {:error, :already_present} = Logger.add_backend(@backend)
end
test "can configure metadata_filter" do
config metadata_filter: [md_key: true]
Logger.debug("shouldn't", md_key: false)
Logger.debug("should", md_key: true)
refute log() =~ "shouldn't"
assert log() =~ "should"
config metadata_filter: nil
end
test "metadata_matches?" do
assert metadata_matches?([a: 1], [a: 1]) == true # exact match
assert metadata_matches?([b: 1], [a: 1]) == false # total mismatch
assert metadata_matches?([b: 1], nil) == true # default to allow
assert metadata_matches?([b: 1, a: 1], [a: 1]) == true # metadata is superset of filter
assert metadata_matches?([c: 1, b: 1, a: 1], [b: 1, a: 1]) == true # multiple filter keys subset of metadata
assert metadata_matches?([a: 1], [b: 1, a: 1]) == false # multiple filter keys superset of metadata
end
test "creates log file" do
refute File.exists?(path())
Logger.debug("this is a msg")
assert File.exists?(path())
assert log() =~ "this is a msg"
end
test "can log utf8 chars" do
Logger.debug("ß\uFFaa\u0222")
assert log() =~ "ßᆰȢ"
end
test "prune/1" do
assert prune(1) == "�"
assert prune(<<"hí", 233>>) == "hí�"
assert prune(["hi"|233]) == ["hi"|"�"]
assert prune([233|"hi"]) == [233|"hi"]
assert prune([[]|[]]) == [[]]
end
test "prunes invalid utf-8 codepoints" do
Logger.debug(<<"hi", 233>>)
assert log() =~ "hi�"
end
test "can configure format" do
config format: "$message [$level]\n"
Logger.debug("hello")
assert log() =~ "hello [debug]"
end
test "can configure metadata" do
config format: "$metadata$message\n", metadata: [:user_id, :auth]
Logger.debug("hello")
assert log() =~ "hello"
Logger.metadata(auth: true)
Logger.metadata(user_id: 11)
Logger.metadata(user_id: 13)
Logger.debug("hello")
assert log() =~ "user_id=13 auth=true hello"
end
test "can configure level" do
config level: :info
Logger.debug("hello")
refute File.exists?(path())
end
test "can configure path" do
new_path = "test/logs/test.log.2"
config path: new_path
assert new_path == path()
end
test "logs to new file after old file has been moved" do
config format: "$message\n"
Logger.debug "foo"
Logger.debug "bar"
assert log() == "foo\nbar\n"
{"", 0} = System.cmd("mv", [path(), path() <> ".1"])
Logger.debug "biz"
Logger.debug "baz"
assert log() == "biz\nbaz\n"
end
test "closes old log file after log file has been moved" do
Logger.debug "foo"
assert has_open(path())
new_path = path() <> ".1"
{"", 0} = System.cmd("mv", [path(), new_path])
assert has_open(new_path)
Logger.debug "bar"
assert has_open(path())
refute has_open(new_path)
end
test "closes old log file after path has been changed" do
Logger.debug "foo"
assert has_open(path())
org_path = path()
config path: path() <> ".new"
Logger.debug "bar"
assert has_open(path())
refute has_open(org_path)
end
test "log file rotate" do
config format: "$message\n"
config rotate: %{max_bytes: 4, keep: 4}
Logger.debug "rotate1"
Logger.debug "rotate2"
Logger.debug "rotate3"
Logger.debug "rotate4"
Logger.debug "rotate5"
Logger.debug "rotate6"
p = path()
assert File.read!("#{p}.4") == "rotate2\n"
assert File.read!("#{p}.3") == "rotate3\n"
assert File.read!("#{p}.2") == "rotate4\n"
assert File.read!("#{p}.1") == "rotate5\n"
assert File.read!(p) == "rotate6\n"
config rotate: nil
end
test "log file not rotate" do
config format: "$message\n"
config rotate: %{max_bytes: 100, keep: 4}
words = ~w(rotate1 rotate2 rotate3 rotate4 rotate5 rotate6)
words |> Enum.map(&(Logger.debug(&1)))
assert log() == Enum.join(words, "\n") <> "\n"
config rotate: nil
end
defp has_open(path) do
has_open(:os.type, path)
end
defp has_open({:unix,_}, path) do
case System.cmd("lsof", [path]) do
{output, 0} ->
output =~ System.get_pid
_ -> false
end
end
defp has_open(_, _) do
false
end
defp path do
{:ok, path} = :gen_event.call(Logger, @backend, :path)
path
end
defp log do
File.read!(path())
end
defp config(opts) do
Logger.configure_backend(@backend, opts)
end
end
| 23.870813 | 112 | 0.612147 |
f7f7aefb824fadf01fc3dc919a32af523797ec5b | 2,047 | exs | Elixir | config/config.exs | raymondboswel/elixir-companies | 863f6c4fe979b91abe3e15c6e3db3b41a014915f | [
"MIT"
] | null | null | null | config/config.exs | raymondboswel/elixir-companies | 863f6c4fe979b91abe3e15c6e3db3b41a014915f | [
"MIT"
] | null | null | null | config/config.exs | raymondboswel/elixir-companies | 863f6c4fe979b91abe3e15c6e3db3b41a014915f | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
use Mix.Config
config :companies, ecto_repos: [Companies.Repo]
# Configures the endpoint
config :companies, CompaniesWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "cnFp+p3HcWa0ZaS5YhEfuJlU2PIxvUinNThsTSXm4ZE2M7D/zYzpfIJGMVNLHtqv",
render_errors: [view: CompaniesWeb.ErrorView, accepts: ~w(html json)],
pubsub_server: Companies.PubSub,
live_view: [signing_salt: "IJL0bF+zIE2Ax4MFSi16HqrurNFhiYlD"]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix and Ecto
config :phoenix, :json_library, Jason
config :companies,
notifier: Notify.Console,
site_data: %{
name: "Elixir Companies"
},
results_per_page: 16
config :oauth2,
serializers: %{
"application/json" => Jason
}
config :ueberauth, Ueberauth,
providers: [
github: {Ueberauth.Strategy.Github, [default_scope: "user:email", send_redirect_uri: false]}
]
config :ueberauth, Ueberauth.Strategy.Github.OAuth,
client_id: System.get_env("GITHUB_CLIENT_ID"),
client_secret: System.get_env("GITHUB_CLIENT_SECRET")
config :scrivener_html,
routes_helper: CompaniesWeb.Router.Helpers,
view_style: :bulma
config :phoenix, :template_engines,
eex: Appsignal.Phoenix.Template.EExEngine,
exs: Appsignal.Phoenix.Template.ExsEngine
config :companies, Notify.Mailer, adapter: Bamboo.LocalAdapter
config :live_dashboard_history, LiveDashboardHistory,
router: CompaniesWeb.Router,
metrics: CompaniesWeb.Telemetry,
buffer_size: 500
#
config :companies, Companies.Repo,
ssl: true,
pool_size: 10
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 29.242857 | 96 | 0.766976 |
f7f7c18b8ce9eaee5213f65d42024e553ffa6ecc | 1,747 | ex | Elixir | lib/esi.ex | bmartin2015/esi | 6463d444b01c4373fbed23dad70a4206cd855153 | [
"MIT"
] | 15 | 2017-11-25T09:10:35.000Z | 2021-04-26T03:09:51.000Z | lib/esi.ex | bmartin2015/esi | 6463d444b01c4373fbed23dad70a4206cd855153 | [
"MIT"
] | 6 | 2018-01-19T20:14:20.000Z | 2019-08-03T12:58:39.000Z | lib/esi.ex | bmartin2015/esi | 6463d444b01c4373fbed23dad70a4206cd855153 | [
"MIT"
] | 13 | 2017-12-09T16:30:30.000Z | 2021-12-22T21:27:37.000Z | defmodule ESI do
@doc """
Execute a request.
## Arguments
- `request` -- the request
- `opts` -- any additional options to set on the request
"""
@spec request!(req :: ESI.Request.t(), opts :: ESI.Request.request_opts()) ::
{:ok, any} | {:error, any}
def request(req, opts \\ []) do
req
|> ESI.Request.options(opts)
|> ESI.Request.run()
end
@doc """
Execute a request and raise an error if it is not successful.
"""
@spec request!(req :: ESI.Request.t(), opts :: ESI.Request.request_opts()) :: any
def request!(req, opts \\ []) do
case request(req, opts) do
{:ok, result} ->
result
{:error, err} ->
raise "Request failed: #{err}"
end
end
@doc """
Generate a stream from a request, supporting automatic pagination.
## Examples
Paginating, without `stream!`; you need to manually handle incrementing the
`:page` option:
iex> ESI.API.Universe.groups() |> ESI.request! |> length
1000
iex> ESI.API.Universe.groups(page: 2) |> ESI.request! |> length
284
Paginating with `stream!`, you don't have to care about `:page`:
iex> ESI.API.Universe.groups() |> ESI.stream! |> Enum.take(1020) |> length
1020
Like any stream, you can use `Enum.to_list/1` to get all the items:
iex> ESI.API.Universe.groups() |> ESI.stream! |> Enum.to_list |> length
1284
It even works for requests that don't paginate:
iex> ESI.API.Universe.bloodlines() |> ESI.stream! |> Enum.to_list |> length
15
"""
@spec stream!(req :: ESI.Request.t(), opts :: ESI.Request.request_opts()) :: any
def stream!(req, opts \\ []) do
req
|> ESI.Request.options(opts)
|> ESI.Request.stream!()
end
end
| 25.691176 | 83 | 0.605037 |
f7f7c4bd08addd18a824ed7856a93852abb2bf7c | 1,868 | exs | Elixir | clients/cloud_asset/mix.exs | mopp/elixir-google-api | d496227d17600bccbdf8f6be9ad1b7e7219d7ec6 | [
"Apache-2.0"
] | null | null | null | clients/cloud_asset/mix.exs | mopp/elixir-google-api | d496227d17600bccbdf8f6be9ad1b7e7219d7ec6 | [
"Apache-2.0"
] | null | null | null | clients/cloud_asset/mix.exs | mopp/elixir-google-api | d496227d17600bccbdf8f6be9ad1b7e7219d7ec6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudAsset.Mixfile do
use Mix.Project
@version "0.33.3"
def project() do
[
app: :google_api_cloud_asset,
version: @version,
elixir: "~> 1.6",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/cloud_asset"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:google_gax, "~> 0.4"},
{:ex_doc, "~> 0.16", only: :dev}
]
end
defp description() do
"""
Cloud Asset API client library. The cloud asset API manages the history and inventory of cloud resources.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching", "Daniel Azuma"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/cloud_asset",
"Homepage" => "https://cloud.google.com/asset-inventory/docs/quickstart"
}
]
end
end
| 27.880597 | 109 | 0.658994 |
f7f7ca5a56eb3139a5888c456a2910d55f3a93d6 | 12,001 | ex | Elixir | lib/boundary/mix/tasks/compile/boundary.ex | randycoulman/boundary | 2fdea46e702400c152670262d5ca1f31edbd4fa1 | [
"MIT"
] | null | null | null | lib/boundary/mix/tasks/compile/boundary.ex | randycoulman/boundary | 2fdea46e702400c152670262d5ca1f31edbd4fa1 | [
"MIT"
] | null | null | null | lib/boundary/mix/tasks/compile/boundary.ex | randycoulman/boundary | 2fdea46e702400c152670262d5ca1f31edbd4fa1 | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.Compile.Boundary do
# credo:disable-for-this-file Credo.Check.Readability.Specs
use Boundary, classify_to: Boundary.Mix
use Mix.Task.Compiler
alias Boundary.Mix.Xref
@moduledoc """
Verifies cross-module function calls according to defined boundaries.
This compiler reports all cross-boundary function calls which are not permitted, according to
the current definition of boundaries. For details on defining boundaries, see the docs for the
`Boundary` module.
## Usage
Once you have configured the boundaries, you need to include the compiler in `mix.exs`:
```
defmodule MySystem.MixProject do
# ...
def project do
[
compilers: [:boundary] ++ Mix.compilers(),
# ...
]
end
# ...
end
```
When developing a library, it's advised to use this compiler only in `:dev` and `:test`
environments:
```
defmodule Boundary.MixProject do
# ...
def project do
[
compilers: extra_compilers(Mix.env()) ++ Mix.compilers(),
# ...
]
end
# ...
defp extra_compilers(:prod), do: []
defp extra_compilers(_env), do: [:boundary]
end
```
## Warnings
Every invalid cross-boundary usage is reported as a compiler warning. Consider the following example:
```
defmodule MySystem.User do
def auth() do
MySystemWeb.Endpoint.url()
end
end
```
Assuming that calls from `MySystem` to `MySystemWeb` are not allowed, you'll get the following warning:
```
$ mix compile
warning: forbidden reference to MySystemWeb
(references from MySystem to MySystemWeb are not allowed)
lib/my_system/user.ex:3
```
Since the compiler emits warnings, `mix compile` will still succeed, and you can normally start
your system, even if some boundary rules are violated. The compiler doesn't force you to immediately
fix these violations, which is a deliberate decision made to avoid disrupting the development flow.
At the same time, it's worth enforcing boundaries on the CI. This can easily be done by providing
the `--warnings-as-errors` option to `mix compile`.
"""
@recursive true
@impl Mix.Task.Compiler
def run(argv) do
Xref.start_link()
Mix.Task.Compiler.after_compiler(:app, &after_compiler(&1, argv))
tracers = Code.get_compiler_option(:tracers)
Code.put_compiler_option(:tracers, [__MODULE__ | tracers])
{:ok, []}
end
@doc false
def trace({remote, meta, to_module, _name, _arity}, env)
when remote in ~w/remote_function imported_function remote_macro imported_macro/a do
mode = if is_nil(env.function) or remote in ~w/remote_macro imported_macro/a, do: :compile, else: :runtime
record(to_module, meta, env, mode, :call)
end
def trace({local, _meta, _to_module, _name, _arity}, env)
when local in ~w/local_function local_macro/a,
# We need to initialize module although we're not going to record the call, to correctly remove previously
# recorded entries when the module is recompiled.
do: initialize_module(env.module)
def trace({:struct_expansion, meta, to_module, _keys}, env),
do: record(to_module, meta, env, :compile, :struct_expansion)
def trace({:alias_reference, meta, to_module}, env) do
unless env.function == {:boundary, 1} do
mode = if is_nil(env.function), do: :compile, else: :runtime
record(to_module, meta, env, mode, :alias_reference)
end
:ok
end
def trace(_event, _env), do: :ok
defp record(to_module, meta, env, mode, type) do
# We need to initialize module even if we're not going to record the call, to correctly remove previously
# recorded entries when the module is recompiled.
initialize_module(env.module)
unless env.module in [nil, to_module] or system_module?(to_module) or
not String.starts_with?(Atom.to_string(to_module), "Elixir.") do
Xref.record(
env.module,
%{
from_function: env.function,
to: to_module,
mode: mode,
type: type,
file: Path.relative_to_cwd(env.file),
line: Keyword.get(meta, :line, env.line)
}
)
end
:ok
end
defp initialize_module(module),
do: unless(is_nil(module), do: Xref.initialize_module(module))
system_apps = ~w/elixir stdlib kernel/a
system_apps
|> Stream.each(&Application.load/1)
|> Stream.flat_map(&Application.spec(&1, :modules))
|> Enum.each(fn module -> defp system_module?(unquote(module)), do: true end)
defp system_module?(module), do: :code.which(module) == :preloaded
defp after_compiler({:error, _} = status, _argv), do: status
defp after_compiler({status, diagnostics}, argv) when status in [:ok, :noop] do
# We're reloading the app to make sure we have the latest version. This fixes potential stale state in ElixirLS.
Application.unload(Boundary.Mix.app_name())
Application.load(Boundary.Mix.app_name())
tracers = Enum.reject(Code.get_compiler_option(:tracers), &(&1 == __MODULE__))
Code.put_compiler_option(:tracers, tracers)
Xref.flush(Application.spec(Boundary.Mix.app_name(), :modules) || [])
view =
case Boundary.Mix.read_manifest("boundary_view") do
nil -> rebuild_view()
view -> Boundary.View.refresh(view) || rebuild_view()
end
Boundary.Mix.write_manifest("boundary_view", Boundary.View.drop_main_app(view))
errors = check(view, Xref.entries())
print_diagnostic_errors(errors)
{status(errors, argv), diagnostics ++ errors}
end
defp rebuild_view do
Boundary.Mix.load_app()
Boundary.View.build(Boundary.Mix.app_name())
end
defp status([], _), do: :ok
defp status([_ | _], argv), do: if(warnings_as_errors?(argv), do: :error, else: :ok)
defp warnings_as_errors?(argv) do
{parsed, _argv, _errors} = OptionParser.parse(argv, strict: [warnings_as_errors: :boolean])
Keyword.get(parsed, :warnings_as_errors, false)
end
defp print_diagnostic_errors(errors) do
if errors != [], do: Mix.shell().info("")
Enum.each(errors, &print_diagnostic_error/1)
end
defp print_diagnostic_error(error) do
Mix.shell().info([severity(error.severity), error.message, location(error)])
end
defp location(error) do
if error.file != nil and error.file != "" do
pos = if error.position != nil, do: ":#{error.position}", else: ""
"\n #{error.file}#{pos}\n"
else
"\n"
end
end
defp severity(severity), do: [:bright, color(severity), "#{severity}: ", :reset]
defp color(:error), do: :red
defp color(:warning), do: :yellow
defp check(application, entries) do
Boundary.errors(application, entries)
|> Stream.map(&to_diagnostic_error/1)
|> Enum.sort_by(&{&1.file, &1.position})
rescue
e in Boundary.Error ->
[diagnostic(e.message, file: e.file, position: e.line)]
end
defp to_diagnostic_error({:unclassified_module, module}),
do: diagnostic("#{inspect(module)} is not included in any boundary", file: module_source(module))
defp to_diagnostic_error({:unknown_dep, dep}) do
diagnostic("unknown boundary #{inspect(dep.name)} is listed as a dependency",
file: Path.relative_to_cwd(dep.file),
position: dep.line
)
end
defp to_diagnostic_error({:check_in_false_dep, dep}) do
diagnostic("boundary #{inspect(dep.name)} can't be a dependency because it has check.in set to false",
file: Path.relative_to_cwd(dep.file),
position: dep.line
)
end
defp to_diagnostic_error({:forbidden_dep, dep}) do
diagnostic(
"#{inspect(dep.name)} can't be listed as a dependency because it's not a sibling, a parent, or a dep of some ancestor",
file: Path.relative_to_cwd(dep.file),
position: dep.line
)
end
defp to_diagnostic_error({:unknown_export, export}) do
diagnostic("unknown module #{inspect(export.name)} is listed as an export",
file: Path.relative_to_cwd(export.file),
position: export.line
)
end
defp to_diagnostic_error({:export_not_in_boundary, export}) do
diagnostic("module #{inspect(export.name)} can't be exported because it's not a part of this boundary",
file: Path.relative_to_cwd(export.file),
position: export.line
)
end
defp to_diagnostic_error({:cycle, cycle}) do
cycle = cycle |> Stream.map(&inspect/1) |> Enum.join(" -> ")
diagnostic("dependency cycle found:\n#{cycle}\n")
end
defp to_diagnostic_error({:unknown_boundary, info}) do
diagnostic("unknown boundary #{inspect(info.name)}",
file: Path.relative_to_cwd(info.file),
position: info.line
)
end
defp to_diagnostic_error({:cant_reclassify, info}) do
diagnostic("only mix task and protocol implementation can be reclassified",
file: Path.relative_to_cwd(info.file),
position: info.line
)
end
defp to_diagnostic_error({:invalid_reference, error}) do
reason =
case error.type do
:normal ->
"(references from #{inspect(error.from_boundary)} to #{inspect(error.to_boundary)} are not allowed)"
:runtime ->
"(runtime references from #{inspect(error.from_boundary)} to #{inspect(error.to_boundary)} are not allowed)"
:not_exported ->
module = inspect(error.reference.to)
"(module #{module} is not exported by its owner boundary #{inspect(error.to_boundary)})"
:invalid_external_dep_call ->
"(references from #{inspect(error.from_boundary)} to #{inspect(error.to_boundary)} are not allowed)"
end
message = "forbidden reference to #{inspect(error.reference.to)}\n #{reason}"
diagnostic(message, file: Path.relative_to_cwd(error.reference.file), position: error.reference.line)
end
defp to_diagnostic_error({:unknown_option, %{name: :ignore?, value: value} = data}) do
diagnostic(
"ignore?: #{value} is deprecated, use check: [in: #{not value}, out: #{not value}] instead",
file: Path.relative_to_cwd(data.file),
position: data.line
)
end
defp to_diagnostic_error({:unknown_option, data}) do
diagnostic("unknown option #{inspect(data.name)}",
file: Path.relative_to_cwd(data.file),
position: data.line
)
end
defp to_diagnostic_error({:deps_in_check_out_false, data}) do
diagnostic("deps can't be listed if check.out is set to false",
file: Path.relative_to_cwd(data.file),
position: data.line
)
end
defp to_diagnostic_error({:apps_in_check_out_false, data}) do
diagnostic("check apps can't be listed if check.out is set to false",
file: Path.relative_to_cwd(data.file),
position: data.line
)
end
defp to_diagnostic_error({:exports_in_check_in_false, data}) do
diagnostic("can't export modules if check.in is set to false",
file: Path.relative_to_cwd(data.file),
position: data.line
)
end
defp to_diagnostic_error({:invalid_type, data}) do
diagnostic("invalid type",
file: Path.relative_to_cwd(data.file),
position: data.line
)
end
defp to_diagnostic_error({:invalid_ignores, boundary}) do
diagnostic("can't disable checks in a sub-boundary",
file: Path.relative_to_cwd(boundary.file),
position: boundary.line
)
end
defp to_diagnostic_error({:ancestor_with_ignored_checks, boundary, ancestor}) do
diagnostic("sub-boundary inside a boundary with disabled checks (#{inspect(ancestor.name)})",
file: Path.relative_to_cwd(boundary.file),
position: boundary.line
)
end
defp module_source(module) do
module.module_info(:compile)
|> Keyword.fetch!(:source)
|> to_string()
|> Path.relative_to_cwd()
catch
_, _ -> ""
end
def diagnostic(message, opts \\ []) do
%Mix.Task.Compiler.Diagnostic{
compiler_name: "boundary",
details: nil,
file: "unknown",
message: message,
position: nil,
severity: :warning
}
|> Map.merge(Map.new(opts))
end
end
| 31.090674 | 125 | 0.67711 |
f7f7d1ff1fe562ddd8524085847d159428e0f0e5 | 984 | ex | Elixir | examples/ohai/login_handler.ex | jeffweiss/exirc | d56e1d93ef83d350606e489021a276f8a49e9480 | [
"MIT"
] | 147 | 2015-01-12T15:05:12.000Z | 2022-03-17T06:32:41.000Z | examples/ohai/login_handler.ex | jeffweiss/exirc | d56e1d93ef83d350606e489021a276f8a49e9480 | [
"MIT"
] | 74 | 2015-01-24T23:07:53.000Z | 2021-04-23T14:27:43.000Z | examples/ohai/login_handler.ex | jeffweiss/exirc | d56e1d93ef83d350606e489021a276f8a49e9480 | [
"MIT"
] | 51 | 2015-02-13T21:00:38.000Z | 2022-03-03T15:38:12.000Z | defmodule LoginHandler do
@moduledoc """
This is an example event handler that listens for login events and then
joins the appropriate channels. We actually need this because we can't
join channels until we've waited for login to complete. We could just
attempt to sleep until login is complete, but that's just hacky. This
as an event handler is a far more elegant solution.
"""
def start_link(client, channels) do
GenServer.start_link(__MODULE__, [client, channels])
end
def init([client, channels]) do
ExIRC.Client.add_handler client, self
{:ok, {client, channels}}
end
def handle_info(:logged_in, state = {client, channels}) do
debug "Logged in to server"
channels |> Enum.map(&ExIRC.Client.join client, &1)
{:noreply, state}
end
# Catch-all for messages you don't care about
def handle_info(_msg, state) do
{:noreply, state}
end
defp debug(msg) do
IO.puts IO.ANSI.yellow() <> msg <> IO.ANSI.reset()
end
end
| 29.818182 | 73 | 0.705285 |
f7f7d9295f798f1cf3a1f596c2ec406d99fafd09 | 183 | ex | Elixir | testData/org/elixir_lang/inspection/no_parentheses_strict_test_case/FunctionSpaceKeywordsInParentheses.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/inspection/no_parentheses_strict_test_case/FunctionSpaceKeywordsInParentheses.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/inspection/no_parentheses_strict_test_case/FunctionSpaceKeywordsInParentheses.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | function <error descr="unexpected parenthesis. If you are making a function call, do not insert spaces in between the function name and the opening parentheses.">(key: value)</error>
| 91.5 | 182 | 0.786885 |
f7f7e4dfff7d1f6730fad1b6fc7e45749f62ff0e | 17,894 | ex | Elixir | lib/elixir/lib/task/supervisor.ex | britto/elixir | 1f6e7093cff4b68dada60b924399bc8404d39a7e | [
"Apache-2.0"
] | 2 | 2020-06-02T18:00:28.000Z | 2021-12-10T03:21:42.000Z | lib/elixir/lib/task/supervisor.ex | britto/elixir | 1f6e7093cff4b68dada60b924399bc8404d39a7e | [
"Apache-2.0"
] | 1 | 2020-09-14T16:23:33.000Z | 2021-03-25T17:38:59.000Z | lib/elixir/lib/task/supervisor.ex | britto/elixir | 1f6e7093cff4b68dada60b924399bc8404d39a7e | [
"Apache-2.0"
] | null | null | null | defmodule Task.Supervisor do
@moduledoc """
A task supervisor.
This module defines a supervisor which can be used to dynamically
supervise tasks.
A task supervisor is started with no children, often under a
supervisor and a name:
children = [
{Task.Supervisor, name: MyApp.TaskSupervisor}
]
Supervisor.start_link(children, strategy: :one_for_one)
The options given in the child specification are documented in `start_link/1`.
See the `Task` module for more examples.
## Name registration
A `Task.Supervisor` is bound to the same name registration rules as a
`GenServer`. Read more about them in the `GenServer` docs.
"""
@typedoc "Option values used by `start_link`"
@type option ::
DynamicSupervisor.option()
# :permanent | :transient | :temporary here because :supervisor.restart() is not exported
| {:restart, :permanent | :transient | :temporary}
# :brutal_kill | timeout() here because :supervisor.shutdown() is not exported
| {:shutdown, :brutal_kill | timeout()}
@doc false
def child_spec(opts) when is_list(opts) do
id =
case Keyword.get(opts, :name, Task.Supervisor) do
name when is_atom(name) -> name
{:global, name} -> name
{:via, _module, name} -> name
end
%{
id: id,
start: {Task.Supervisor, :start_link, [opts]},
type: :supervisor
}
end
@doc """
Starts a new supervisor.
## Examples
A task supervisor is typically started under a supervision tree using
the tuple format:
{Task.Supervisor, name: MyApp.TaskSupervisor}
You can also start it by calling `start_link/1` directly:
Task.Supervisor.start_link(name: MyApp.TaskSupervisor)
But this is recommended only for scripting and should be avoided in
production code. Generally speaking, processes should always be started
inside supervision trees.
## Options
* `:name` - used to register a supervisor name, the supported values are
described under the `Name Registration` section in the `GenServer` module
docs;
* `:max_restarts`, `:max_seconds` and `:max_children` - as specified in
`DynamicSupervisor`;
This function could also receive `:restart` and `:shutdown` as options
but those two options have been deprecated and it is now preferred to
give them directly to `start_child` and `async`.
"""
@spec start_link([option]) :: Supervisor.on_start()
def start_link(options \\ []) do
{restart, options} = Keyword.pop(options, :restart)
{shutdown, options} = Keyword.pop(options, :shutdown)
if restart || shutdown do
IO.warn(
":restart and :shutdown options in Task.Supervisor.start_link/1 " <>
"are deprecated. Please pass those options on start_child/3 instead"
)
end
keys = [:max_children, :max_seconds, :max_restarts]
{sup_opts, start_opts} = Keyword.split(options, keys)
restart_and_shutdown = {restart || :temporary, shutdown || 5000}
DynamicSupervisor.start_link(__MODULE__, {restart_and_shutdown, sup_opts}, start_opts)
end
@doc false
def init({{_restart, _shutdown} = arg, options}) do
Process.put(__MODULE__, arg)
DynamicSupervisor.init([strategy: :one_for_one] ++ options)
end
@doc """
Starts a task that can be awaited on.
The `supervisor` must be a reference as defined in `Supervisor`.
The task will still be linked to the caller, see `Task.async/3` for
more information and `async_nolink/2` for a non-linked variant.
Raises an error if `supervisor` has reached the maximum number of
children.
## Options
* `:shutdown` - `:brutal_kill` if the tasks must be killed directly on shutdown
or an integer indicating the timeout value, defaults to 5000 milliseconds.
"""
@spec async(Supervisor.supervisor(), (() -> any), Keyword.t()) :: Task.t()
def async(supervisor, fun, options \\ []) do
async(supervisor, :erlang, :apply, [fun, []], options)
end
@doc """
Starts a task that can be awaited on.
The `supervisor` must be a reference as defined in `Supervisor`.
The task will still be linked to the caller, see `Task.async/3` for
more information and `async_nolink/2` for a non-linked variant.
Raises an error if `supervisor` has reached the maximum number of
children.
## Options
* `:shutdown` - `:brutal_kill` if the tasks must be killed directly on shutdown
or an integer indicating the timeout value, defaults to 5000 milliseconds.
"""
@spec async(Supervisor.supervisor(), module, atom, [term], Keyword.t()) :: Task.t()
def async(supervisor, module, fun, args, options \\ []) do
async(supervisor, :link, module, fun, args, options)
end
@doc """
Starts a task that can be awaited on.
The `supervisor` must be a reference as defined in `Supervisor`.
The task won't be linked to the caller, see `Task.async/3` for
more information.
Raises an error if `supervisor` has reached the maximum number of
children.
## Options
* `:shutdown` - `:brutal_kill` if the tasks must be killed directly on shutdown
or an integer indicating the timeout value, defaults to 5000 milliseconds.
## Compatibility with OTP behaviours
If you create a task using `async_nolink` inside an OTP behaviour
like `GenServer`, you should match on the message coming from the
task inside your `c:GenServer.handle_info/2` callback.
The reply sent by the task will be in the format `{ref, result}`,
where `ref` is the monitor reference held by the task struct
and `result` is the return value of the task function.
Keep in mind that, regardless of how the task created with `async_nolink`
terminates, the caller's process will always receive a `:DOWN` message
with the same `ref` value that is held by the task struct. If the task
terminates normally, the reason in the `:DOWN` message will be `:normal`.
## Examples
Typically, you use `async_nolink/3` when there is a reasonable expectation that
the task may fail, and you don't want it to take down the caller. Let's see an
example where a `GenServer` is meant to run a single task and track its status:
defmodule MyApp.Server do
use GenServer
# ...
def start_task do
GenServer.call(__MODULE__, :start_task)
end
# In this case the task is already running, so we just return :ok.
def handle_call(:start_task, _from, %{ref: ref} = state) when is_reference(ref) do
{:reply, :ok, state}
end
# The task is not running yet, so let's start it.
def handle_call(:start_task, _from, %{ref: nil} = state) do
task =
Task.Supervisor.async_nolink(MyApp.TaskSupervisor, fn ->
...
end)
# We return :ok and the server will continue running
{:reply, :ok, %{state | ref: task.ref}}
end
# The task completed successfully
def handle_info({ref, answer}, %{ref: ref} = state) do
# We don't care about the DOWN message now, so let's demonitor and flush it
Process.demonitor(ref, [:flush])
# Do something with the result and then return
{:noreply, %{state | ref: nil}}
end
# The task failed
def handle_info({:DOWN, ref, :process, _pid, _reason}, %{ref: ref} = state) do
# Log and possibly restart the task...
{:noreply, %{state | ref: nil}}
end
end
"""
@spec async_nolink(Supervisor.supervisor(), (() -> any), Keyword.t()) :: Task.t()
def async_nolink(supervisor, fun, options \\ []) do
async_nolink(supervisor, :erlang, :apply, [fun, []], options)
end
@doc """
Starts a task that can be awaited on.
The `supervisor` must be a reference as defined in `Supervisor`.
The task won't be linked to the caller, see `Task.async/3` for
more information.
Raises an error if `supervisor` has reached the maximum number of
children.
Note this function requires the task supervisor to have `:temporary`
as the `:restart` option (the default), as `async_nolink/4` keeps a
direct reference to the task which is lost if the task is restarted.
"""
@spec async_nolink(Supervisor.supervisor(), module, atom, [term], Keyword.t()) :: Task.t()
def async_nolink(supervisor, module, fun, args, options \\ []) do
async(supervisor, :nolink, module, fun, args, options)
end
@doc """
Returns a stream where the given function (`module` and `function`)
is mapped concurrently on each element in `enumerable`.
Each element will be prepended to the given `args` and processed by its
own task. The tasks will be spawned under the given `supervisor` and
linked to the current process, similarly to `async/4`.
When streamed, each task will emit `{:ok, value}` upon successful
completion or `{:exit, reason}` if the caller is trapping exits.
The order of results depends on the value of the `:ordered` option.
The level of concurrency and the time tasks are allowed to run can
be controlled via options (see the "Options" section below).
If you find yourself trapping exits to handle exits inside
the async stream, consider using `async_stream_nolink/6` to start tasks
that are not linked to the calling process.
## Options
* `:max_concurrency` - sets the maximum number of tasks to run
at the same time. Defaults to `System.schedulers_online/0`.
* `:ordered` - whether the results should be returned in the same order
as the input stream. This option is useful when you have large
streams and don't want to buffer results before they are delivered.
This is also useful when you're using the tasks for side effects.
Defaults to `true`.
* `:timeout` - the maximum amount of time to wait (in milliseconds)
without receiving a task reply (across all running tasks).
Defaults to `5000`.
* `:on_timeout` - what do to when a task times out. The possible
values are:
* `:exit` (default) - the process that spawned the tasks exits.
* `:kill_task` - the task that timed out is killed. The value
emitted for that task is `{:exit, :timeout}`.
* `:shutdown` - `:brutal_kill` if the tasks must be killed directly on shutdown
or an integer indicating the timeout value. Defaults to `5000` milliseconds.
## Examples
Let's build a stream and then enumerate it:
stream = Task.Supervisor.async_stream(MySupervisor, collection, Mod, :expensive_fun, [])
Enum.to_list(stream)
"""
@doc since: "1.4.0"
@spec async_stream(Supervisor.supervisor(), Enumerable.t(), module, atom, [term], keyword) ::
Enumerable.t()
def async_stream(supervisor, enumerable, module, function, args, options \\ [])
when is_atom(module) and is_atom(function) and is_list(args) do
build_stream(supervisor, :link, enumerable, {module, function, args}, options)
end
@doc """
Returns a stream that runs the given function `fun` concurrently
on each element in `enumerable`.
Each element in `enumerable` is passed as argument to the given function `fun`
and processed by its own task. The tasks will be spawned under the given
`supervisor` and linked to the current process, similarly to `async/2`.
See `async_stream/6` for discussion, options, and examples.
"""
@doc since: "1.4.0"
@spec async_stream(Supervisor.supervisor(), Enumerable.t(), (term -> term), keyword) ::
Enumerable.t()
def async_stream(supervisor, enumerable, fun, options \\ []) when is_function(fun, 1) do
build_stream(supervisor, :link, enumerable, fun, options)
end
@doc """
Returns a stream where the given function (`module` and `function`)
is mapped concurrently on each element in `enumerable`.
Each element in `enumerable` will be prepended to the given `args` and processed
by its own task. The tasks will be spawned under the given `supervisor` and
will not be linked to the current process, similarly to `async_nolink/4`.
See `async_stream/6` for discussion, options, and examples.
"""
@doc since: "1.4.0"
@spec async_stream_nolink(
Supervisor.supervisor(),
Enumerable.t(),
module,
atom,
[term],
keyword
) :: Enumerable.t()
def async_stream_nolink(supervisor, enumerable, module, function, args, options \\ [])
when is_atom(module) and is_atom(function) and is_list(args) do
build_stream(supervisor, :nolink, enumerable, {module, function, args}, options)
end
@doc """
Returns a stream that runs the given `function` concurrently on each
element in `enumerable`.
Each element in `enumerable` is passed as argument to the given function `fun`
and processed by its own task. The tasks will be spawned under the given
`supervisor` and will not be linked to the current process, similarly to `async_nolink/2`.
See `async_stream/6` for discussion and examples.
"""
@doc since: "1.4.0"
@spec async_stream_nolink(Supervisor.supervisor(), Enumerable.t(), (term -> term), keyword) ::
Enumerable.t()
def async_stream_nolink(supervisor, enumerable, fun, options \\ []) when is_function(fun, 1) do
build_stream(supervisor, :nolink, enumerable, fun, options)
end
@doc """
Terminates the child with the given `pid`.
"""
@spec terminate_child(Supervisor.supervisor(), pid) :: :ok | {:error, :not_found}
def terminate_child(supervisor, pid) when is_pid(pid) do
DynamicSupervisor.terminate_child(supervisor, pid)
end
@doc """
Returns all children PIDs.
"""
@spec children(Supervisor.supervisor()) :: [pid]
def children(supervisor) do
for {_, pid, _, _} <- DynamicSupervisor.which_children(supervisor), is_pid(pid), do: pid
end
@doc """
Starts a task as a child of the given `supervisor`.
Note that the spawned process is not linked to the caller, but
only to the supervisor. This command is useful in case the
task needs to perform side-effects (like I/O) and does not need
to report back to the caller.
## Options
* `:restart` - the restart strategy, may be `:temporary` (the default),
`:transient` or `:permanent`. `:temporary` means the task is never
restarted, `:transient` means it is restarted if the exit is not
`:normal`, `:shutdown` or `{:shutdown, reason}`. A `:permanent` restart
strategy means it is always restarted. It defaults to `:temporary`.
* `:shutdown` - `:brutal_kill` if the tasks must be killed directly on shutdown
or an integer indicating the timeout value, defaults to 5000 milliseconds.
"""
@spec start_child(Supervisor.supervisor(), (() -> any), keyword) ::
DynamicSupervisor.on_start_child()
def start_child(supervisor, fun, options \\ []) do
restart = options[:restart]
shutdown = options[:shutdown]
args = [get_owner(self()), get_callers(self()), {:erlang, :apply, [fun, []]}]
start_child_with_spec(supervisor, args, restart, shutdown)
end
@doc """
Starts a task as a child of the given `supervisor`.
Similar to `start_child/2` except the task is specified
by the given `module`, `fun` and `args`.
"""
@spec start_child(Supervisor.supervisor(), module, atom, [term], keyword) ::
DynamicSupervisor.on_start_child()
def start_child(supervisor, module, fun, args, options \\ [])
when is_atom(fun) and is_list(args) do
restart = options[:restart]
shutdown = options[:shutdown]
args = [get_owner(self()), get_callers(self()), {module, fun, args}]
start_child_with_spec(supervisor, args, restart, shutdown)
end
# Central helper: asks the supervisor process itself to start the task,
# forwarding the restart/shutdown values read by the callers above.
defp start_child_with_spec(supervisor, args, restart, shutdown) do
  # TODO: This only exists because we need to support reading restart/shutdown
  # from two different places. Remove this, the init function and the associated
  # clause in DynamicSupervisor on Elixir v2.0
  # TODO: Once we do this, we can also make it so the task arguments are never
  # sent to the supervisor if the restart is temporary
  # :infinity - starting a child may take arbitrarily long, so the call must
  # not fail with a timeout.
  GenServer.call(supervisor, {:start_task, args, restart, shutdown}, :infinity)
end
# Returns the `{node, name_or_pid, pid}` triple identifying the calling
# process, preferring its registered name when one exists.
defp get_owner(pid) do
  case Process.info(pid, :registered_name) do
    {:registered_name, name} when is_atom(name) -> {node(), name, pid}
    _ -> {node(), pid, pid}
  end
end
# Prepends `owner` to the caller chain stored under the :"$callers" key of
# the process dictionary, or starts a fresh chain when none is present.
defp get_callers(owner) do
  case :erlang.get(:"$callers") do
    [head | tail] -> [owner, head | tail]
    _ -> [owner]
  end
end
# Shared implementation for async/async_nolink: starts a monitored child
# under `supervisor`, optionally links it to the caller, and hands the
# child its {owner, ref} handshake message before returning a %Task{}.
defp async(supervisor, link_type, module, fun, args, options) do
  caller = self()
  spec_args = [get_owner(caller), get_callers(caller), :monitor, {module, fun, args}]

  case start_child_with_spec(supervisor, spec_args, :temporary, options[:shutdown]) do
    {:ok, child} ->
      if link_type == :link, do: Process.link(child)
      monitor_ref = Process.monitor(child)
      send(child, {caller, monitor_ref})
      %Task{pid: child, ref: monitor_ref, owner: caller}

    {:error, :max_children} ->
      raise """
      reached the maximum number of tasks for this task supervisor. The maximum number \
      of tasks that are allowed to run at the same time under this supervisor can be \
      configured with the :max_children option passed to Task.Supervisor.start_link/1\
      """
  end
end
# Shared implementation for the async_stream* variants. Returns an arity-2
# function (acc, reducer) driven by Task.Supervised.stream/6; the callback
# below starts one temporary child per element.
defp build_stream(supervisor, link_type, enumerable, fun, options) do
  shutdown = options[:shutdown]

  start_task = fn [owner | _] = callers, mfa ->
    spec_args = [get_owner(owner), callers, :monitor, mfa]

    case start_child_with_spec(supervisor, spec_args, :temporary, shutdown) do
      {:ok, pid} ->
        if link_type == :link, do: Process.link(pid)
        {:ok, link_type, pid}

      {:error, :max_children} ->
        {:error, :max_children}
    end
  end

  fn acc, reducer -> Task.Supervised.stream(enumerable, acc, reducer, fun, options, start_task) end
end
end
| 36.443992 | 99 | 0.677322 |
f7f8099cdab36e99519ef89e578c4920958865a9 | 1,588 | ex | Elixir | lib/dde_iotserver_liveview_web/views/error_helpers.ex | aslakjohansen/dde-iotserver-liveview | eaf063c366105da7ca30b55c6a7a7dd4505b0916 | [
"BSD-3-Clause"
] | null | null | null | lib/dde_iotserver_liveview_web/views/error_helpers.ex | aslakjohansen/dde-iotserver-liveview | eaf063c366105da7ca30b55c6a7a7dd4505b0916 | [
"BSD-3-Clause"
] | null | null | null | lib/dde_iotserver_liveview_web/views/error_helpers.ex | aslakjohansen/dde-iotserver-liveview | eaf063c366105da7ca30b55c6a7a7dd4505b0916 | [
"BSD-3-Clause"
] | null | null | null | defmodule DdeIotserverLiveviewWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
Enum.map(Keyword.get_values(form.errors, field), fn error ->
content_tag(:span, translate_error(error),
class: "invalid-feedback",
phx_feedback_for: input_name(form, field)
)
end)
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# When using gettext, we typically pass the strings we want
# to translate as a static argument:
#
# # Translate "is invalid" in the "errors" domain
# dgettext("errors", "is invalid")
#
# # Translate the number of files with plural rules
# dngettext("errors", "1 file", "%{count} files", count)
#
# Because the error messages we show in our forms and APIs
# are defined inside Ecto, we need to translate them dynamically.
# This requires us to call the Gettext module passing our gettext
# backend as first argument.
#
# Note we use the "errors" domain, which means translations
# should be written to the errors.po file. The :count option is
# set by Ecto and indicates we should also apply plural rules.
if count = opts[:count] do
Gettext.dngettext(DdeIotserverLiveviewWeb.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(DdeIotserverLiveviewWeb.Gettext, "errors", msg, opts)
end
end
end
| 33.083333 | 89 | 0.673804 |
f7f83b6b925a5d1124a77614b373c3e87f91cd67 | 1,815 | ex | Elixir | clients/big_query/lib/google_api/big_query/v2/connection.ex | chingor13/elixir-google-api | 85e13fa25c4c9f4618bb463ab4c79245fc6d2a7b | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/big_query/lib/google_api/big_query/v2/connection.ex | chingor13/elixir-google-api | 85e13fa25c4c9f4618bb463ab4c79245fc6d2a7b | [
"Apache-2.0"
] | null | null | null | clients/big_query/lib/google_api/big_query/v2/connection.ex | chingor13/elixir-google-api | 85e13fa25c4c9f4618bb463ab4c79245fc6d2a7b | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.BigQuery.V2.Connection do
  @moduledoc """
  Handle Tesla connections for GoogleApi.BigQuery.V2.
  """

  # A connection is a configured Tesla client.
  @type t :: Tesla.Env.client()

  # GoogleApi.Gax.Connection injects the connection-building functions.
  # The options declare the OAuth2 scopes this API may request, the OTP
  # app used for configuration lookup, and the base URL requests resolve
  # against.
  use GoogleApi.Gax.Connection,
    scopes: [
      # View and manage your data in Google BigQuery
      "https://www.googleapis.com/auth/bigquery",

      # Insert data into Google BigQuery
      "https://www.googleapis.com/auth/bigquery.insertdata",

      # View and manage your data across Google Cloud Platform services
      "https://www.googleapis.com/auth/cloud-platform",

      # View your data across Google Cloud Platform services
      "https://www.googleapis.com/auth/cloud-platform.read-only",

      # Manage your data and permissions in Google Cloud Storage
      "https://www.googleapis.com/auth/devstorage.full_control",

      # View your data in Google Cloud Storage
      "https://www.googleapis.com/auth/devstorage.read_only",

      # Manage your data in Google Cloud Storage
      "https://www.googleapis.com/auth/devstorage.read_write"
    ],
    otp_app: :google_api_big_query,
    base_url: "https://www.googleapis.com/"
end
f7f87a07a4873b1419275e9a9a337bc0f922850f | 2,199 | exs | Elixir | mix.exs | van-mronov/honeybadger-elixir | 4fa5b3ada7239f54a68d78c42a4da9bf2ece3e6c | [
"MIT"
] | null | null | null | mix.exs | van-mronov/honeybadger-elixir | 4fa5b3ada7239f54a68d78c42a4da9bf2ece3e6c | [
"MIT"
] | null | null | null | mix.exs | van-mronov/honeybadger-elixir | 4fa5b3ada7239f54a68d78c42a4da9bf2ece3e6c | [
"MIT"
] | null | null | null | defmodule Honeybadger.Mixfile do
use Mix.Project
def project do
[
app: :honeybadger,
version: "0.10.3",
elixir: "~> 1.3",
consolidate_protocols: Mix.env() != :test,
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
deps: deps(),
elixirc_paths: elixirc_paths(Mix.env()),
package: package(),
name: "Honeybadger",
homepage_url: "https://honeybadger.io",
source_url: "https://github.com/honeybadger-io/honeybadger-elixir",
description:
"Elixir client, Plug and error_logger for integrating with the Honeybadger.io exception tracker",
docs: [extras: ["README.md", "CHANGELOG.md"], main: "readme"]
]
end
# we use a non standard location for mix tasks as we don't want them to leak
# into the host apps mix tasks. This way our release task is shown only in our mix tasks
defp elixirc_paths(:dev), do: ["lib", "mix"]
defp elixirc_paths(_), do: ["lib"]
def application do
[
applications: [:hackney, :logger, :jason],
env: [
api_key: {:system, "HONEYBADGER_API_KEY"},
app: nil,
environment_name: Mix.env(),
exclude_envs: [:dev, :test],
origin: "https://api.honeybadger.io",
proxy: nil,
proxy_auth: {nil, nil},
use_logger: true,
notice_filter: Honeybadger.NoticeFilter.Default,
filter: Honeybadger.Filter.Default,
filter_keys: [:password, :credit_card],
filter_args: true,
filter_disable_url: false,
filter_disable_params: false,
filter_disable_session: false
],
mod: {Honeybadger, []}
]
end
defp deps do
[
{:hackney, "~> 1.1"},
{:jason, "~> 1.0"},
{:plug, ">= 1.0.0 and < 2.0.0", optional: true},
{:phoenix, ">= 1.0.0 and < 2.0.0", optional: true},
# Dev dependencies
{:ex_doc, "~> 0.7", only: :dev},
# Test dependencies
{:plug_cowboy, "~> 1.0", only: :test}
]
end
defp package do
[
licenses: ["MIT"],
maintainers: ["Joshua Wood"],
links: %{"GitHub" => "https://github.com/honeybadger-io/honeybadger-elixir"}
]
end
end
| 28.934211 | 105 | 0.58663 |
f7f8875ad227114a0b65dedec442e9cf66a6d591 | 1,023 | ex | Elixir | lib/high_score/endpoint.ex | dtcristo/high-score | 7dcd0a9aaf7cc89a7c268acc1ef4560fbf24c61c | [
"MIT"
] | 3 | 2015-11-16T10:27:50.000Z | 2015-11-25T02:22:57.000Z | lib/high_score/endpoint.ex | dtcristo/high-score | 7dcd0a9aaf7cc89a7c268acc1ef4560fbf24c61c | [
"MIT"
] | null | null | null | lib/high_score/endpoint.ex | dtcristo/high-score | 7dcd0a9aaf7cc89a7c268acc1ef4560fbf24c61c | [
"MIT"
] | null | null | null | defmodule HighScore.Endpoint do
use Phoenix.Endpoint, otp_app: :high_score
socket "/socket", HighScore.UserSocket
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phoenix.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/", from: :high_score, gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Logger
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Poison
plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session,
store: :cookie,
key: "_high_score_key",
signing_salt: "lqb2bsB3"
plug HighScore.Router
end
| 25.575 | 69 | 0.713587 |
f7f8a474a6dcc79194fac61d39e130ee6e3cf93d | 4,915 | exs | Elixir | exercise_2.29.exs | bschmeck/sicp-elixir | 4e6f959f506d0cb4e2692177cbdad1c87779cf7d | [
"MIT"
] | 2 | 2019-08-26T13:17:27.000Z | 2020-09-24T13:16:07.000Z | exercise_2.29.exs | bschmeck/sicp-elixir | 4e6f959f506d0cb4e2692177cbdad1c87779cf7d | [
"MIT"
] | null | null | null | exercise_2.29.exs | bschmeck/sicp-elixir | 4e6f959f506d0cb4e2692177cbdad1c87779cf7d | [
"MIT"
] | null | null | null | defmodule BinaryMobile do
def make(left, right), do: [left | right]
def left(mobile), do: hd mobile
def right(mobile), do: tl mobile
def total_weight(mobile), do: Branch.weight(left(mobile)) + Branch.weight(right(mobile))
def balanced?(n) when is_number(n), do: true
def balanced?(mobile) do
Branch.torque(left(mobile)) == Branch.torque(right(mobile)) &&
balanced?(Branch.structure(left(mobile))) &&
balanced?(Branch.structure(right(mobile)))
end
end
defmodule Branch do
  @moduledoc """
  A branch of a binary mobile, represented as `[length, structure]` where
  the structure is either a numeric weight or a nested mobile.
  """

  @doc "Builds a branch of the given length carrying `structure`."
  def make(len, structure), do: [len, structure]

  @doc "Returns the length of `branch`."
  def len([len | _]), do: len

  @doc "Returns the structure hanging from `branch`."
  def structure([_, structure | _]), do: structure

  @doc "Weight of the branch: the bare number, or the sub-mobile's total weight."
  def weight(branch) do
    case structure(branch) do
      n when is_number(n) -> n
      mobile -> BinaryMobile.total_weight(mobile)
    end
  end

  @doc "Torque exerted by the branch: length times weight."
  def torque(branch), do: len(branch) * weight(branch)
end
ExUnit.start
defmodule BinaryMobileTests do
  @moduledoc """
  Tests for the `BinaryMobile` and `Branch` modules: selectors, weight
  computation (including nested mobiles) and the `balanced?/1` predicate.
  """
  use ExUnit.Case, async: true

  # --- selectors and weight -------------------------------------------------

  test "mobiles have selectors" do
    left = Branch.make(1, 1)
    right = Branch.make(2, 2)
    mobile = BinaryMobile.make(left, right)
    assert BinaryMobile.left(mobile) == left
    assert BinaryMobile.right(mobile) == right
  end

  test "branches have selectors" do
    branch = Branch.make(5, 10)
    assert Branch.len(branch) == 5
    assert Branch.structure(branch) == 10
  end

  test "branches have weight" do
    branch = Branch.make(5, 10)
    assert Branch.weight(branch) == 10
  end

  test "branches with sub-mobiles have weight" do
    subbranch = Branch.make(1, 1)
    mobile = BinaryMobile.make(subbranch, subbranch)
    branch = Branch.make(5, mobile)
    assert Branch.weight(branch) == BinaryMobile.total_weight(mobile)
  end

  test "a mobile's weight is the weight of its branches" do
    left = Branch.make(1, 1)
    right = Branch.make(2, 2)
    mobile = BinaryMobile.make(left, right)
    assert BinaryMobile.total_weight(mobile) == Branch.weight(left) + Branch.weight(right)
  end

  test "the weight of nested mobiles is computed correctly" do
    b1 = Branch.make(1, 1)
    b2 = Branch.make(2, 2)
    left_mobile = BinaryMobile.make(b1, b2)
    left = Branch.make(10, left_mobile)
    sub_mobile1 = BinaryMobile.make(b1, b1)
    sub_mobile2 = BinaryMobile.make(b2, b2)
    left_subbranch = Branch.make(5, sub_mobile1)
    right_subbranch = Branch.make(3, sub_mobile2)
    right_mobile = BinaryMobile.make(left_subbranch, right_subbranch)
    right = Branch.make(10, right_mobile)
    mobile = BinaryMobile.make(left, right)
    # b1 appears three times and b2 three times across the whole structure.
    assert BinaryMobile.total_weight(mobile) == 3 * Branch.weight(b1) + 3 * Branch.weight(b2)
  end

  # --- balanced?/1 ----------------------------------------------------------

  test "symmetric mobile is balanced" do
    b = Branch.make 1, 1
    mobile = BinaryMobile.make b, b
    assert BinaryMobile.balanced?(mobile)
  end

  test "asymmetric mobile is not balanced" do
    left = Branch.make 1, 1
    right = Branch.make 2, 2
    mobile = BinaryMobile.make left, right
    refute BinaryMobile.balanced?(mobile)
  end

  test "a mobile with branches of equal torque is balanced" do
    # torque 1 * 4 == torque 2 * 2
    left = Branch.make 1, 4
    right = Branch.make 2, 2
    mobile = BinaryMobile.make left, right
    assert BinaryMobile.balanced? mobile
  end

  test "a mobile with balanced branches of equal torque is balanced" do
    b1 = Branch.make 1, 4
    b2 = Branch.make 2, 2
    left_mobile = BinaryMobile.make b1, b2
    left = Branch.make 10, left_mobile
    b3 = Branch.make 1, 8
    b4 = Branch.make 2, 4
    right_mobile = BinaryMobile.make b3, b4
    right = Branch.make 5, right_mobile
    mobile = BinaryMobile.make left, right
    assert BinaryMobile.balanced? mobile
  end

  test "a mobile with balanced branches of unequal torque is not balanced" do
    b1 = Branch.make 1, 4
    b2 = Branch.make 2, 2
    left_mobile = BinaryMobile.make b1, b2
    left = Branch.make 10, left_mobile
    b3 = Branch.make 1, 8
    b4 = Branch.make 2, 4
    right_mobile = BinaryMobile.make b3, b4
    right = Branch.make 10, right_mobile
    mobile = BinaryMobile.make left, right
    refute BinaryMobile.balanced? mobile
  end

  test "a mobile with unbalanced branches of equal torque is not balanced" do
    b1 = Branch.make 1, 4
    b2 = Branch.make 1, 2
    left_mobile = BinaryMobile.make b1, b2
    left = Branch.make 10, left_mobile
    b3 = Branch.make 1, 8
    b4 = Branch.make 2, 4
    right_mobile = BinaryMobile.make b3, b4
    right = Branch.make 5, right_mobile
    mobile = BinaryMobile.make left, right
    refute BinaryMobile.balanced? mobile
  end

  test "a mobile with unbalanced branches of unequal weight is not balanced" do
    b1 = Branch.make 1, 4
    b2 = Branch.make 1, 2
    left_mobile = BinaryMobile.make b1, b2
    left = Branch.make 10, left_mobile
    b3 = Branch.make 1, 8
    b4 = Branch.make 2, 4
    right_mobile = BinaryMobile.make b3, b4
    right = Branch.make 10, right_mobile
    mobile = BinaryMobile.make left, right
    refute BinaryMobile.balanced? mobile
  end
end
f7f8d1229f2edfc5b232595928f2372ad1218241 | 3,548 | exs | Elixir | test/scenic/view_port/tables_test.exs | tiger808/scenic | 77abc6d891b7a1a9262cdc47d7c5fac3c8609d1f | [
"Apache-2.0"
] | 1,716 | 2018-09-07T21:55:43.000Z | 2022-03-31T16:16:30.000Z | test/scenic/view_port/tables_test.exs | tiger808/scenic | 77abc6d891b7a1a9262cdc47d7c5fac3c8609d1f | [
"Apache-2.0"
] | 220 | 2018-09-08T01:28:00.000Z | 2022-03-22T03:55:17.000Z | test/scenic/view_port/tables_test.exs | tiger808/scenic | 77abc6d891b7a1a9262cdc47d7c5fac3c8609d1f | [
"Apache-2.0"
] | 137 | 2018-09-07T21:55:56.000Z | 2022-03-26T04:07:27.000Z | #
# Created by Boyd Multerer on 2018-08-22.
# Copyright © 2018 Kry10 Industries. All rights reserved.
#
# ==============================================================================
defmodule Scenic.ViewPort.TablesTest do
  @moduledoc """
  Integration-style test walking a graph through its whole lifecycle in
  Scenic.ViewPort.Tables: register, insert, subscribe/unsubscribe, update
  and delete, checking both the notifications and the backing ETS table.
  """
  use ExUnit.Case, async: false
  doctest Scenic.ViewPort.Tables

  alias Scenic.Graph
  alias Scenic.ViewPort.Tables

  import Scenic.Primitives

  # ets table names
  @ets_graphs_table :_scenic_graphs_table_

  # Two distinct fixture graphs used to observe updates.
  @graph Graph.build()
         |> text("Main Graph")

  @graph_1 Graph.build()
           |> text("Second Graph")

  # --------------------------------------------------------
  # Start a fresh Tables service per test and shut it down afterwards.
  setup do
    {:ok, svc} = Tables.start_link(nil)
    on_exit(fn -> Process.exit(svc, :normal) end)
    %{svc: svc}
  end

  # ============================================================================
  # integration style tests

  test "integration style test" do
    # Placeholder processes standing in for the scene's dynamic supervisors.
    {:ok, agent_0} = Agent.start(fn -> 1 + 1 end)
    {:ok, agent_1} = Agent.start(fn -> 1 + 1 end)

    scene_ref = make_ref()
    graph_key = {:graph, scene_ref, 123}
    registration = {self(), agent_0, agent_1}

    # register
    Tables.register_scene(scene_ref, registration)

    # is an async cast, so sleep to let it run
    Process.sleep(100)

    # confirm the registration by checking the scene
    assert Tables.get_scene_pid(scene_ref) == {:ok, self()}
    assert Tables.get_scene_pid(graph_key) == {:ok, self()}

    # insert a graph
    Tables.insert_graph(graph_key, self(), @graph, [])

    # not subscribed, so confirm no event received - also gives it time to process
    refute_receive({:"$gen_cast", {:update_graph, {:graph, ^scene_ref, 123}}})

    assert :ets.lookup(@ets_graphs_table, graph_key) == [{graph_key, self(), @graph, []}]
    assert Tables.get_graph(graph_key) == {:ok, @graph}
    assert Tables.get_refs(graph_key) == {:ok, []}
    assert Tables.get_graph_refs(graph_key) == {:ok, @graph, []}

    # subscribe to the graph_key
    Tables.subscribe(graph_key, self())

    # update the graph
    Tables.insert_graph(graph_key, self(), @graph_1, [])

    # subscribed. confirm event received - also gives it time to process
    assert_receive({:"$gen_cast", {:update_graph, {:graph, ^scene_ref, 123}}})

    assert :ets.lookup(@ets_graphs_table, graph_key) == [{graph_key, self(), @graph_1, []}]
    assert Tables.get_graph(graph_key) == {:ok, @graph_1}

    # unsubscribe to the graph_key
    Tables.unsubscribe(graph_key, self())

    # confirm unsubscribed
    Tables.insert_graph(graph_key, self(), @graph, [])

    # not subscribed, so confirm no event received - also gives it time to process
    refute_receive({:"$gen_cast", {:update_graph, {:graph, ^scene_ref, 123}}})
    assert :ets.lookup(@ets_graphs_table, graph_key) == [{graph_key, self(), @graph, []}]

    # subscribe to the graph_key again
    Tables.subscribe(graph_key, self())

    # update the graph
    Tables.insert_graph(graph_key, self(), @graph_1, [])

    # subscribed. confirm event received - also gives it time to process
    assert_receive({:"$gen_cast", {:update_graph, {:graph, ^scene_ref, 123}}})
    assert :ets.lookup(@ets_graphs_table, graph_key) == [{graph_key, self(), @graph_1, []}]
    assert Tables.get_graph(graph_key) == {:ok, @graph_1}

    # delete the graph
    Tables.delete_graph(graph_key)
    assert_receive({:"$gen_cast", {:delete_graph, {:graph, ^scene_ref, 123}}})
    assert :ets.lookup(@ets_graphs_table, graph_key) == []
    assert Tables.get_graph(graph_key) == nil

    Agent.stop(agent_0)
    Agent.stop(agent_1)
  end
end
f7f8edc171b913ef2f9e8385076fd3708f7656c6 | 1,344 | ex | Elixir | clients/health_care/lib/google_api/health_care/v1beta1/model/export_dicom_data_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/health_care/lib/google_api/health_care/v1beta1/model/export_dicom_data_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/health_care/lib/google_api/health_care/v1beta1/model/export_dicom_data_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HealthCare.V1beta1.Model.ExportDicomDataResponse do
  @moduledoc """
  Returns additional information in regards to a completed DICOM store export.

  ## Attributes

  """

  use GoogleApi.Gax.ModelBase

  # The response currently defines no fields; the struct exists so the
  # client surface matches the service definition.
  @type t :: %__MODULE__{}
end
defimpl Poison.Decoder, for: GoogleApi.HealthCare.V1beta1.Model.ExportDicomDataResponse do
  # Delegate decoding to the helper generated on the model module.
  def decode(value, options),
    do: GoogleApi.HealthCare.V1beta1.Model.ExportDicomDataResponse.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.HealthCare.V1beta1.Model.ExportDicomDataResponse do
  # Encoding is shared across all generated models via ModelBase.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 32 | 90 | 0.773065 |
f7f8f19a481777ee644b9d451f90dc9e558301c4 | 3,736 | ex | Elixir | lib/surface/catalogue/components/prop_input.ex | treebee/surface_catalogue | fc938a81deef099e88a977d5de62c3f0e86197b8 | [
"MIT"
] | null | null | null | lib/surface/catalogue/components/prop_input.ex | treebee/surface_catalogue | fc938a81deef099e88a977d5de62c3f0e86197b8 | [
"MIT"
] | null | null | null | lib/surface/catalogue/components/prop_input.ex | treebee/surface_catalogue | fc938a81deef099e88a977d5de62c3f0e86197b8 | [
"MIT"
] | null | null | null | defmodule Surface.Catalogue.Components.PropInput do
@moduledoc false
use Surface.Component
alias Surface.Components.Form.{TextInput, Checkbox, Select, NumberInput}
prop prop, :map
prop value, :any
def render(assigns) do
~H"""
<div class="field is-horizontal">
<div class="field-label is-small">
<label class="label">{{ label(@prop) }}</label>
</div>
<div class="field-body">
<div class="field" style="display:flex; align-items:center;">
<div class="control" style="width: 400px;">
{{ input(assigns) }}
</div>
</div>
</div>
</div>
"""
end
defp label(prop) do
required_str = if prop.opts[:required], do: "*", else: ""
"#{prop.name}#{required_str}"
end
defp input(%{prop: prop, value: value} = assigns) do
case {prop.type, get_choices(prop)} do
{:boolean, _} ->
~H"""
<Checkbox field={{ prop.name }} value={{ value }} opts={{ style: "height: 26px;" }}/>
"""
{:string, []} ->
~H"""
<TextInput
field={{ prop.name }}
value={{ value }}
class="input is-small"
opts={{ placeholder: value == nil && "nil", phx_keydown: "text_prop_keydown", phx_value_prop: prop.name }}
/>
"""
{:string, choices} ->
~H"""
<div class="select is-small">
<Select field={{ prop.name }} options={{ choices }} selected={{ value }}/>
</div>
"""
{:atom, []} ->
~H"""
<TextInput
field={{ prop.name }}
value={{ value_to_string(value) }}
class="input is-small"
opts={{ placeholder: value == nil && "nil" }}
/>
"""
{:atom, choices} ->
choices = Enum.map(choices, fn {k, v} -> {inspect(k), inspect(v)} end)
~H"""
<div class="select is-small">
<Select field={{ prop.name }} options={{ choices }} selected={{ value_to_string(value) }}/>
</div>
"""
{:css_class, _} ->
~H"""
<TextInput
field={{ prop.name }}
value={{ css_value_to_string(value) }}
class="input is-small"
opts={{ placeholder: value == nil && "nil", phx_keydown: "text_prop_keydown", phx_value_prop: prop.name }}
/>
"""
{:integer, []} ->
~H"""
<NumberInput
field={{ prop.name }}
value={{ value }}
class="input is-small"
opts={{ placeholder: value == nil && "nil" }}
/>
"""
{:integer, choices} ->
~H"""
<div class="select is-small">
<Select field={{ prop.name }} options={{ choices }} selected={{ value }}/>
</div>
"""
{type, []} when type in [:list, :keyword] ->
~H"""
<TextInput
field={{ prop.name }}
value={{ value_to_string(value) }}
class="input is-small"
opts={{ placeholder: value == nil && "nil", phx_keydown: "text_prop_keydown", phx_value_prop: prop.name }}
/>
"""
{type, _} ->
~H"""
<span class="is-size-7">
[editor not available for type <b>{{ inspect(type) }}</b>]
</span>
"""
end
end
defp value_to_string(nil), do: nil
defp value_to_string(value), do: inspect(value)
defp css_value_to_string(nil), do: nil
defp css_value_to_string(value), do: Enum.join(value, " ")
defp get_choices(prop) do
values =
prop.opts
|> Keyword.get(:values, [])
|> Enum.map(&{&1, &1})
cond do
values == [] -> []
prop.opts[:required] -> values
true -> [{"nil", "__NIL__"} | values]
end
end
end
| 26.309859 | 116 | 0.49652 |
f7f9337a66fd054efea6376b6f622c01d2856e3a | 991 | exs | Elixir | projects/api/priv/repo/migrations/20171205225247_add_stories_table.exs | strattadb/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | [
"MIT"
] | 82 | 2017-11-06T01:00:55.000Z | 2020-12-09T10:35:29.000Z | projects/api/priv/repo/migrations/20171205225247_add_stories_table.exs | dbstratta/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | [
"MIT"
] | 98 | 2017-11-06T22:57:32.000Z | 2020-07-03T04:46:39.000Z | projects/api/priv/repo/migrations/20171205225247_add_stories_table.exs | strattadb/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | [
"MIT"
] | 10 | 2017-11-16T05:31:58.000Z | 2020-10-29T18:02:35.000Z | defmodule Margaret.Repo.Migrations.AddStoriesTable do
@moduledoc false
use Ecto.Migration
@doc false
def change do
Margaret.Stories.Story.StoryAudience.create_type()
Margaret.Stories.Story.StoryLicense.create_type()
create table(:stories) do
add(:content, :map, null: false)
add(:author_id, references(:users, on_delete: :delete_all), null: false)
add(:unique_hash, :string, size: 32, null: false)
add(:audience, :story_audience, null: false)
add(:published_at, :naive_datetime)
add(:publication_id, references(:publications, on_delete: :nilify_all))
add(:license, :story_license, null: false, default: "all_rights_reserved")
timestamps()
end
create(unique_index(:stories, [:unique_hash]))
create(index(:stories, [:author_id]))
create(
index(
:stories,
[:publication_id],
where: "publication_id is not null",
name: :publication_stories
)
)
end
end
| 25.410256 | 80 | 0.665994 |
f7f948fa10437ccce6f223e38f38b66ab84ad741 | 1,046 | exs | Elixir | mix.exs | venndr/redbird | a17acf180083fb4190b2b4eefa8f04c886f5dbfc | [
"MIT"
] | null | null | null | mix.exs | venndr/redbird | a17acf180083fb4190b2b4eefa8f04c886f5dbfc | [
"MIT"
] | null | null | null | mix.exs | venndr/redbird | a17acf180083fb4190b2b4eefa8f04c886f5dbfc | [
"MIT"
] | null | null | null | defmodule Redbird.Mixfile do
use Mix.Project
@version "0.7.1"
def project do
[
app: :redbird,
build_embedded: Mix.env() == :prod,
deps: deps(),
elixir: "~> 1.9",
elixirc_paths: elixirc_paths(Mix.env()),
start_permanent: Mix.env() == :prod,
version: @version,
package: [
maintainers: ["anellis", "drapergeek"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/thoughtbot/redbird"}
],
description: "A Redis adapter for Plug.Session",
source_url: "https://github.com/thoughtbot/redbird",
docs: [extras: ["README.md"], main: "readme"]
]
end
def application do
[
extra_applications: [:logger],
mod: {Redbird, []}
]
end
defp deps do
[
{:ex_doc, "~> 0.19", only: :dev},
{:mock, "~> 0.3", only: :test},
{:redix, "~> 1.0 and < 2.0.0"},
{:plug, "~> 1.12"}
]
end
defp elixirc_paths(:test),
do: ["lib", "test/support"]
defp elixirc_paths(_),
do: ["lib"]
end
| 22.255319 | 69 | 0.533461 |
f7f9a0a4b21cfbf7024574b4e5d4c2845bf33704 | 480 | ex | Elixir | lib/live_sup/core/widgets/team_members/worker.ex | livesup-dev/livesup | eaf9ffc78d3043bd9e3408f0f4df26ed16eb8446 | [
"Apache-2.0",
"MIT"
] | null | null | null | lib/live_sup/core/widgets/team_members/worker.ex | livesup-dev/livesup | eaf9ffc78d3043bd9e3408f0f4df26ed16eb8446 | [
"Apache-2.0",
"MIT"
] | 3 | 2022-02-23T15:51:48.000Z | 2022-03-14T22:52:43.000Z | lib/live_sup/core/widgets/team_members/worker.ex | livesup-dev/livesup | eaf9ffc78d3043bd9e3408f0f4df26ed16eb8446 | [
"Apache-2.0",
"MIT"
] | null | null | null | defmodule LiveSup.Core.Widgets.TeamMembers.Worker do
use LiveSup.Core.Widgets.WidgetServer
alias LiveSup.Core.Widgets.TeamMembers.Handler
alias LiveSup.Schemas.WidgetInstance
@default_title "Team Members"
@impl true
def public_settings, do: ["team"]
@impl true
def settings_keys, do: ["team"]
@impl true
def build_data(settings, _context) do
settings
|> Handler.get_data()
end
@impl true
def default_title() do
@default_title
end
end
| 18.461538 | 52 | 0.727083 |
f7f9a98c9987347e719969a8be6a7ea5e89677c3 | 892 | exs | Elixir | priv/repo/migrations/20210409222038_create_users_auth_tables.exs | Arvandazr/zaryn | 748805297b399358d28fbcb7ced7588e40f90f03 | [
"Apache-2.0"
] | 1 | 2020-01-04T11:24:44.000Z | 2020-01-04T11:24:44.000Z | priv/repo/migrations/20210409222038_create_users_auth_tables.exs | Arvandazr/zaryn | 748805297b399358d28fbcb7ced7588e40f90f03 | [
"Apache-2.0"
] | null | null | null | priv/repo/migrations/20210409222038_create_users_auth_tables.exs | Arvandazr/zaryn | 748805297b399358d28fbcb7ced7588e40f90f03 | [
"Apache-2.0"
] | null | null | null | defmodule Zaryn.Repo.Migrations.CreateUsersAuthTables do
use Ecto.Migration
def change do
execute "CREATE EXTENSION IF NOT EXISTS citext", ""
create table(:users, primary_key: false) do
add(:id, :uuid, primary_key: true)
add :email, :citext, null: false
add :hashed_password, :string, null: false
add :confirmed_at, :naive_datetime
timestamps()
end
create unique_index(:users, [:email])
create table(:users_tokens, primary_key: false) do
add(:id, :uuid, primary_key: true)
add :user_id, references(:users, type: :uuid, on_delete: :delete_all), null: false
add :token, :binary, null: false
add :context, :string, null: false
add :sent_to, :string
timestamps(updated_at: false)
end
create index(:users_tokens, [:user_id])
create unique_index(:users_tokens, [:context, :token])
end
end
| 29.733333 | 88 | 0.669283 |
f7f9b55f5475e3943a61aa3351d2eb7c1bc6a368 | 3,376 | ex | Elixir | lib/pixel_font/table_source/otf_layout/chained_sequence_context_1.ex | Dalgona/pixel_font | 6a65bf85e5228296eb29fddbfdd690565767ff76 | [
"MIT"
] | 17 | 2020-09-14T15:25:38.000Z | 2022-03-05T17:14:24.000Z | lib/pixel_font/table_source/otf_layout/chained_sequence_context_1.ex | Dalgona/pixel_font | 6a65bf85e5228296eb29fddbfdd690565767ff76 | [
"MIT"
] | 1 | 2021-08-19T05:05:37.000Z | 2021-08-19T05:05:37.000Z | lib/pixel_font/table_source/otf_layout/chained_sequence_context_1.ex | Dalgona/pixel_font | 6a65bf85e5228296eb29fddbfdd690565767ff76 | [
"MIT"
] | null | null | null | defmodule PixelFont.TableSource.OTFLayout.ChainedSequenceContext1 do
require PixelFont.Util, as: Util
import Util, only: :macros
alias PixelFont.Glyph
alias PixelFont.TableSource.GPOSGSUB
alias PixelFont.TableSource.OTFLayout.GlyphCoverage
alias PixelFont.TableSource.OTFLayout.Lookup
defstruct rulesets: %{}
@type t :: %__MODULE__{rulesets: rulesets()}
@type rulesets :: %{optional(Glyph.id()) => ruleset()}
@type ruleset :: [rule()]
@type rule :: %{
backtrack: [Glyph.id()],
input: [Glyph.id()],
lookahead: [Glyph.id()],
lookup_records: [{integer(), Lookup.id()}]
}
@spec compile(t(), keyword()) :: binary()
def compile(%__MODULE__{rulesets: rulesets}, opts) do
lookup_indices = opts[:lookup_indices]
ruleset_count = map_size(rulesets)
{glyphs, rulesets} =
rulesets
|> Enum.map(fn {glyph_id, rules} -> {gid!(glyph_id), rules} end)
|> Enum.sort_by(&elem(&1, 0))
|> Enum.unzip()
coverage = glyphs |> GlyphCoverage.of() |> GlyphCoverage.compile(internal: true)
coverage_offset = 6 + ruleset_count * 2
offset_base = coverage_offset + byte_size(coverage)
{_, offsets, compiled_rulesets} =
Util.offsetted_binaries(rulesets, offset_base, &compile_ruleset(&1, lookup_indices))
IO.iodata_to_binary([
# format
<<1::16>>,
# coverageOffset
<<coverage_offset::16>>,
# chainedSeqRuleSetCount
<<ruleset_count::16>>,
# chainedSeqRuleSetOffsets[]
offsets,
# Coverage table
coverage,
# Chained sequence ruleset tables
compiled_rulesets
])
end
@spec compile_ruleset(ruleset(), GPOSGSUB.lookup_indices()) :: iodata()
defp compile_ruleset(rules, lookup_indices) do
rule_count = length(rules)
rule_offset_base = 2 + rule_count * 2
{_, offsets, compiled_rules} =
Util.offsetted_binaries(rules, rule_offset_base, &compile_rule(&1, lookup_indices))
[
# chainedSeqRuleCount
<<rule_count::16>>,
# chainedSeqRuleOffsets[]
offsets,
# Sequence rule tables
compiled_rules
]
end
@spec compile_rule(rule(), GPOSGSUB.lookup_indices()) :: iodata()
defp compile_rule(rule, lookup_indices) do
compiled_lookup_records =
Enum.map(rule.lookup_records, fn {glyph_pos, lookup_id} ->
<<glyph_pos::16, lookup_indices[lookup_id]::16>>
end)
[
# backtrackGlyphCount
<<length(rule.backtrack)::16>>,
# backtrackSequence[]
Enum.map(rule.backtrack, &<<gid!(&1)::16>>),
# inputGlyphCount
<<length(rule.input) + 1::16>>,
# inputSequence[]
Enum.map(rule.input, &<<gid!(&1)::16>>),
# lookaheadGlyphCount
<<length(rule.lookahead)::16>>,
# lookaheadSequence[]
Enum.map(rule.lookahead, &<<gid!(&1)::16>>),
# seqLookupCount
<<length(compiled_lookup_records)::16>>,
# seqLookupRecords[]
compiled_lookup_records
]
end
defimpl PixelFont.TableSource.GPOS.Subtable do
alias PixelFont.TableSource.OTFLayout.ChainedSequenceContext1
defdelegate compile(subtable, opts), to: ChainedSequenceContext1
end
defimpl PixelFont.TableSource.GSUB.Subtable do
alias PixelFont.TableSource.OTFLayout.ChainedSequenceContext1
defdelegate compile(subtable, opts), to: ChainedSequenceContext1
end
end
| 29.876106 | 90 | 0.658768 |
f7f9ea3fcd5f92b7e8003bbbbd103fb265f18449 | 1,626 | exs | Elixir | router_light_ui/mix.exs | itwasscience/router_light | 2c25643e43f8b670fbd1975d2eabddd3d9c3fd79 | [
"MIT"
] | 1 | 2021-04-26T12:35:03.000Z | 2021-04-26T12:35:03.000Z | router_light_ui/mix.exs | itwasscience/router_light | 2c25643e43f8b670fbd1975d2eabddd3d9c3fd79 | [
"MIT"
] | null | null | null | router_light_ui/mix.exs | itwasscience/router_light | 2c25643e43f8b670fbd1975d2eabddd3d9c3fd79 | [
"MIT"
] | null | null | null | defmodule RouterLightUi.MixProject do
use Mix.Project
def project do
[
app: :router_light_ui,
version: "0.1.0",
elixir: "~> 1.7",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {RouterLightUi.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.5.8"},
{:phoenix_live_view, "~> 0.15.1"},
{:floki, ">= 0.27.0", only: :test},
{:phoenix_html, "~> 2.11"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:phoenix_live_dashboard, "~> 0.4"},
{:telemetry_metrics, "~> 0.4"},
{:telemetry_poller, "~> 0.4"},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
# $ mix setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
setup: ["deps.get", "cmd npm install --prefix assets"]
]
end
end
| 26.225806 | 84 | 0.590406 |
f7fa04438ac5171f17a2c6d5a4fcecba3fe92284 | 2,838 | ex | Elixir | web/controllers/image_controller.ex | allen-garvey/artour | fce27b234d11a3e434c897b5fa3178b7c126245f | [
"MIT"
] | 4 | 2019-10-04T16:11:15.000Z | 2021-08-18T21:00:13.000Z | apps/artour/web/controllers/image_controller.ex | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | 5 | 2020-03-16T23:52:25.000Z | 2021-09-03T16:52:17.000Z | apps/artour/web/controllers/image_controller.ex | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | null | null | null | defmodule Artour.ImageController do
use Artour.Web, :controller
alias Artour.Image
alias Artour.Admin
def index(conn, _params) do
images = Admin.list_images()
view = view_module(conn)
put_view(conn, Artour.SharedView) |>
render("index.html", items: images, item_name_singular: "image", column_headings: view.attribute_names_short(), item_view: view,
row_values_func_name: :attribute_values_short)
end
def import_images(conn, _params) do
render(conn, "import.html", csrf_token: get_csrf_token())
end
def new(conn, _params) do
changeset = Admin.change_image(%Image{})
formats = Artour.Format.form_list(Repo)
render(conn, "new.html", changeset: changeset, formats: formats)
end
def create(conn, %{"image" => image_params, "form_submit_type" => submit_type}) do
changeset = Image.changeset(%Image{}, image_params)
case Repo.insert(changeset) do
{:ok, image} ->
if submit_type == "add_another" do
changeset = Image.changeset(%Image{format_id: image.format_id, completion_date: image.completion_date})
formats = Artour.Format.form_list(Repo)
conn
|> put_flash(:info, Artour.ImageView.display_name(image) <> " saved.")
|> render("new.html", changeset: changeset, formats: formats)
else
conn
|> put_flash(:info, "Image created successfully.")
|> redirect(to: image_path(conn, :index))
end
{:error, changeset} ->
formats = Artour.Format.form_list(Repo)
render(conn, "new.html", changeset: changeset, formats: formats)
end
end
def show(conn, %{"id" => id}) do
image = Admin.get_image!(id)
render(conn, "show.html", image: image)
end
def edit(conn, %{"id" => id}) do
image = Repo.get!(Image, id)
changeset = Image.changeset(image)
formats = Artour.Format.form_list(Repo)
render(conn, "edit.html", image: image, changeset: changeset, formats: formats)
end
def update(conn, %{"id" => id, "image" => image_params}) do
image = Repo.get!(Image, id)
case Admin.update_image(image, image_params) do
{:ok, image} ->
conn
|> put_flash(:info, "Image updated successfully.")
|> redirect(to: image_path(conn, :show, image))
{:error, changeset} ->
formats = Artour.Format.form_list(Repo)
render(conn, "edit.html", image: image, changeset: changeset, formats: formats)
end
end
def delete(conn, %{"id" => id}) do
image = Repo.get!(Image, id)
# Here we use delete! (with a bang) because we expect
# it to always work (and if it does not, it will raise).
Repo.delete!(image)
conn
|> put_flash(:info, "Image deleted successfully.")
|> redirect(to: image_path(conn, :index))
end
end
| 33.388235 | 134 | 0.637068 |
f7fa09bb7992e2bc93abe81a998409808e31081b | 80 | exs | Elixir | apps/poker_web/test/views/player_view_test.exs | thetamind/elixir_phoenix_poker | 125faa62d52b78101fee51e4d4639c1caa8fa033 | [
"MIT"
] | null | null | null | apps/poker_web/test/views/player_view_test.exs | thetamind/elixir_phoenix_poker | 125faa62d52b78101fee51e4d4639c1caa8fa033 | [
"MIT"
] | null | null | null | apps/poker_web/test/views/player_view_test.exs | thetamind/elixir_phoenix_poker | 125faa62d52b78101fee51e4d4639c1caa8fa033 | [
"MIT"
] | null | null | null | defmodule Poker.Web.PlayerViewTest do
use Poker.Web.ConnCase, async: true
end
| 20 | 37 | 0.8 |
f7fa28723cdb227dc8b65467902b629c68f1d98f | 506 | ex | Elixir | lib/elixir_lokalise_api/endpoints/key.ex | kianmeng/elixir-lokalise-api | fca59438cbd8ee960adbbce8b13cef12ddc68ef6 | [
"BSD-3-Clause"
] | 3 | 2021-06-24T14:30:31.000Z | 2021-09-06T11:30:17.000Z | lib/elixir_lokalise_api/endpoints/key.ex | kianmeng/elixir-lokalise-api | fca59438cbd8ee960adbbce8b13cef12ddc68ef6 | [
"BSD-3-Clause"
] | 8 | 2021-09-15T07:30:59.000Z | 2022-02-01T17:40:17.000Z | lib/elixir_lokalise_api/endpoints/key.ex | kianmeng/elixir-lokalise-api | fca59438cbd8ee960adbbce8b13cef12ddc68ef6 | [
"BSD-3-Clause"
] | 2 | 2021-09-07T11:10:51.000Z | 2021-09-26T07:37:39.000Z | defmodule ElixirLokaliseApi.Keys do
@moduledoc """
Keys endpoint.
"""
@model ElixirLokaliseApi.Model.Key
@collection ElixirLokaliseApi.Collection.Keys
@endpoint "projects/{!:project_id}/keys/{:key_id}"
@data_key :keys
@singular_data_key :key
@parent_key :project_id
@item_key :key_id
use ElixirLokaliseApi.DynamicResource,
import: [
:item_reader,
:find2,
:all2,
:create2,
:delete2,
:update3,
:update2_bulk,
:delete2_bulk
]
end
| 20.24 | 52 | 0.662055 |
f7fa2c27aa01e5c90396762354ed764a92f7bd5f | 2,831 | ex | Elixir | lib/mongo/id_server.ex | rafamedina/mongodb | 65bd68adbc2753c38d927ad0ba804fe8e66d50d7 | [
"Apache-2.0"
] | null | null | null | lib/mongo/id_server.ex | rafamedina/mongodb | 65bd68adbc2753c38d927ad0ba804fe8e66d50d7 | [
"Apache-2.0"
] | null | null | null | lib/mongo/id_server.ex | rafamedina/mongodb | 65bd68adbc2753c38d927ad0ba804fe8e66d50d7 | [
"Apache-2.0"
] | null | null | null | defmodule Mongo.IdServer do
@moduledoc false
# An ObjectId consists of a machine id, process id, seconds since unix epoch
# and a counter. The counter is used to differentiate between generated
# ObjectIds during a single second.
#
# A counter is generated for each second in an hour, the counter is
# initialized to a random number based on MongoDB documentation's
# recommendation. Each time a new ObjectId is generated we take the counter
# for the current second and increment it.
#
# To keep the counters random and to make sure they don't grow infinitely they
# need to be reset. Care needs to be taken to ensure a counter is not reset
# during its second's window during which it is being used. Once each minute
# ~60 counters should be reset, only counters that will be used ~30 minutes in
# the future are reset to ensure the current second's counter is not touched.
use GenServer
@name __MODULE__
@num_counters 3600
@reset_timer 60_000
@counter_max 16777216
@gs_epoch :calendar.datetime_to_gregorian_seconds({{1970, 1, 1}, {0, 0, 0}})
def start_link(_ \\ nil) do
GenServer.start_link(__MODULE__, [], name: @name)
end
def init([]) do
:ets.new(@name, [:named_table, :public, write_concurrency: true])
:ets.insert(@name, [machineprocid: {machine_id(), process_id()}])
:ets.insert(@name, gen_counters(0..@num_counters))
Process.send_after(self(), :reset_counters, @reset_timer)
{:ok, opposite_on_window(:calendar.universal_time)}
end
def handle_info(:reset_counters, last_reset) do
new_reset = opposite_on_window(:calendar.universal_time)
:ets.insert(@name, gen_counters(last_reset+1..new_reset))
Process.send_after(self(), :reset_counters, @reset_timer)
{:noreply, new_reset}
end
def new do
{machine_id, proc_id} = :ets.lookup_element(@name, :machineprocid, 2)
now = :calendar.universal_time
secs = :calendar.datetime_to_gregorian_seconds(now) - @gs_epoch
counter = :ets.update_counter(@name, in_window(now), 1)
counter = rem counter, @counter_max
BSON.ObjectId.new(machine_id, proc_id, secs, counter)
end
defp gen_counters(range) do
for ix <- range do
{ix, :rand.uniform(@counter_max)-1}
end
end
defp in_window(now) do
secs = :calendar.datetime_to_gregorian_seconds(now)
window = @num_counters
rem secs, window
end
defp opposite_on_window(now) do
secs = :calendar.datetime_to_gregorian_seconds(now)
window = @num_counters
half_window = div window, 2
rem secs+half_window, window
end
defp machine_id do
{:ok, hostname} = :inet.gethostname
<<machine_id::unsigned-big-24, _::binary>> = :crypto.hash(:md5, hostname)
machine_id
end
defp process_id do
:os.getpid |> List.to_integer
end
end
| 31.455556 | 80 | 0.708584 |
f7fa6fcc025f9e0d222d7f999250a3568fce9350 | 1,459 | exs | Elixir | test/blog_web/controllers/auth_controller_test.exs | vrrpizzato/blog | bee1e71ca29925d43d62f0bbaa9ae7f4566d65fe | [
"MIT"
] | null | null | null | test/blog_web/controllers/auth_controller_test.exs | vrrpizzato/blog | bee1e71ca29925d43d62f0bbaa9ae7f4566d65fe | [
"MIT"
] | null | null | null | test/blog_web/controllers/auth_controller_test.exs | vrrpizzato/blog | bee1e71ca29925d43d62f0bbaa9ae7f4566d65fe | [
"MIT"
] | null | null | null | defmodule BlogWeb.AuthControllerTest do
use BlogWeb.ConnCase
@ueberauth %Ueberauth.Auth{
credentials: %{token: "token_echo"},
info: %{
email: "email_echo",
first_name: "first_name_echo",
last_name: "last_name_echo",
image: "image_echo"
},
provider: "google"
}
@ueberauth_invalido %Ueberauth.Auth{
credentials: %{token: nil},
info: %{
email: "email_echo",
first_name: nil,
last_name: nil,
image: nil
},
provider: nil
}
test "callback sucess", %{conn: conn} do
conn =
conn
|> assign(:ueberauth_auth, @ueberauth)
|> get(Routes.auth_path(conn, :callback, "google"))
assert redirected_to(conn) == Routes.page_path(conn, :index)
conn = get(conn, Routes.page_path(conn, :index))
assert html_response(conn, 200) =~ "Seja bem-vindo!"
end
test "callback failure", %{conn: conn} do
conn =
conn
|> assign(:ueberauth_auth, @ueberauth_invalido)
|> get(Routes.auth_path(conn, :callback, "google"))
assert redirected_to(conn) == Routes.page_path(conn, :index)
conn = get(conn, Routes.page_path(conn, :index))
assert html_response(conn, 200) =~ "Algo deu errado!"
end
test "logout success", %{conn: conn} do
conn =
conn
|> Plug.Test.init_test_session(user_id: 1)
|> get(Routes.auth_path(conn, :logout))
assert redirected_to(conn) == Routes.page_path(conn, :index)
end
end
| 25.596491 | 64 | 0.629883 |
f7fac0e5a5f8530a6a4cebd1f0d6b365d8aee4ae | 19,092 | ex | Elixir | lib/mix/lib/mix/project.ex | namjae/elixir | 6d1561a5939d68fb61f422b83271fbc824847395 | [
"Apache-2.0"
] | 1 | 2021-05-20T13:08:37.000Z | 2021-05-20T13:08:37.000Z | lib/mix/lib/mix/project.ex | namjae/elixir | 6d1561a5939d68fb61f422b83271fbc824847395 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/project.ex | namjae/elixir | 6d1561a5939d68fb61f422b83271fbc824847395 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Project do
@moduledoc """
Defines and manipulates Mix projects.
A Mix project is defined by calling `use Mix.Project` in a module, usually
placed in `mix.exs`:
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "1.0.0"
]
end
end
## Configuration
In order to configure Mix, the module that `use`s `Mix.Project` should export
a `project/0` function that returns a keyword list representing configuration
for the project.
This configuration can be read using `Mix.Project.config/0`. Note that
`config/0` won't fail if a project is not defined; this allows many Mix tasks
to work without a project.
If a task requires a project to be defined or needs to access a
special function within the project, the task can call `Mix.Project.get!/0`
which fails with `Mix.NoProjectError` in the case a project is not
defined.
There isn't a comprehensive list of all the options that can be returned by
`project/0` since many Mix tasks define their own options that they read from
this configuration. For example, look at the "Configuration" section in the
documentation for the `Mix.Tasks.Compile` task.
These are a few options that are not used by just one Mix task (and will thus
be documented here):
* `:build_per_environment` - if `true`, builds will be *per-environment*. If
`false`, builds will go in `_build/shared` regardless of the Mix
environment. Defaults to `true`.
* `:aliases` - a list of task aliases. For more information, check out the
"Aliases" section in the documentation for the `Mix` module. Defaults to
`[]`.
* `:config_path` - a string representing the path of the main config
file. See `config_files/0` for more information. Defaults to
`"config/config.exs"`.
* `:default_task` - a string representing the default task to be run by
`mix` when no task is specified. Defaults to `"run"`.
* `:deps` - a list of dependencies of this project. Refer to the
documentation for the `Mix.Tasks.Deps` task for more information. Defaults
to `[]`.
* `:deps_path` - directory where dependencies are stored. Also see
`deps_path/1`. Defaults to `"deps"`.
* `:lockfile` - the name of the lockfile used by the `mix deps.*` family of
tasks. Defaults to `"mix.lock"`.
* `:preferred_cli_env` - a keyword list of `{task, env}` tuples where `task`
is the task name as an atom (for example, `:"deps.get"`) and `env` is the
preferred environment (for example, `:test`). This option overrides what
specified by the tasks with the `@preferred_cli_env` attribute (see the
docs for `Mix.Task`). Defaults to `[]`.
For more options, keep an eye on the documentation for single Mix tasks; good
examples are the `Mix.Tasks.Compile` task and all the specific compiler tasks
(such as `Mix.Tasks.Compile.Elixir` or `Mix.Tasks.Compile.Erlang`).
Note that sometimes the same configuration option is mentioned in the
documentation for different tasks; this is just because it's common for many
tasks to read and use the same configuration option (for example,
`:erlc_paths` is used by `mix compile.erlang`, `mix compile.yecc`, and other
tasks).
## Erlang projects
Mix can be used to manage Erlang projects that don't have any Elixir code. To
ensure Mix tasks work correctly for an Erlang project, `language: :erlang` has
to be part of the configuration returned by `project/0`. This setting also
makes sure Elixir is not added as a dependency to the generated `.app` file or
to the escript generated with `mix escript.build`, and so on.
"""
@doc false
defmacro __using__(_) do
quote do
@after_compile Mix.Project
end
end
# Invoked after each Mix.Project is compiled.
@doc false
def __after_compile__(env, _binary) do
push(env.module, env.file)
end
# Push a project onto the project stack.
# Only the top of the stack can be accessed.
@doc false
def push(atom, file \\ nil, app \\ nil) when is_atom(atom) do
file = file || (atom && List.to_string(atom.__info__(:compile)[:source]))
config = Keyword.merge([app: app] ++ default_config(), get_project_config(atom))
case Mix.ProjectStack.push(atom, config, file) do
:ok ->
:ok
{:error, other} when is_binary(other) ->
Mix.raise(
"Trying to load #{inspect(atom)} from #{inspect(file)}" <>
" but another project with the same name was already defined at #{inspect(other)}"
)
end
end
# Pops a project from the stack.
@doc false
def pop do
Mix.ProjectStack.pop()
end
# The configuration that is pushed down to dependencies.
@doc false
def deps_config(config \\ config()) do
[
build_embedded: config[:build_embedded],
build_per_environment: config[:build_per_environment],
consolidate_protocols: false,
deps_path: deps_path(config),
env_path: build_path(config)
]
end
@doc """
Retrieves the current project if there is one.
If there is no current project, `nil` is returned. This
may happen in cases there is no `mix.exs` in the current
directory.
If you expect a project to be defined, i.e., it is a
requirement of the current task, you should call
`get!/0` instead.
"""
@spec get() :: module | nil
def get do
case Mix.ProjectStack.peek() do
%{name: name} -> name
_ -> nil
end
end
@doc """
Same as `get/0`, but raises an exception if there is no current project.
This is usually called by tasks that need additional
functions on the project to be defined. Since such
tasks usually depend on a project being defined, this
function raises a `Mix.NoProjectError` exception in
case no project is available.
"""
@spec get!() :: module | no_return
def get! do
get() || raise Mix.NoProjectError, []
end
@doc """
Returns the project configuration.
If there is no project defined, it still returns a keyword
list with default values. This allows many Mix tasks to work
without the need for an underlying project.
Note this configuration is cached once the project is
pushed onto the stack. Calling it multiple times won't
cause it to be recomputed.
Do not use `Mix.Project.config/0` to find the runtime configuration.
Use it only to configure aspects of your project (like
compilation directories) and not your application runtime.
"""
@spec config() :: keyword
def config do
case Mix.ProjectStack.peek() do
%{config: config} -> config
_ -> default_config()
end
end
@doc """
Returns a list of project configuration files for this project.
This function is usually used in compilation tasks to trigger
a full recompilation whenever such configuration files change.
It returns the `mix.exs` file, the lock manifest, and all config
files in the `config` directory that do not start with a trailing
period (for example, `.my_config.exs`).
"""
@spec config_files() :: [Path.t()]
def config_files do
manifest = Mix.Dep.Lock.manifest()
configs =
case Mix.ProjectStack.peek() do
%{config: config, file: file} ->
configs =
config[:config_path]
|> Path.dirname()
|> Path.join("**/*.*")
|> Path.wildcard()
|> Enum.reject(&String.starts_with?(Path.basename(&1), "."))
[file | configs]
_ ->
[]
end
[manifest] ++ configs
end
@doc """
Returns `true` if `config` is the configuration for an umbrella project.
When called with no arguments, tells whether the current project is
an umbrella project.
"""
@spec umbrella?() :: boolean
def umbrella?(config \\ config()) do
config[:apps_path] != nil
end
@doc """
Returns a map with the umbrella child applications paths.
These paths are based on the `:apps_path` and `:apps` configurations.
If the given project configuration identifies an umbrella project, the return
value is a map of `app => path` where `app` is a child app of the umbrella and
`path` is its path relative to the root of the umbrella project.
If the given project configuration does not identify an umbrella project,
`nil` is returned.
## Examples
Mix.Project.apps_paths()
#=> %{my_app1: "apps/my_app1", my_app2: "apps/my_app2"}
"""
@spec apps_paths() :: %{optional(atom) => Path.t()} | nil
def apps_paths(config \\ config()) do
if apps_path = config[:apps_path] do
key = {:apps_paths, Mix.Project.get!()}
if cache = Mix.ProjectStack.read_cache(key) do
cache
else
cache = config[:apps] |> umbrella_apps(apps_path) |> to_apps_paths(apps_path)
Mix.ProjectStack.write_cache(key, cache)
end
end
end
defp umbrella_apps(nil, apps_path) do
case File.ls(apps_path) do
{:ok, apps} -> Enum.map(apps, &String.to_atom/1)
{:error, _} -> []
end
end
defp umbrella_apps(apps, _apps_path) when is_list(apps) do
apps
end
defp to_apps_paths(apps, apps_path) do
for app <- apps,
path = path_with_mix_exs_otherwise_warn(app, apps_path),
do: {app, path},
into: %{}
end
defp path_with_mix_exs_otherwise_warn(app, apps_path) do
path = Path.join(apps_path, Atom.to_string(app))
cond do
File.regular?(Path.join(path, "mix.exs")) ->
path
File.dir?(path) ->
Mix.shell().error(
"warning: path #{inspect(Path.relative_to_cwd(path))} is a directory but " <>
"it has no mix.exs. Mix won't consider this directory as part of your " <>
"umbrella application. Please add a \"mix.exs\" or set the \":apps\" key " <>
"in your umbrella configuration with all relevant apps names as atoms"
)
nil
true ->
# If it is a stray file, we just ignore it.
nil
end
end
@doc ~S"""
Runs the given `fun` inside the given project.
This function changes the current working directory and
loads the project at the given directory onto the project
stack.
A `post_config` can be passed that will be merged into
the project configuration.
`fun` is called with the module name of the given `Mix.Project`.
The return value of this function is the return value of `fun`.
## Examples
Mix.Project.in_project :my_app, "/path/to/my_app", fn module ->
"Mixfile is: #{inspect module}"
end
#=> "Mixfile is: MyApp.MixProject"
"""
@spec in_project(atom, Path.t(), keyword, (module -> result)) :: result when result: term
def in_project(app, path, post_config \\ [], fun)
def in_project(app, ".", post_config, fun) do
cached =
try do
load_project(app, post_config)
rescue
any ->
Mix.shell().error("Error while loading project #{inspect(app)} at #{File.cwd!()}")
reraise any, System.stacktrace()
end
try do
fun.(cached)
after
Mix.Project.pop()
end
end
def in_project(app, path, post_config, fun) do
File.cd!(path, fn ->
in_project(app, ".", post_config, fun)
end)
end
@doc """
Returns the path where dependencies are stored for the given project.
If no configuration is given, the one for the current project is used.
The returned path will be expanded.
## Examples
Mix.Project.deps_path
#=> "/path/to/project/deps"
"""
@spec deps_path(keyword) :: Path.t()
def deps_path(config \\ config()) do
Path.expand(config[:deps_path])
end
@doc """
Returns the full path of all dependencies as a map.
## Examples
Mix.Project.deps_paths
#=> %{foo: "deps/foo", bar: "custom/path/dep"}
"""
@spec deps_paths() :: %{optional(atom) => Path.t()}
def deps_paths do
Enum.reduce(Mix.Dep.cached(), %{}, fn %{app: app, opts: opts}, acc ->
Map.put(acc, app, opts[:dest])
end)
end
@doc """
Returns the build path for the given project.
If no configuration is given, the one for the current project is used.
The returned path will be expanded.
## Examples
Mix.Project.build_path
#=> "/path/to/project/_build/shared"
If `:build_per_environment` is set to `true`, it will create a new build per
environment:
Mix.env
#=> :dev
Mix.Project.build_path
#=> "/path/to/project/_build/dev"
"""
@spec build_path(keyword) :: Path.t()
def build_path(config \\ config()) do
config[:env_path] || env_path(config)
end
defp env_path(config) do
build = config[:build_path] || "_build"
case config[:build_per_environment] do
true ->
Path.expand("#{build}/#{Mix.env()}")
false ->
Path.expand("#{build}/shared")
other ->
Mix.raise("The :build_per_environment option should be a boolean, got: #{inspect(other)}")
end
end
@doc """
Returns the path where manifests are stored.
By default they are stored in the app path inside
the build directory. Umbrella applications have
the manifest path set to the root of the build directory.
Directories may be changed in future releases.
The returned path will be expanded.
## Examples
Mix.Project.manifest_path
#=> "/path/to/project/_build/shared/lib/app"
"""
@spec manifest_path(keyword) :: Path.t()
def manifest_path(config \\ config()) do
config[:app_path] ||
if app = config[:app] do
Path.join([build_path(config), "lib", Atom.to_string(app)])
else
build_path(config)
end
end
@doc """
Returns the application path inside the build.
The returned path will be expanded.
## Examples
Mix.Project.app_path
#=> "/path/to/project/_build/shared/lib/app"
"""
@spec app_path(keyword) :: Path.t()
def app_path(config \\ config()) do
config[:app_path] ||
cond do
app = config[:app] ->
Path.join([build_path(config), "lib", Atom.to_string(app)])
config[:apps_path] ->
raise "trying to access Mix.Project.app_path for an umbrella project but umbrellas have no app"
true ->
Mix.raise(
"Cannot access build without an application name, " <>
"please ensure you are in a directory with a mix.exs file and it defines " <>
"an :app name under the project configuration"
)
end
end
@doc """
Returns the paths the given project compiles to.
If no configuration is given, the one for the current project will be used.
The returned path will be expanded.
## Examples
Mix.Project.compile_path
#=> "/path/to/project/_build/dev/lib/app/ebin"
"""
@spec compile_path(keyword) :: Path.t()
def compile_path(config \\ config()) do
Path.join(app_path(config), "ebin")
end
@doc """
Returns the path where protocol consolidations are stored.
The returned path will be expanded.
## Examples
Mix.Project.consolidation_path
#=> "/path/to/project/_build/dev/lib/my_app/consolidated"
Inside umbrellas:
Mix.Project.consolidation_path
#=> "/path/to/project/_build/dev/consolidated"
"""
def consolidation_path(config \\ config()) do
if umbrella?(config) do
Path.join(build_path(config), "consolidated")
else
Path.join(app_path(config), "consolidated")
end
end
@doc """
Compiles the given project.
"""
@spec compile([term], keyword) :: term
def compile(args, _config \\ []) do
Mix.Task.run("compile", args)
end
@doc """
Builds the project structure for the given application.
## Options
* `:symlink_ebin` - symlink ebin instead of copying it
"""
@spec build_structure(keyword, keyword) :: :ok
def build_structure(config \\ config(), opts \\ []) do
app = app_path(config)
File.mkdir_p!(app)
source = Path.expand("ebin")
target = Path.join(app, "ebin")
_ =
cond do
opts[:symlink_ebin] ->
_ = symlink_or_copy(config, source, target)
match?({:ok, _}, :file.read_link(target)) ->
_ = File.rm_rf!(target)
File.mkdir_p!(target)
true ->
File.mkdir_p!(target)
end
_ = symlink_or_copy(config, Path.expand("include"), Path.join(app, "include"))
_ = symlink_or_copy(config, Path.expand("priv"), Path.join(app, "priv"))
:ok
end
defp symlink_or_copy(config, source, target) do
if config[:build_embedded] do
if File.exists?(source) do
File.rm_rf!(target)
File.cp_r!(source, target)
end
else
Mix.Utils.symlink_or_copy(source, target)
end
end
@doc """
Ensures the project structure for the given project exists.
In case it does exist, it is a no-op. Otherwise, it is built.
"""
@spec ensure_structure(keyword, keyword) :: :ok
def ensure_structure(config \\ config(), opts \\ []) do
if File.exists?(app_path(config)) do
:ok
else
build_structure(config, opts)
end
end
@doc """
Returns all load paths for the given project.
"""
@spec load_paths(keyword) :: [Path.t()]
def load_paths(config \\ config()) do
if umbrella?(config) do
[]
else
[compile_path(config)]
end
end
  # Loads mix.exs in the current directory or loads the project from the
  # mixfile cache and pushes the project onto the project stack.
  defp load_project(app, post_config) do
    Mix.ProjectStack.post_config(post_config)
    # Fast path: a previously loaded project for this app is cached.
    if cached = Mix.ProjectStack.read_cache({:app, app}) do
      {project, file} = cached
      push(project, file, app)
      project
    else
      file = Path.expand("mix.exs")
      old_proj = get()
      {new_proj, file} =
        if File.regular?(file) do
          try do
            # relative_paths is disabled while mix.exs is loaded (presumably
            # so file references are absolute — confirm) and restored below.
            Code.compiler_options(relative_paths: false)
            _ = Code.load_file(file)
            get()
          else
            # `else` matches on the try body's result: if get/0 still returns
            # the old project, mix.exs did not define a Mix project.
            ^old_proj -> Mix.raise("Could not find a Mix project at #{file}")
            new_proj -> {new_proj, file}
          after
            Code.compiler_options(relative_paths: true)
          end
        else
          # No mix.exs: push an anonymous (nil) project for this app.
          push(nil, file, app)
          {nil, "nofile"}
        end
      Mix.ProjectStack.write_cache({:app, app}, {new_proj, file})
      new_proj
    end
  end
  # Built-in defaults for project configuration.
  defp default_config do
    [
      aliases: [],
      build_embedded: false,
      build_per_environment: true,
      build_scm: Mix.SCM.Path,
      config_path: "config/config.exs",
      consolidate_protocols: true,
      default_task: "run",
      deps: [],
      deps_path: "deps",
      elixirc_paths: ["lib"],
      erlc_paths: ["src"],
      erlc_include_path: "include",
      erlc_options: [:debug_info],
      lockfile: "mix.lock",
      preferred_cli_env: [],
      start_permanent: false
    ]
  end
  # Keys managed internally by Mix; they are dropped from the user-facing
  # project configuration below.
  @private_config [:app_path, :build_scm, :env_path]
  # Returns the project's configuration with private keys removed.
  # A nil project (no mix.exs) has an empty configuration.
  defp get_project_config(nil), do: []
  defp get_project_config(atom), do: atom.project |> Keyword.drop(@private_config)
end
| 28.284444 | 105 | 0.645401 |
f7fac2f3db4b8c3e86ff8dd8e50d67b4c7eafcc5 | 9,961 | exs | Elixir | lib/elixir/test/elixir/path_test.exs | ellbee/elixir | c1acfe9827f12ef58f7f301baad7497472cb4bc9 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/path_test.exs | ellbee/elixir | c1acfe9827f12ef58f7f301baad7497472cb4bc9 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/path_test.exs | ellbee/elixir | c1acfe9827f12ef58f7f301baad7497472cb4bc9 | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
defmodule PathTest do
use ExUnit.Case, async: true
doctest Path
import PathHelpers
if :file.native_name_encoding == :utf8 do
test "wildcard with UTF-8" do
File.mkdir_p(tmp_path("héllò"))
assert Path.wildcard(tmp_path("héllò")) == [tmp_path("héllò")]
after
File.rm_rf tmp_path("héllò")
end
end
test "wildcard" do
hello = tmp_path("wildcard/.hello")
world = tmp_path("wildcard/.hello/world")
File.mkdir_p(world)
assert Path.wildcard(tmp_path("wildcard/*/*")) == []
assert Path.wildcard(tmp_path("wildcard/**/*")) == []
assert Path.wildcard(tmp_path("wildcard/?hello/world")) == []
assert Path.wildcard(tmp_path("wildcard/*/*"), match_dot: true) == [world]
assert Path.wildcard(tmp_path("wildcard/**/*"), match_dot: true) == [hello, world]
assert Path.wildcard(tmp_path("wildcard/?hello/world"), match_dot: true) == [world]
after
File.rm_rf tmp_path("wildcard")
end
if windows?() do
test "relative win" do
assert Path.relative("C:/usr/local/bin") == "usr/local/bin"
assert Path.relative("C:\\usr\\local\\bin") == "usr\\local\\bin"
assert Path.relative("C:usr\\local\\bin") == "usr\\local\\bin"
assert Path.relative("/usr/local/bin") == "usr/local/bin"
assert Path.relative("usr/local/bin") == "usr/local/bin"
assert Path.relative("../usr/local/bin") == "../usr/local/bin"
assert Path.relative_to("D:/usr/local/foo", "D:/usr/") == "local/foo"
assert Path.relative_to("D:/usr/local/foo", "d:/usr/") == "local/foo"
assert Path.relative_to("d:/usr/local/foo", "D:/usr/") == "local/foo"
assert Path.relative_to("D:/usr/local/foo", "d:/") == "usr/local/foo"
assert Path.relative_to("D:/usr/local/foo", "D:/") == "usr/local/foo"
assert Path.relative_to("D:/usr/local/foo", "d:") == "D:/usr/local/foo"
assert Path.relative_to("D:/usr/local/foo", "D:") == "D:/usr/local/foo"
end
test "type win" do
assert Path.type("C:/usr/local/bin") == :absolute
assert Path.type('C:\\usr\\local\\bin') == :absolute
assert Path.type("C:usr\\local\\bin") == :volumerelative
assert Path.type("/usr/local/bin") == :volumerelative
assert Path.type('usr/local/bin') == :relative
assert Path.type("../usr/local/bin") == :relative
end
test "split win" do
assert Path.split("C:\\foo\\bar") == ["c:/", "foo", "bar"]
assert Path.split("C:/foo/bar") == ["c:/", "foo", "bar"]
end
else
test "relative Unix" do
assert Path.relative("/usr/local/bin") == "usr/local/bin"
assert Path.relative("usr/local/bin") == "usr/local/bin"
assert Path.relative("../usr/local/bin") == "../usr/local/bin"
assert Path.relative(['/usr', ?/, "local/bin"]) == "usr/local/bin"
end
test "type Unix" do
assert Path.type("/usr/local/bin") == :absolute
assert Path.type("usr/local/bin") == :relative
assert Path.type("../usr/local/bin") == :relative
assert Path.type('/usr/local/bin') == :absolute
assert Path.type('usr/local/bin') == :relative
assert Path.type('../usr/local/bin') == :relative
assert Path.type(['/usr/', 'local/bin']) == :absolute
assert Path.type(['usr/', 'local/bin']) == :relative
assert Path.type(['../usr', '/local/bin']) == :relative
end
end
test "relative to cwd" do
assert Path.relative_to_cwd(__ENV__.file) ==
Path.relative_to(__ENV__.file, System.cwd!)
assert Path.relative_to_cwd(to_charlist(__ENV__.file)) ==
Path.relative_to(to_charlist(__ENV__.file), to_charlist(System.cwd!))
end
test "absname" do
assert (Path.absname("/") |> strip_drive_letter_if_windows) == "/"
assert (Path.absname("/foo") |> strip_drive_letter_if_windows) == "/foo"
assert (Path.absname("/./foo") |> strip_drive_letter_if_windows) == "/foo"
assert (Path.absname("/foo/bar") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.absname("/foo/bar/") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.absname("/foo/bar/../bar") |> strip_drive_letter_if_windows) == "/foo/bar/../bar"
assert Path.absname("bar", "/foo") == "/foo/bar"
assert Path.absname("bar/", "/foo") == "/foo/bar"
assert Path.absname("bar/.", "/foo") == "/foo/bar/."
assert Path.absname("bar/../bar", "/foo") == "/foo/bar/../bar"
assert Path.absname("bar/../bar", "foo") == "foo/bar/../bar"
assert Path.absname(["bar/", ?., ?., ["/bar"]], "/foo") == "/foo/bar/../bar"
end
test "expand path with user home" do
home = System.user_home! |> Path.absname
assert home == Path.expand("~")
assert home == Path.expand('~')
assert is_binary Path.expand("~/foo")
assert is_binary Path.expand('~/foo')
assert Path.expand("~/file") == Path.join(home, "file")
assert Path.expand("~/file", "whatever") == Path.join(home, "file")
assert Path.expand("file", Path.expand("~")) == Path.expand("~/file")
assert Path.expand("file", "~") == Path.join(home, "file")
assert Path.expand("~file") == Path.join(System.cwd!, "file")
end
test "expand path" do
assert (Path.expand("/") |> strip_drive_letter_if_windows) == "/"
assert (Path.expand("/foo/../..") |> strip_drive_letter_if_windows) == "/"
assert (Path.expand("/foo") |> strip_drive_letter_if_windows) == "/foo"
assert (Path.expand("/./foo") |> strip_drive_letter_if_windows) == "/foo"
assert (Path.expand("/../foo") |> strip_drive_letter_if_windows) == "/foo"
assert (Path.expand("/foo/bar") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.expand("/foo/bar/") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.expand("/foo/bar/.") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.expand("/foo/bar/../bar") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.expand("bar", "/foo") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.expand("bar/", "/foo") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.expand("bar/.", "/foo") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.expand("bar/../bar", "/foo") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.expand("../bar/../bar", "/foo/../foo/../foo") |> strip_drive_letter_if_windows) == "/bar"
assert "/bar" ==
(Path.expand(['..', ?/, "bar/../bar"], '/foo/../foo/../foo') |> strip_drive_letter_if_windows)
assert (Path.expand("/..") |> strip_drive_letter_if_windows) == "/"
assert Path.expand("bar/../bar", "foo") == Path.expand("foo/bar")
end
test "relative to" do
assert Path.relative_to("/usr/local/foo", "/usr/local") == "foo"
assert Path.relative_to("/usr/local/foo", "/") == "usr/local/foo"
assert Path.relative_to("/usr/local/foo", "/etc") == "/usr/local/foo"
assert Path.relative_to("/usr/local/foo", "/usr/local/foo") == "/usr/local/foo"
assert Path.relative_to("usr/local/foo", "usr/local") == "foo"
assert Path.relative_to("usr/local/foo", "etc") == "usr/local/foo"
assert Path.relative_to('usr/local/foo', "etc") == "usr/local/foo"
assert Path.relative_to("usr/local/foo", "usr/local") == "foo"
assert Path.relative_to(["usr", ?/, 'local/foo'], 'usr/local') == "foo"
end
test "rootname" do
assert Path.rootname("~/foo/bar.ex", ".ex") == "~/foo/bar"
assert Path.rootname("~/foo/bar.exs", ".ex") == "~/foo/bar.exs"
assert Path.rootname("~/foo/bar.old.ex", ".ex") == "~/foo/bar.old"
assert Path.rootname([?~, '/foo/bar', ".old.ex"], '.ex') == "~/foo/bar.old"
end
test "extname" do
assert Path.extname("foo.erl") == ".erl"
assert Path.extname("~/foo/bar") == ""
assert Path.extname('foo.erl') == ".erl"
assert Path.extname('~/foo/bar') == ""
end
test "dirname" do
assert Path.dirname("/foo/bar.ex") == "/foo"
assert Path.dirname("foo/bar.ex") == "foo"
assert Path.dirname("~/foo/bar.ex") == "~/foo"
assert Path.dirname("/foo/bar/baz/") == "/foo/bar/baz"
assert Path.dirname([?~, "/foo", '/bar.ex']) == "~/foo"
end
test "basename" do
assert Path.basename("foo") == "foo"
assert Path.basename("/foo/bar") == "bar"
assert Path.basename("/") == ""
assert Path.basename("~/foo/bar.ex", ".ex") == "bar"
assert Path.basename("~/foo/bar.exs", ".ex") == "bar.exs"
assert Path.basename("~/for/bar.old.ex", ".ex") == "bar.old"
assert Path.basename([?~, "/for/bar", '.old.ex'], ".ex") == "bar.old"
end
test "join" do
assert Path.join([""]) == ""
assert Path.join(["foo"]) == "foo"
assert Path.join(["/", "foo", "bar"]) == "/foo/bar"
assert Path.join(["~", "foo", "bar"]) == "~/foo/bar"
assert Path.join(['/foo/', "/bar/"]) == "/foo/bar"
assert Path.join(["/", ""]) == "/"
assert Path.join(["/", "", "bar"]) == "/bar"
end
test "join two" do
assert Path.join("/foo", "bar") == "/foo/bar"
assert Path.join("~", "foo") == "~/foo"
assert Path.join("", "bar") == "bar"
assert Path.join("bar", "") == "bar"
assert Path.join("", "/bar") == "bar"
assert Path.join("/bar", "") == "/bar"
assert Path.join("foo", "/bar") == "foo/bar"
assert Path.join("/foo", "/bar") == "/foo/bar"
assert Path.join("/foo", "/bar") == "/foo/bar"
assert Path.join("/foo", "./bar") == "/foo/./bar"
assert Path.join([?/, "foo"], "./bar") == "/foo/./bar"
end
test "split" do
assert Path.split("") == []
assert Path.split("foo") == ["foo"]
assert Path.split("/foo/bar") == ["/", "foo", "bar"]
assert Path.split([?/, "foo/bar"]) == ["/", "foo", "bar"]
end
if windows?() do
defp strip_drive_letter_if_windows([_d, ?: | rest]), do: rest
defp strip_drive_letter_if_windows(<<_d, ?:, rest::binary>>), do: rest
else
defp strip_drive_letter_if_windows(path), do: path
end
end
| 40.327935 | 106 | 0.590905 |
f7fad9bd3f6e3df430fbdfa0afab9d607277f0e5 | 947 | exs | Elixir | rumbl/config/config.exs | hectoregm/elixir | c4dc9cd327c6d935de93337e5c52d58b82c4d339 | [
"MIT"
] | null | null | null | rumbl/config/config.exs | hectoregm/elixir | c4dc9cd327c6d935de93337e5c52d58b82c4d339 | [
"MIT"
] | null | null | null | rumbl/config/config.exs | hectoregm/elixir | c4dc9cd327c6d935de93337e5c52d58b82c4d339 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# Configures the endpoint
config :rumbl, Rumbl.Endpoint,
url: [host: "localhost"],
root: Path.dirname(__DIR__),
secret_key_base: "dAet79ujJnRXjeVSehnvN1HfNcMZ86OEqNg7FrdIzTjDOjmqLSE/6t8OWgAoxoLl",
render_errors: [accepts: ~w(html json)],
pubsub: [name: Rumbl.PubSub,
adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
# Configure phoenix generators
config :phoenix, :generators,
migration: true,
binary_id: false
| 31.566667 | 86 | 0.756072 |
f7faff8f4d5d431ff8161a00dad7f5c0254d91da | 11,101 | ex | Elixir | lib/mix/lib/mix/tasks/compile.app.ex | alexcastano/elixir | 0221ce1f79d1cfd0955a9fa46a6d84d0193ad838 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/compile.app.ex | alexcastano/elixir | 0221ce1f79d1cfd0955a9fa46a6d84d0193ad838 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/compile.app.ex | alexcastano/elixir | 0221ce1f79d1cfd0955a9fa46a6d84d0193ad838 | [
"Apache-2.0"
] | 1 | 2021-09-30T01:21:02.000Z | 2021-09-30T01:21:02.000Z | defmodule Mix.Tasks.Compile.App do
use Mix.Task.Compiler
@recursive true
@moduledoc """
Writes an .app file.
An `.app` file is a file containing Erlang terms that defines
your application. Mix automatically generates this file based on
your `mix.exs` configuration.
In order to generate the `.app` file, Mix expects your project
to have both `:app` and `:version` keys. Furthermore, you can
configure the generated application by defining an `application/0`
function in your `mix.exs` with the following options.
The most commonly used options are:
* `:extra_applications` - a list of OTP applications
your application depends on which are not included in `:deps`
(usually defined in `deps/0` in your `mix.exs`). For example,
here you can declare a dependency on applications that ship with
Erlang/OTP or Elixir, like `:crypto` or `:logger`, but anything in
the code path works. Mix guarantees that these applications and
the rest of your runtime dependencies are started before your
application starts.
* `:registered` - the name of all registered processes in the
application. If your application defines a local GenServer
with name `MyServer`, it is recommended to add `MyServer`
to this list. It is most useful in detecting conflicts
between applications that register the same names.
* `:env` - default values for the application environment.
The application environment is one of the most common ways
to configure applications. See the `Application` module for
mechanisms to read and write to the application environment.
For example:
def application do
[extra_applications: [:logger, :crypto],
env: [key: :value],
registered: [MyServer]]
end
Other options include:
* `:applications` - all applications your application depends
on at runtime. By default, this list is automatically inferred
from your dependencies. Mix and other tools use the application
list in order to start your dependencies before starting the
application itself.
* `:mod` - specifies a module to invoke when the application
is started. It must be in the format `{Mod, args}` where
args is often an empty list. The module specified must
implement the callbacks defined by the `Application`
module.
* `:start_phases` - specifies a list of phases and their arguments
to be called after the application is started. See the "Phases"
section below.
* `:included_applications` - specifies a list of applications
that will be included in the application. It is the responsibility of
the primary application to start the supervision tree of all included
applications, as only the primary application will be started. A process
in an included application considers itself belonging to the
primary application.
* `:maxT` - specifies the maximum time the application is allowed to run, in
milliseconds. Applications are stopped if `:maxT` is reached, and their
top-level supervisor terminated with reason `:normal`. This threshold is
technically valid in any resource file, but it is only effective for
applications with a callback module. Defaults to `:infinity`.
Besides the options above, `.app` files also expect other options like
`:modules` and `:vsn`, but these are automatically added by Mix.
## Command line options
* `--force` - forces compilation regardless of modification times
## Phases
Applications provide a start phases mechanism which will be called,
in order, for the application and all included applications. If a phase
is not defined for an included application, that application is skipped.
Let's see an example `MyApp.application/0` function:
def application do
[start_phases: [init: [], go: [], finish: []],
included_applications: [:my_included_app]]
end
And an example `:my_included_app` defines on its `mix.exs` the function:
def application do
[mod: {MyIncludedApp, []},
start_phases: [go: []]]
end
In this example, the order that the application callbacks are called in is:
Application.start(MyApp)
MyApp.start(:normal, [])
MyApp.start_phase(:init, :normal, [])
MyApp.start_phase(:go, :normal, [])
MyIncludedApp.start_phase(:go, :normal, [])
MyApp.start_phase(:finish, :normal, [])
"""
def run(args) do
{opts, _, _} = OptionParser.parse(args, switches: [force: :boolean])
project = Mix.Project.get!()
config = Mix.Project.config()
app = Keyword.get(config, :app)
version = Keyword.get(config, :version)
validate_app(app)
validate_version(version)
path = Mix.Project.compile_path()
mods = modules_from(Path.wildcard("#{path}/*.beam")) |> Enum.sort()
target = Path.join(path, "#{app}.app")
source = Mix.Project.config_mtime()
if opts[:force] || Mix.Utils.stale?([source], [target]) || modules_changed?(mods, target) do
best_guess = [
description: to_charlist(config[:description] || app),
modules: mods,
registered: [],
vsn: to_charlist(version)
]
properties =
if function_exported?(project, :application, 0) do
project_application = project.application
unless Keyword.keyword?(project_application) do
Mix.raise(
"Application configuration returned from application/0 should be a keyword list"
)
end
Keyword.merge(best_guess, project_application)
else
best_guess
end
properties = ensure_correct_properties(properties, config)
contents = :io_lib.format("~p.~n", [{:application, app, properties}])
Mix.Project.ensure_structure()
File.write!(target, IO.chardata_to_string(contents))
Mix.shell().info("Generated #{app} app")
:ok
else
:noop
end
end
defp modules_changed?(mods, target) do
case :file.consult(target) do
{:ok, [{:application, _app, properties}]} ->
properties[:modules] != mods
_ ->
false
end
end
defp validate_app(app) when is_atom(app), do: :ok
defp validate_app(app) do
ensure_present(:app, app)
Mix.raise("Expected :app to be an atom, got: #{inspect(app)}")
end
defp validate_version(version) do
ensure_present(:version, version)
unless is_binary(version) and match?({:ok, _}, Version.parse(version)) do
Mix.raise("Expected :version to be a SemVer version, got: #{inspect(version)}")
end
end
defp ensure_present(name, nil) do
Mix.raise("Please ensure mix.exs file has the #{inspect(name)} in the project definition")
end
defp ensure_present(_name, _val), do: :ok
defp modules_from(beams) do
Enum.map(beams, &(&1 |> Path.basename() |> Path.rootname(".beam") |> String.to_atom()))
end
defp ensure_correct_properties(properties, config) do
validate_properties!(properties)
{extra, properties} = Keyword.pop(properties, :extra_applications, [])
apps =
properties
|> Keyword.get(:applications)
|> Kernel.||(apps_from_prod_non_optional_deps(properties))
|> normalize_apps(extra, config)
Keyword.put(properties, :applications, apps)
end
defp validate_properties!(properties) do
Enum.each(properties, fn
{:description, value} ->
unless is_list(value) do
Mix.raise(
"Application description (:description) is not a character list, got: " <>
inspect(value)
)
end
{:id, value} ->
unless is_list(value) do
Mix.raise("Application id (:id) is not a character list, got: " <> inspect(value))
end
{:vsn, value} ->
unless is_list(value) do
Mix.raise("Application vsn (:vsn) is not a character list, got: " <> inspect(value))
end
{:maxT, value} ->
unless value == :infinity or is_integer(value) do
Mix.raise(
"Application maximum time (:maxT) is not an integer or :infinity, got: " <>
inspect(value)
)
end
{:modules, value} ->
unless is_list(value) and Enum.all?(value, &is_atom(&1)) do
Mix.raise(
"Application modules (:modules) should be a list of atoms, got: " <> inspect(value)
)
end
{:registered, value} ->
unless is_list(value) and Enum.all?(value, &is_atom(&1)) do
Mix.raise(
"Application registered processes (:registered) should be a list of atoms, got: " <>
inspect(value)
)
end
{:included_applications, value} ->
unless is_list(value) and Enum.all?(value, &is_atom(&1)) do
Mix.raise(
"Application included applications (:included_applications) should be a list of atoms, got: " <>
inspect(value)
)
end
{:extra_applications, value} ->
unless is_list(value) and Enum.all?(value, &is_atom(&1)) do
Mix.raise(
"Application extra applications (:extra_applications) should be a list of atoms, got: " <>
inspect(value)
)
end
{:applications, value} ->
unless is_list(value) and Enum.all?(value, &is_atom(&1)) do
Mix.raise(
"Application applications (:applications) should be a list of atoms, got: " <>
inspect(value)
)
end
{:env, value} ->
unless Keyword.keyword?(value) do
Mix.raise(
"Application environment (:env) should be a keyword list, got: " <> inspect(value)
)
end
{:start_phases, value} ->
unless Keyword.keyword?(value) do
Mix.raise(
"Application start phases (:start_phases) should be a keyword list, got: " <>
inspect(value)
)
end
{:mod, []} ->
:ok
{:mod, {module, _args}} when is_atom(module) ->
:ok
{:mod, value} ->
Mix.raise(
"Application callback module (:mod) should be either [] or {module, start_args}, got: " <>
inspect(value)
)
_ ->
:ok
end)
end
defp apps_from_prod_non_optional_deps(properties) do
included_applications = Keyword.get(properties, :included_applications, [])
for %{app: app, opts: opts, top_level: true} <- Mix.Dep.cached(),
Keyword.get(opts, :app, true),
Keyword.get(opts, :runtime, true),
not Keyword.get(opts, :optional, false),
app not in included_applications,
do: app
end
defp normalize_apps(apps, extra, config) do
Enum.uniq([:kernel, :stdlib] ++ language_app(config) ++ extra ++ apps)
end
defp language_app(config) do
case Keyword.fetch(config, :language) do
{:ok, :elixir} -> [:elixir]
{:ok, :erlang} -> []
:error -> [:elixir]
end
end
end
| 32.746313 | 108 | 0.634988 |
f7fb056c060a964e2c8f9dee4d30b67f1924987e | 2,624 | exs | Elixir | day_9.1.exs | yggie/advent-of-code-2015 | 414d8910714a592e0811a70ff35a2dc4ec4c73f5 | [
"MIT"
] | null | null | null | day_9.1.exs | yggie/advent-of-code-2015 | 414d8910714a592e0811a70ff35a2dc4ec4c73f5 | [
"MIT"
] | null | null | null | day_9.1.exs | yggie/advent-of-code-2015 | 414d8910714a592e0811a70ff35a2dc4ec4c73f5 | [
"MIT"
] | null | null | null | input = Interface.read_input
defmodule TravelDiary do
def put_entry(diary, "") do
diary
end
def put_entry(%{ travel_distance_lookup: travel_distance_lookup, locations: locations }, entry) do
[_match, start_loc, end_loc, dist] = Regex.run(~r/^(\w+) to (\w+) = (\d+)$/, entry)
key = make_key(start_loc, end_loc)
{dist, ""} = Integer.parse(dist)
%{
travel_distance_lookup: Map.put(travel_distance_lookup, key, dist),
locations: HashSet.put(HashSet.put(locations, start_loc), end_loc)
}
end
def travel_distance(diary, start_loc, end_loc) do
key = make_key(start_loc, end_loc)
diary.travel_distance_lookup[key]
end
defp make_key(start_loc, end_loc) do
[start_loc, end_loc] |> Enum.sort |> List.to_tuple
end
end
defmodule GraphSearch do
def minimize_traversal_cost(nodes, cost_lookup) do
cost_lookup_list = cost_lookup
|> Map.to_list
|> List.keysort(1)
nodes |> Enum.reduce({[], 1_000_000_000}, fn(first_node, best_so_far) ->
remaining_nodes = HashSet.delete(nodes, first_node)
traverse_nodes(remaining_nodes, cost_lookup_list, {[first_node], 0}, best_so_far)
end)
end
defp traverse_nodes_guarded(remaining_nodes, cost_lookup_list, trail_so_far, best_so_far) do
if HashSet.size(remaining_nodes) == 0 do
[trail_so_far, best_so_far] |> List.keysort(1) |> hd
else
traverse_nodes(remaining_nodes, cost_lookup_list, trail_so_far, best_so_far)
end
end
defp traverse_nodes(remaining_nodes, cost_lookup_list, trail_so_far, best_so_far) do
{[last_node | list_so_far], cost_so_far} = trail_so_far
remaining_nodes
|> Enum.reduce(best_so_far, fn(chosen_node, next_best_so_far) ->
next_remaining_nodes = HashSet.delete(remaining_nodes, chosen_node)
{_lookup_key, cost} = cost_lookup_list
|> Enum.find(fn({{node_0, node_1}, _cost}) ->
cond do
{node_0, node_1} == {chosen_node, last_node} || {node_0, node_1} == {last_node, chosen_node} ->
true
true ->
false
end
end)
next_cost = cost_so_far + cost
traverse_nodes_guarded(next_remaining_nodes, cost_lookup_list, {[chosen_node, last_node | list_so_far], next_cost}, next_best_so_far)
end)
end
end
diary = input |> String.split("\n")
|> Enum.reduce(%{ travel_distance_lookup: %{}, locations: HashSet.new }, fn(entry, diary) ->
TravelDiary.put_entry(diary, entry)
end)
{_reverse_trail, answer} = GraphSearch.minimize_traversal_cost(diary.locations, diary.travel_distance_lookup)
Interface.print_output(answer)
| 32.395062 | 139 | 0.692835 |
f7fb12fc26ebe4630e9347b8baafd657cd28e2d5 | 916 | ex | Elixir | lib/helper/converter/chinese_convention.ex | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 240 | 2018-11-06T09:36:54.000Z | 2022-02-20T07:12:36.000Z | lib/helper/converter/chinese_convention.ex | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 363 | 2018-07-11T03:38:14.000Z | 2021-12-14T01:42:40.000Z | lib/helper/converter/chinese_convention.ex | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | [
"Apache-2.0"
] | 22 | 2019-01-27T11:47:56.000Z | 2021-02-28T13:17:52.000Z | defmodule Helper.Converter.ChineseConvention do
@moduledoc """
follow's https://github.com/sparanoid/chinese-copywriting-guidelines
遵循中文排版指南
- 自动添加空格
- 中文状态下输入的的 "" 和 '' 会被自动转换成「」以及 『』
inspired by wordpress plugin cover-lover:
https://cn.wordpress.org/plugins/corner-bracket-lover/
"""
require Pangu
@doc """
format chinese stirng follows github: sparanoid/chinese-copywriting-guidelines.
example: "Sephiroth見他”這等’神情‘“,也是悚然一驚:不知我這Ultimate Destructive Magic是否對付得了?"
to: "Sephiroth 見他「這等『神情』」, 也是悚然一驚: 不知我這 Ultimate Destructive Magic 是否對付得了?"
"""
@spec format(binary) :: binary
def format(text) do
text
|> Pangu.spacing()
|> cover_brackets
end
# covert chinese "" and '' to 「」& 『』
defp cover_brackets(text) do
text
|> String.replace("“", "「")
|> String.replace("”", "」")
|> String.replace("‘", "『")
|> String.replace("’", "』")
end
end
| 24.756757 | 82 | 0.65393 |
f7fb439cb79cbf4e9921180229ba05bc52698754 | 2,803 | ex | Elixir | apps/site/lib/site_web/views/search_view.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 42 | 2019-05-29T16:05:30.000Z | 2021-08-09T16:03:37.000Z | apps/site/lib/site_web/views/search_view.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 872 | 2019-05-29T17:55:50.000Z | 2022-03-30T09:28:43.000Z | apps/site/lib/site_web/views/search_view.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 12 | 2019-07-01T18:33:21.000Z | 2022-03-10T02:13:57.000Z | defmodule SiteWeb.SearchView do
use SiteWeb, :view
import SiteWeb.CMSView, only: [render_duration: 2]
import Site.ContentRewriter, only: [rewrite: 2]
alias CMS.Search.Result
alias CMS.SearchResult.{
Event,
File,
LandingPage,
Link,
NewsEntry,
Page,
Person
}
defdelegate fa_icon_for_file_type(mime), to: Site.FontAwesomeHelpers
@spec render_filter_option(Phoenix.HTML.Form, atom, map) :: Phoenix.HTML.safe()
def render_filter_option(form, type, option) do
id = "#{type}_#{option.value}"
name = "search[#{type}][#{option.value}]"
content_tag :li do
label form, type, for: id, class: "facet-label" do
[
content_tag(
:input,
"",
type: "checkbox",
id: id,
name: name,
value: "true",
checked: option.active?
),
content_tag(:span, "#{option.label} (#{option.count})")
]
end
end
end
@spec render_toggle_filter() :: [Phoenix.HTML.safe()]
def render_toggle_filter do
[
content_tag(:span, fa("plus-circle"), class: "search-filter-expand"),
content_tag(:span, fa("minus-circle"), class: "search-filter-collapse")
]
end
@spec icon(Result.result()) :: Phoenix.HTML.safe() | String.t()
defp icon(%Event{}), do: fa("calendar")
defp icon(%NewsEntry{}), do: fa("newspaper-o")
defp icon(%Person{}), do: fa("user")
defp icon(%LandingPage{}), do: fa("file-o")
defp icon(%Page{}), do: fa("file-o")
defp icon(%Link{}), do: fa("file-o")
defp icon(%File{mimetype: mimetype}), do: fa_icon_for_file_type(mimetype)
@spec fragment(Result.result(), Plug.Conn.t()) :: Phoenix.HTML.safe() | String.t()
defp fragment(%NewsEntry{highlights: higlights}, conn), do: highlights(higlights, conn)
defp fragment(%Person{highlights: higlights}, conn), do: highlights(higlights, conn)
defp fragment(%Page{highlights: higlights}, conn), do: highlights(higlights, conn)
defp fragment(%Link{description: description}, _conn), do: description
defp fragment(%LandingPage{highlights: higlights}, conn), do: highlights(higlights, conn)
defp fragment(%Event{start_time: start_time, location: location}, _conn) do
[content_tag(:div, render_duration(start_time, nil)), content_tag(:div, "#{location}")]
end
defp fragment(_, _conn), do: ""
@spec highlights([String.t()], Plug.Conn.t()) :: Phoenix.HTML.safe()
defp highlights(html_strings, conn) do
html_strings
|> raw()
|> rewrite(conn)
end
@spec track_search_click(String.t(), String.t()) :: String.t()
defp track_search_click(url, origin) do
delimiter =
case String.contains?(url, "?") do
true -> "&"
false -> "?"
end
"#{url}#{delimiter}from=#{origin}"
end
end
| 30.467391 | 91 | 0.631466 |
f7fba5f351371e6b6734fcd18bfc351163eb0639 | 1,178 | ex | Elixir | web/channels/user_socket.ex | jespr/ex_blog_example | 0a014d4b88b224210a7e15d7c60521aff05059c9 | [
"Unlicense"
] | null | null | null | web/channels/user_socket.ex | jespr/ex_blog_example | 0a014d4b88b224210a7e15d7c60521aff05059c9 | [
"Unlicense"
] | null | null | null | web/channels/user_socket.ex | jespr/ex_blog_example | 0a014d4b88b224210a7e15d7c60521aff05059c9 | [
"Unlicense"
] | null | null | null | defmodule BlogExample.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", BlogExample.RoomChannel
## Transports
transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "users_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# BlogExample.Endpoint.broadcast("users_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 31 | 84 | 0.704584 |
f7fba82db103679a5be54d73811e98b746416165 | 75 | ex | Elixir | lib/changelog/mailer.ex | soleo/changelog.com | 621c7471b23379e1cdd4a0c960b66ed98d8d1a53 | [
"MIT"
] | 2,599 | 2016-10-25T15:02:53.000Z | 2022-03-26T02:34:42.000Z | lib/changelog/mailer.ex | soleo/changelog.com | 621c7471b23379e1cdd4a0c960b66ed98d8d1a53 | [
"MIT"
] | 253 | 2016-10-25T20:29:24.000Z | 2022-03-29T21:52:36.000Z | lib/changelog/mailer.ex | soleo/changelog.com | 621c7471b23379e1cdd4a0c960b66ed98d8d1a53 | [
"MIT"
] | 298 | 2016-10-25T15:18:31.000Z | 2022-01-18T21:25:52.000Z | defmodule Changelog.Mailer do
  # Bamboo mailer; adapter/delivery settings are looked up under the
  # `:changelog` OTP application's environment (per Bamboo's `:otp_app` option).
  use Bamboo.Mailer, otp_app: :changelog
end
| 18.75 | 40 | 0.8 |
f7fbe2ceaef06767130fc833954abef416eb9ae4 | 2,555 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p2beta1_page.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p2beta1_page.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p2beta1_page.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p2beta1Page do
  @moduledoc """
  Detected page from OCR.
  ## Attributes
  - blocks ([GoogleCloudVisionV1p2beta1Block]): List of blocks of text, images etc on this page. Defaults to: `null`.
  - confidence (float()): Confidence of the OCR results on the page. Range [0, 1]. Defaults to: `null`.
  - height (integer()): Page height. For PDFs the unit is points. For images (including TIFFs) the unit is pixels. Defaults to: `null`.
  - property (GoogleCloudVisionV1p2beta1TextAnnotationTextProperty): Additional information detected on the page. Defaults to: `null`.
  - width (integer()): Page width. For PDFs the unit is points. For images (including TIFFs) the unit is pixels. Defaults to: `null`.
  """
  # Auto-generated model; `field/1,2,3` macros below come from ModelBase and
  # declare how each JSON attribute is (de)serialized.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :blocks => list(GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p2beta1Block.t()),
          :confidence => any(),
          :height => any(),
          :property =>
            GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p2beta1TextAnnotationTextProperty.t(),
          :width => any()
        }
  # `:blocks` decodes as a list of nested Block models; `:property` as a
  # nested TextProperty model; remaining fields are plain values.
  field(:blocks, as: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p2beta1Block, type: :list)
  field(:confidence)
  field(:height)
  field(
    :property,
    as: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p2beta1TextAnnotationTextProperty
  )
  field(:width)
end
# Poison decoding delegates to the generated model's own decoder.
defimpl Poison.Decoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p2beta1Page do
  def decode(value, options),
    do: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p2beta1Page.decode(value, options)
end

# Poison encoding delegates to the shared model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p2beta1Page do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 38.712121 | 135 | 0.736595 |
f7fbf447556fd1899b63f7ed05ef40a7f5c46d65 | 2,634 | exs | Elixir | test/membrane/rtp/packet_test.exs | geometerio/membrane_rtp_plugin | ac6191bf7dcf2b00e880aeb475faa6de688ff5cb | [
"Apache-2.0"
] | 15 | 2020-12-12T01:30:19.000Z | 2022-02-28T22:48:08.000Z | test/membrane/rtp/packet_test.exs | geometerio/membrane_rtp_plugin | ac6191bf7dcf2b00e880aeb475faa6de688ff5cb | [
"Apache-2.0"
] | 39 | 2020-07-31T09:16:41.000Z | 2022-03-17T09:40:50.000Z | test/membrane/rtp/packet_test.exs | geometerio/membrane_rtp_plugin | ac6191bf7dcf2b00e880aeb475faa6de688ff5cb | [
"Apache-2.0"
] | 3 | 2021-05-12T21:27:28.000Z | 2021-11-03T19:27:34.000Z | defmodule Membrane.RTP.PacketTest do
  use ExUnit.Case
  alias Membrane.RTP.{Header, Packet, Fixtures}
  # All parse calls below exercise the unencrypted code path.
  @encrypted? false
  # Round-trip: parsing the fixture binary yields the fixture packet, and
  # serializing the fixture packet reproduces the fixture binary.
  test "parses and serializes valid packets" do
    packet = Fixtures.sample_packet()
    assert {:ok, %{packet: ^packet}} = Packet.parse(Fixtures.sample_packet_binary(), @encrypted?)
    assert Packet.serialize(Fixtures.sample_packet()) == Fixtures.sample_packet_binary()
  end
  # First two bits carry the RTP version; version 1 must be rejected.
  test "returns error when version is not supported" do
    assert Packet.parse(<<1::2, 1233::1022>>, @encrypted?) == {:error, :wrong_version}
  end
  # A 5-byte binary is shorter than a minimal RTP header.
  test "returns error when packet is too short" do
    assert Packet.parse(<<128, 127, 0, 0, 1>>, @encrypted?) == {:error, :malformed_packet}
  end
  # Rewrites the CSRC count (bits 4-7) to 2 and appends two 32-bit CSRC ids,
  # then checks both parse and serialize agree with that header.
  test "parses and serializes csrcs correctly" do
    <<header_1::4, _old_cc::4, header_2::88, payload::binary()>> = Fixtures.sample_packet_binary()
    packet_binary = <<header_1::4, 2::4, header_2::88, 12::32, 21::32, payload::binary()>>
    packet = %Packet{
      Fixtures.sample_packet()
      | header: %Header{Fixtures.sample_header() | csrcs: [12, 21]}
    }
    assert {:ok, %{packet: ^packet}} = Packet.parse(packet_binary, @encrypted?)
    assert Packet.serialize(packet) == packet_binary
  end
  # Sets the padding flag (bit 3) and appends padding whose final octet holds
  # the padding length; parse must drop it, and serialize with align_to: 4
  # must regenerate it.
  test "ignores padding" do
    test_padding_size = 2
    padding_octets = test_padding_size - 1
    test_padding = <<0::size(padding_octets)-unit(8), test_padding_size>>
    <<version::2, _padding::1, rest::bitstring>> = Fixtures.sample_packet_binary()
    test_packet = <<version::2, 1::1, rest::bitstring, test_padding::binary>>
    sample_packet = Fixtures.sample_packet()
    assert {:ok, %{packet: ^sample_packet}} = Packet.parse(test_packet, @encrypted?)
    assert Packet.serialize(Fixtures.sample_packet(), align_to: 4) == test_packet
  end
  # Builds a header-extension block (profile 0, length 4 words) and verifies
  # it round-trips through parse and serialize.
  test "reads and serializes extension header" do
    extension_header = <<0::16, 4::16, 1::32, 2::32, 3::32, 4::32>>
    expected_parsed_extension = %Header.Extension{
      data: <<1::32, 2::32, 3::32, 4::32>>,
      profile_specific: <<0, 0>>
    }
    # Extension is stored on 4th bit of header
    <<header_1::3, _extension::1, header_2::92, payload::binary>> =
      Fixtures.sample_packet_binary()
    # Glueing data back together with extension header in place
    packet_binary = <<header_1::3, 1::1, header_2::92, extension_header::binary, payload::binary>>
    packet = %Packet{
      Fixtures.sample_packet()
      | header: %Header{Fixtures.sample_header() | extension: expected_parsed_extension}
    }
    assert {:ok, %{packet: ^packet}} = Packet.parse(packet_binary, @encrypted?)
    assert Packet.serialize(packet) == packet_binary
  end
end
| 35.12 | 98 | 0.678815 |
f7fbf48d1356d642d940f8dd45a0ea3357737a80 | 461 | exs | Elixir | test/models/post_test.exs | gilcierweb/appElixirPhoenix | 4c594b365ddc07e732bd421901620469a49b2395 | [
"MIT"
] | 1 | 2018-04-25T15:03:42.000Z | 2018-04-25T15:03:42.000Z | test/models/post_test.exs | gilcierweb/appElixirPhoenix | 4c594b365ddc07e732bd421901620469a49b2395 | [
"MIT"
] | null | null | null | test/models/post_test.exs | gilcierweb/appElixirPhoenix | 4c594b365ddc07e732bd421901620469a49b2395 | [
"MIT"
] | null | null | null | defmodule AppElixirPhoenix.PostTest do
  use AppElixirPhoenix.ModelCase
  alias AppElixirPhoenix.Post
  # Minimal attribute fixtures for the changeset under test.
  @valid_attrs %{body: "some content", title: "some content"}
  @invalid_attrs %{}
  # Both title and body present -> changeset passes validation.
  test "changeset with valid attributes" do
    changeset = Post.changeset(%Post{}, @valid_attrs)
    assert changeset.valid?
  end
  # Empty attrs -> changeset fails validation.
  test "changeset with invalid attributes" do
    changeset = Post.changeset(%Post{}, @invalid_attrs)
    refute changeset.valid?
  end
end
| 24.263158 | 61 | 0.733189 |
f7fc124539b3f9d347468e2704bff5e83d5d6d34 | 20,890 | ex | Elixir | lib/commanded/aggregates/aggregate.ex | jccf091/commanded | 5d68a2b1b7a222b6f204c48d886f3d2c9670f26a | [
"MIT"
] | 1 | 2022-02-20T10:42:07.000Z | 2022-02-20T10:42:07.000Z | lib/commanded/aggregates/aggregate.ex | jccf091/commanded | 5d68a2b1b7a222b6f204c48d886f3d2c9670f26a | [
"MIT"
] | null | null | null | lib/commanded/aggregates/aggregate.ex | jccf091/commanded | 5d68a2b1b7a222b6f204c48d886f3d2c9670f26a | [
"MIT"
] | null | null | null | defmodule Commanded.Aggregates.Aggregate do
use TelemetryRegistry
use GenServer, restart: :temporary
use Commanded.Registration
telemetry_event(%{
event: [:commanded, :aggregate, :execute, :start],
description: "Emitted when an aggregate starts executing a command",
measurements: "%{system_time: integer()}",
metadata: """
%{application: Commanded.Application.t(),
aggregate_uuid: String.t(),
aggregate_state: struct(),
aggregate_version: non_neg_integer(),
caller: pid(),
execution_context: Commanded.Aggregates.ExecutionContext.t()}
"""
})
telemetry_event(%{
event: [:commanded, :aggregate, :execute, :stop],
description: "Emitted when an aggregate stops executing a command",
measurements: "%{duration: non_neg_integer()}",
metadata: """
%{application: Commanded.Application.t(),
aggregate_uuid: String.t(),
aggregate_state: struct(),
aggregate_version: non_neg_integer(),
caller: pid(),
execution_context: Commanded.Aggregates.ExecutionContext.t(),
events: [map()],
error: nil | any()}
"""
})
telemetry_event(%{
event: [:commanded, :aggregate, :execute, :exception],
description: "Emitted when an aggregate raises an exception",
measurements: "%{duration: non_neg_integer()}",
metadata: """
%{application: Commanded.Application.t(),
aggregate_uuid: String.t(),
aggregate_state: struct(),
aggregate_version: non_neg_integer(),
caller: pid(),
execution_context: Commanded.Aggregates.ExecutionContext.t(),
kind: :throw | :error | :exit,
reason: any(),
stacktrace: list()}
"""
})
@moduledoc """
Aggregate is a `GenServer` process used to provide access to an
instance of an event sourced aggregate.
It allows execution of commands against an aggregate instance, and handles
persistence of created events to the configured event store. Concurrent
commands sent to an aggregate instance are serialized and executed in the
order received.
The `Commanded.Commands.Router` module will locate, or start, an aggregate
instance when a command is dispatched. By default, an aggregate process will
run indefinitely once started. Its lifespan may be controlled by using the
`Commanded.Aggregates.AggregateLifespan` behaviour.
## Snapshotting
You can configure state snapshots for an aggregate in config. By default
snapshots are *not* taken for an aggregate. The following options are
available to enable snapshots:
- `snapshot_every` - snapshot aggregate state every so many events. Use
`nil` to disable snapshotting, or exclude the configuration entirely.
- `snapshot_version` - a non-negative integer indicating the version of
the aggregate state snapshot. Incrementing this version forces any
earlier recorded snapshots to be ignored when rebuilding aggregate
state.
### Example
In `config/config.exs` enable snapshots for `MyApp.ExampleAggregate` after
every ten events:
config :my_app, MyApp.Application,
snapshotting: %{
MyApp.ExampleAggregate => [
snapshot_every: 10,
snapshot_version: 1
]
}
## Telemetry
#{telemetry_docs()}
"""
require Logger
alias Commanded.Aggregate.Multi
alias Commanded.Aggregates.Aggregate
alias Commanded.Aggregates.AggregateStateBuilder
alias Commanded.Aggregates.ExecutionContext
alias Commanded.Application.Config
alias Commanded.Event.Mapper
alias Commanded.Event.Upcast
alias Commanded.EventStore
alias Commanded.EventStore.RecordedEvent
alias Commanded.Registration
alias Commanded.Snapshotting
alias Commanded.Telemetry
  # Server state: identity of the aggregate, its current event-sourced state,
  # snapshot configuration, the version of the last applied event (0 = none),
  # and the idle timeout chosen by the aggregate lifespan.
  defstruct [
    :application,
    :aggregate_module,
    :aggregate_uuid,
    :aggregate_state,
    :snapshotting,
    aggregate_version: 0,
    lifespan_timeout: :infinity
  ]
  # Starts the aggregate process. `opts` mixes GenServer start options with
  # aggregate options; they are split apart here. Raises `ArgumentError` when
  # the aggregate module/identity are not an atom/binary respectively.
  def start_link(config, opts) do
    {start_opts, aggregate_opts} =
      Keyword.split(opts, [:debug, :name, :timeout, :spawn_opt, :hibernate_after])
    aggregate_module = Keyword.fetch!(aggregate_opts, :aggregate_module)
    aggregate_uuid = Keyword.fetch!(aggregate_opts, :aggregate_uuid)
    unless is_atom(aggregate_module),
      do: raise(ArgumentError, message: "aggregate module must be an atom")
    unless is_binary(aggregate_uuid),
      do: raise(ArgumentError, message: "aggregate identity must be a string")
    application = Keyword.fetch!(config, :application)
    snapshotting = Keyword.get(config, :snapshotting, %{})
    # Snapshot options are configured per aggregate module; default is none.
    snapshot_options = Map.get(snapshotting, aggregate_module, [])
    state = %Aggregate{
      application: application,
      aggregate_module: aggregate_module,
      aggregate_uuid: aggregate_uuid,
      snapshotting: Snapshotting.new(application, aggregate_uuid, snapshot_options)
    }
    GenServer.start_link(__MODULE__, state, start_opts)
  end
  # Registry name uniquely identifying one aggregate instance per application.
  @doc false
  def name(application, aggregate_module, aggregate_uuid)
      when is_atom(application) and is_atom(aggregate_module) and is_binary(aggregate_uuid),
      do: {application, aggregate_module, aggregate_uuid}
@doc """
Execute the given command against the aggregate.
- `aggregate_module` - the aggregate's module (e.g. `BankAccount`).
- `aggregate_uuid` - uniquely identifies an instance of the aggregate.
- `context` - includes command execution arguments
(see `Commanded.Aggregates.ExecutionContext` for details).
- `timeout` - an non-negative integer which specifies how many milliseconds
to wait for a reply, or the atom :infinity to wait indefinitely.
The default value is five seconds (5,000ms).
## Return values
Returns `{:ok, aggregate_version, events}` on success, or `{:error, error}`
on failure.
- `aggregate_version` - the updated version of the aggregate after executing
the command.
- `events` - events produced by the command, can be an empty list.
"""
def execute(
application,
aggregate_module,
aggregate_uuid,
%ExecutionContext{} = context,
timeout \\ 5_000
)
when is_atom(aggregate_module) and is_binary(aggregate_uuid) and
(is_number(timeout) or timeout == :infinity) do
name = via_name(application, aggregate_module, aggregate_uuid)
try do
GenServer.call(name, {:execute_command, context}, timeout)
catch
:exit, {:normal, {GenServer, :call, [^name, {:execute_command, ^context}, ^timeout]}} ->
{:exit, {:normal, :aggregate_stopped}}
end
end
  # Returns the aggregate's current state. Asks the running process first;
  # when the process has already stopped (`:normal`) or was never started
  # (`:noproc`), rebuilds the state from the event store in a temporary task
  # bounded by the same `timeout`.
  @doc false
  def aggregate_state(application, aggregate_module, aggregate_uuid, timeout \\ 5_000) do
    name = via_name(application, aggregate_module, aggregate_uuid)
    try do
      GenServer.call(name, :aggregate_state, timeout)
    catch
      :exit, {reason, {GenServer, :call, [^name, :aggregate_state, ^timeout]}}
      when reason in [:normal, :noproc] ->
        task =
          Task.async(fn ->
            # Look up this aggregate module's snapshot config; fall back to
            # no snapshotting when the application has none configured.
            snapshot_options =
              application
              |> Config.get(:snapshotting)
              |> Kernel.||(%{})
              |> Map.get(aggregate_module, [])
            %Aggregate{
              application: application,
              aggregate_module: aggregate_module,
              aggregate_uuid: aggregate_uuid,
              snapshotting: Snapshotting.new(application, aggregate_uuid, snapshot_options)
            }
            |> AggregateStateBuilder.populate()
            |> Map.fetch!(:aggregate_state)
          end)
        # Mirror the GenServer.call timeout semantics for the rebuild task.
        case Task.yield(task, timeout) || Task.shutdown(task) do
          {:ok, result} ->
            result
          nil ->
            exit({:timeout, {GenServer, :call, [name, :aggregate_state, timeout]}})
        end
    end
  end
  # Returns the version of the last event applied to the aggregate.
  @doc false
  def aggregate_version(application, aggregate_module, aggregate_uuid, timeout \\ 5_000) do
    name = via_name(application, aggregate_module, aggregate_uuid)
    GenServer.call(name, :aggregate_version, timeout)
  end
  # Asynchronously requests a snapshot of the aggregate's current state.
  @doc false
  def take_snapshot(application, aggregate_module, aggregate_uuid) do
    name = via_name(application, aggregate_module, aggregate_uuid)
    GenServer.cast(name, :take_snapshot)
  end
  # Stops the aggregate process (synchronous, default reason/timeout).
  @doc false
  def shutdown(application, aggregate_module, aggregate_uuid) do
    name = via_name(application, aggregate_module, aggregate_uuid)
    GenServer.stop(name)
  end
  @doc false
  @impl GenServer
  def init(%Aggregate{} = state) do
    # Initial aggregate state is populated by loading its state snapshot and/or
    # events from the event store.
    {:ok, state, {:continue, :populate_aggregate_state}}
  end
  # Deferred init step: rebuild state outside of `init/1` so starting the
  # process isn't blocked on event store reads.
  @doc false
  @impl GenServer
  def handle_continue(:populate_aggregate_state, %Aggregate{} = state) do
    state = AggregateStateBuilder.populate(state)
    # Subscribe to aggregate's events to catch any events appended to its stream
    # by another process, such as directly appended to the event store.
    {:noreply, state, {:continue, :subscribe_to_events}}
  end
  # Second deferred init step: subscribe to the aggregate's own event stream;
  # notifications arrive as `{:events, events}` in `handle_info/2`.
  @doc false
  @impl GenServer
  def handle_continue(:subscribe_to_events, %Aggregate{} = state) do
    %Aggregate{application: application, aggregate_uuid: aggregate_uuid} = state
    :ok = EventStore.subscribe(application, aggregate_uuid)
    {:noreply, state}
  end
  # Records a snapshot of the current aggregate state. A failed snapshot is
  # only logged - the aggregate keeps running with its previous snapshot
  # bookkeeping. Afterwards the pending lifespan decision is honoured: either
  # stop the process or continue with the configured idle timeout.
  @doc false
  @impl GenServer
  def handle_cast(:take_snapshot, %Aggregate{} = state) do
    %Aggregate{
      aggregate_state: aggregate_state,
      aggregate_version: aggregate_version,
      lifespan_timeout: lifespan_timeout,
      snapshotting: snapshotting
    } = state
    Logger.debug(fn -> describe(state) <> " recording snapshot" end)
    state =
      case Snapshotting.take_snapshot(snapshotting, aggregate_version, aggregate_state) do
        {:ok, snapshotting} ->
          %Aggregate{state | snapshotting: snapshotting}
        {:error, error} ->
          Logger.warn(fn -> describe(state) <> " snapshot failed due to: " <> inspect(error) end)
          state
      end
    case lifespan_timeout do
      {:stop, reason} ->
        {:stop, reason, state}
      lifespan_timeout ->
        {:noreply, state, lifespan_timeout}
    end
  end
  # Executes a dispatched command, wrapped in telemetry start/stop events.
  # The lifespan callback (`after_command`/`after_event`/`after_error`) picks
  # the next idle timeout or a stop decision based on the execution result.
  @doc false
  @impl GenServer
  def handle_call({:execute_command, %ExecutionContext{} = context}, from, %Aggregate{} = state) do
    %ExecutionContext{lifespan: lifespan, command: command} = context
    telemetry_metadata = telemetry_metadata(context, from, state)
    start_time = telemetry_start(telemetry_metadata)
    {result, state} = execute_command(context, state)
    lifespan_timeout =
      case result do
        {:ok, []} ->
          # No events produced: the lifespan sees the command itself.
          aggregate_lifespan_timeout(lifespan, :after_command, command)
        {:ok, events} ->
          aggregate_lifespan_timeout(lifespan, :after_event, events)
        {:error, error} ->
          aggregate_lifespan_timeout(lifespan, :after_error, error)
        {:error, error, _stacktrace} ->
          aggregate_lifespan_timeout(lifespan, :after_error, error)
      end
    formatted_reply = ExecutionContext.format_reply(result, context, state)
    state = %Aggregate{state | lifespan_timeout: lifespan_timeout}
    %Aggregate{aggregate_version: aggregate_version, snapshotting: snapshotting} = state
    response =
      if Snapshotting.snapshot_required?(snapshotting, aggregate_version) do
        # Snapshot asynchronously; the pending lifespan decision is applied
        # when the `:take_snapshot` cast is handled.
        :ok = GenServer.cast(self(), :take_snapshot)
        {:reply, formatted_reply, state}
      else
        case lifespan_timeout do
          {:stop, reason} -> {:stop, reason, formatted_reply, state}
          lifespan_timeout -> {:reply, formatted_reply, state, lifespan_timeout}
        end
      end
    # Rebuild metadata so the stop event reflects the post-execution state.
    telemetry_metadata = telemetry_metadata(context, from, state)
    telemetry_stop(start_time, telemetry_metadata, result)
    response
  end
@doc false
@impl GenServer
def handle_call(:aggregate_state, _from, %Aggregate{} = state) do
%Aggregate{aggregate_state: aggregate_state} = state
{:reply, aggregate_state, state}
end
@doc false
@impl GenServer
def handle_call(:aggregate_version, _from, %Aggregate{} = state) do
%Aggregate{aggregate_version: aggregate_version} = state
{:reply, aggregate_version, state}
end
@doc false
@impl GenServer
def handle_info({:events, events}, %Aggregate{} = state) do
%Aggregate{application: application, lifespan_timeout: lifespan_timeout} = state
Logger.debug(fn -> describe(state) <> " received events: #{inspect(events)}" end)
try do
state =
events
|> Upcast.upcast_event_stream(additional_metadata: %{application: application})
|> Enum.reduce(state, &handle_event/2)
state = Enum.reduce(events, state, &handle_event/2)
{:noreply, state, lifespan_timeout}
catch
{:error, error} ->
Logger.debug(fn -> describe(state) <> " stopping due to: #{inspect(error)}" end)
# Stop after event handling returned an error
{:stop, error, state}
end
end
  # Idle timeout elapsed (set by the aggregate lifespan): stop normally.
  @doc false
  @impl GenServer
  def handle_info(:timeout, %Aggregate{} = state) do
    Logger.debug(fn -> describe(state) <> " stopping due to inactivity timeout" end)
    {:stop, :normal, state}
  end
  # Handle events appended to the aggregate's stream, received by its
  # event store subscription, by applying any missed events to its state.
  # Events must arrive in contiguous version order: exactly-next events are
  # applied, already-seen events are ignored, and any gap is a fatal error
  # (thrown, caught in `handle_info/2`).
  defp handle_event(%RecordedEvent{} = event, %Aggregate{} = state) do
    %RecordedEvent{data: data, stream_version: stream_version} = event
    %Aggregate{
      aggregate_module: aggregate_module,
      aggregate_state: aggregate_state,
      aggregate_version: aggregate_version
    } = state
    expected_version = aggregate_version + 1
    case stream_version do
      ^expected_version ->
        # apply event to aggregate's state
        %Aggregate{
          state
          | aggregate_version: stream_version,
            aggregate_state: aggregate_module.apply(aggregate_state, data)
        }
      already_seen_version when already_seen_version <= aggregate_version ->
        # ignore events already applied to aggregate state
        state
      _unexpected_version ->
        Logger.debug(fn ->
          describe(state) <> " received an unexpected event: #{inspect(event)}"
        end)
        # throw an error when an unexpected event is received
        throw({:error, :unexpected_event_received})
    end
  end
  # Asks the configured lifespan module which timeout to use next, passing the
  # last event (or the command/error) as the single callback argument.
  # Normalises the result: `:stop` becomes `{:stop, :normal}`, invalid return
  # values are logged and fall back to `:infinity`.
  defp aggregate_lifespan_timeout(lifespan, function_name, args) do
    # Take the last event or the command or error
    args = args |> List.wrap() |> Enum.take(-1)
    case apply(lifespan, function_name, args) do
      timeout when timeout in [:infinity, :hibernate] ->
        timeout
      :stop ->
        {:stop, :normal}
      {:stop, _reason} = reply ->
        reply
      timeout when is_integer(timeout) and timeout >= 0 ->
        timeout
      invalid ->
        Logger.warn(fn ->
          "Invalid timeout for aggregate lifespan " <>
            inspect(lifespan) <>
            ", expected a non-negative integer, `:infinity`, `:hibernate`, `:stop`, or `{:stop, reason}` but got: " <>
            inspect(invalid)
        end)
        :infinity
    end
  end
  # Optional pre-execution hook: when the context names a `before_execute`
  # function on the handler it must return `:ok` for execution to proceed.
  defp before_execute_command(_aggregate_state, %ExecutionContext{before_execute: nil}), do: :ok
  defp before_execute_command(aggregate_state, %ExecutionContext{} = context) do
    %ExecutionContext{handler: handler, before_execute: before_execute} = context
    Kernel.apply(handler, before_execute, [aggregate_state, context])
  end
  # Invokes the command handler and normalises its return value:
  # `:ok`/`nil`/`[]` -> no events; `%Multi{}` -> run the multi; `{:ok, events}`
  # or a bare event (list) -> apply and persist. Exceptions are caught,
  # logged, and returned as `{:error, error, stacktrace}` so the caller can
  # emit a telemetry exception event.
  defp execute_command(%ExecutionContext{} = context, %Aggregate{} = state) do
    %ExecutionContext{command: command, handler: handler, function: function} = context
    %Aggregate{aggregate_state: aggregate_state} = state
    Logger.debug(fn -> describe(state) <> " executing command: " <> inspect(command) end)
    with :ok <- before_execute_command(aggregate_state, context) do
      case Kernel.apply(handler, function, [aggregate_state, command]) do
        {:error, _error} = reply ->
          {reply, state}
        none when none in [:ok, nil, []] ->
          {{:ok, []}, state}
        %Multi{} = multi ->
          # A multi computes its own resulting state alongside the events.
          case Multi.run(multi) do
            {:error, _error} = reply ->
              {reply, state}
            {aggregate_state, pending_events} ->
              persist_events(pending_events, aggregate_state, context, state)
          end
        {:ok, pending_events} ->
          apply_and_persist_events(pending_events, context, state)
        pending_events ->
          apply_and_persist_events(pending_events, context, state)
      end
    else
      {:error, _error} = reply ->
        {reply, state}
    end
  rescue
    error ->
      stacktrace = __STACKTRACE__
      Logger.error(Exception.format(:error, error, stacktrace))
      {{:error, error, stacktrace}, state}
  end
  # Applies pending events to the aggregate state, then persists them.
  defp apply_and_persist_events(pending_events, context, %Aggregate{} = state) do
    %Aggregate{aggregate_module: aggregate_module, aggregate_state: aggregate_state} = state
    pending_events = List.wrap(pending_events)
    aggregate_state = apply_events(aggregate_module, aggregate_state, pending_events)
    persist_events(pending_events, aggregate_state, context, state)
  end
  # Folds each event through the aggregate module's `apply/2` callback.
  defp apply_events(aggregate_module, aggregate_state, events) do
    Enum.reduce(events, aggregate_state, &aggregate_module.apply(&2, &1))
  end
  # Appends pending events to the event store with optimistic concurrency.
  # On `:wrong_expected_version` (another writer appended first) the aggregate
  # state is rebuilt from the store and the command is retried while the
  # execution context still has retry attempts remaining.
  defp persist_events(pending_events, aggregate_state, context, %Aggregate{} = state) do
    %Aggregate{aggregate_version: expected_version} = state
    with :ok <- append_to_stream(pending_events, context, state) do
      aggregate_version = expected_version + length(pending_events)
      state = %Aggregate{
        state
        | aggregate_state: aggregate_state,
          aggregate_version: aggregate_version
      }
      {{:ok, pending_events}, state}
    else
      {:error, :wrong_expected_version} ->
        # Fetch missing events from event store
        state = AggregateStateBuilder.rebuild_from_events(state)
        # Retry command if there are any attempts left
        case ExecutionContext.retry(context) do
          {:ok, context} ->
            Logger.debug(fn -> describe(state) <> " wrong expected version, retrying command" end)
            execute_command(context, state)
          reply ->
            Logger.debug(fn ->
              describe(state) <> " wrong expected version, but not retrying command"
            end)
            {reply, state}
        end
      {:error, _error} = reply ->
        {reply, state}
    end
  end
  # Nothing to append: succeed without touching the event store.
  defp append_to_stream([], _context, _state), do: :ok
  # Maps domain events to event data (carrying causation/correlation ids and
  # metadata from the execution context) and appends them to the aggregate's
  # stream at the expected version.
  defp append_to_stream(pending_events, %ExecutionContext{} = context, %Aggregate{} = state) do
    %Aggregate{
      application: application,
      aggregate_uuid: aggregate_uuid,
      aggregate_version: expected_version
    } = state
    %ExecutionContext{
      causation_id: causation_id,
      correlation_id: correlation_id,
      metadata: metadata
    } = context
    event_data =
      Mapper.map_to_event_data(pending_events,
        causation_id: causation_id,
        correlation_id: correlation_id,
        metadata: metadata
      )
    EventStore.append_to_stream(application, aggregate_uuid, expected_version, event_data)
  end
  # Emits the `[:commanded, :aggregate, :execute, :start]` event and returns
  # the start time used to measure duration.
  defp telemetry_start(telemetry_metadata) do
    Telemetry.start([:commanded, :aggregate, :execute], telemetry_metadata)
  end
  # Emits the matching stop event, attaching produced `:events` on success or
  # the `:error` on failure; a three-element error tuple (raised exception)
  # becomes an `:exception` event with its stacktrace.
  defp telemetry_stop(start_time, telemetry_metadata, result) do
    event_prefix = [:commanded, :aggregate, :execute]
    case result do
      {:ok, events} ->
        Telemetry.stop(event_prefix, start_time, Map.put(telemetry_metadata, :events, events))
      {:error, error} ->
        Telemetry.stop(event_prefix, start_time, Map.put(telemetry_metadata, :error, error))
      {:error, error, stacktrace} ->
        Telemetry.exception(
          event_prefix,
          start_time,
          :error,
          error,
          stacktrace,
          telemetry_metadata
        )
    end
  end
  # Builds the metadata map documented in the module's telemetry events;
  # `from` is the GenServer `{pid, ref}` caller tuple.
  defp telemetry_metadata(%ExecutionContext{} = context, from, %Aggregate{} = state) do
    %Aggregate{
      application: application,
      aggregate_uuid: aggregate_uuid,
      aggregate_state: aggregate_state,
      aggregate_version: aggregate_version
    } = state
    {pid, _ref} = from
    %{
      application: application,
      aggregate_uuid: aggregate_uuid,
      aggregate_state: aggregate_state,
      aggregate_version: aggregate_version,
      caller: pid,
      execution_context: context
    }
  end
defp via_name(application, aggregate_module, aggregate_uuid) do
name = name(application, aggregate_module, aggregate_uuid)
Registration.via_tuple(application, name)
end
defp describe(%Aggregate{} = aggregate) do
%Aggregate{
aggregate_module: aggregate_module,
aggregate_uuid: aggregate_uuid,
aggregate_version: aggregate_version
} = aggregate
"#{inspect(aggregate_module)}<#{aggregate_uuid}@#{aggregate_version}>"
end
end
| 31.555891 | 118 | 0.675395 |
f7fc1e3600442d00358b10cb7e77eaa1188bfb14 | 352 | exs | Elixir | priv/repo/seeds.exs | DylanGuedes/forensic | 16ea59bb6aeefe636012b747a7caab26e0b7f686 | [
"Apache-2.0"
] | null | null | null | priv/repo/seeds.exs | DylanGuedes/forensic | 16ea59bb6aeefe636012b747a7caab26e0b7f686 | [
"Apache-2.0"
] | null | null | null | priv/repo/seeds.exs | DylanGuedes/forensic | 16ea59bb6aeefe636012b747a7caab26e0b7f686 | [
"Apache-2.0"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Forensic.Repo.insert!(%Forensic.SomeModel{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.333333 | 61 | 0.707386 |
f7fc2254d58a9371e650120c0e059043d8621bdc | 497 | ex | Elixir | kousa/lib/kousa/metrics/prometheus.ex | lazarospsa/dogehouse | 4400518f5b6bce929e40eada615356e8814a8d28 | [
"MIT"
] | 9 | 2021-03-17T03:56:18.000Z | 2021-09-24T22:45:14.000Z | kousa/lib/kousa/metrics/prometheus.ex | lazarospsa/dogehouse | 4400518f5b6bce929e40eada615356e8814a8d28 | [
"MIT"
] | 12 | 2021-07-06T12:51:13.000Z | 2022-03-16T12:38:18.000Z | kousa/lib/kousa/metrics/prometheus.ex | lazarospsa/dogehouse | 4400518f5b6bce929e40eada615356e8814a8d28 | [
"MIT"
] | 4 | 2021-07-15T20:33:50.000Z | 2022-03-27T12:46:47.000Z | defmodule Kousa.Metric.PipelineInstrumenter do
  use Prometheus.PlugPipelineInstrumenter
  # Labels pipeline metrics with the request path of the connection.
  def label_value(:request_path, conn) do
    conn.request_path
  end
end
defmodule Kousa.Metric.PrometheusExporter do
  @moduledoc "Plug that exposes collected Prometheus metrics for scraping."
  use Prometheus.PlugExporter
end
defmodule Kousa.Metric.UserSessions do
  @moduledoc "Gauge tracking the number of running user sessions."
  use Prometheus.Metric
  # Declares the gauge; call once at application start-up before `set/1`.
  def setup do
    Gauge.declare(
      name: :user_sessions,
      help: "Number of user sessions running"
    )
  end
  # Sets the gauge to the current session count `n`.
  def set(n) do
    Gauge.set([name: :user_sessions], n)
  end
end
| 18.407407 | 46 | 0.740443 |
f7fc31ff407232378b35f900374e29798c5492be | 1,820 | ex | Elixir | fw/phoenix/hello_phoenix/test/support/model_case.ex | alexgarzao/learning_elixir | f1b06f26898be35803591245d033d30dd4bd1e1a | [
"MIT"
] | 14 | 2016-09-11T02:31:36.000Z | 2021-07-15T12:51:31.000Z | fw/phoenix/hello_phoenix/test/support/model_case.ex | alexgarzao/learning_elixir | f1b06f26898be35803591245d033d30dd4bd1e1a | [
"MIT"
] | 4 | 2020-08-25T13:48:31.000Z | 2021-09-01T12:56:54.000Z | fw/phoenix/hello_phoenix/test/support/model_case.ex | alexgarzao/learning_elixir | f1b06f26898be35803591245d033d30dd4bd1e1a | [
"MIT"
] | 2 | 2017-08-14T22:05:47.000Z | 2017-11-10T18:42:23.000Z | defmodule HelloPhoenix.ModelCase do
@moduledoc """
This module defines the test case to be used by
model tests.
You may define functions here to be used as helpers in
your model tests. See `errors_on/2`'s definition as reference.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias HelloPhoenix.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import HelloPhoenix.ModelCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(HelloPhoenix.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(HelloPhoenix.Repo, {:shared, self()})
end
:ok
end
@doc """
Helper for returning list of errors in a struct when given certain data.
## Examples
Given a User schema that lists `:name` as a required field and validates
`:password` to be safe, it would return:
iex> errors_on(%User{}, %{password: "password"})
[password: "is unsafe", name: "is blank"]
You could then write your assertion like:
assert {:password, "is unsafe"} in errors_on(%User{}, %{password: "password"})
You can also create the changeset manually and retrieve the errors
field directly:
iex> changeset = User.changeset(%User{}, password: "password")
iex> {:password, "is unsafe"} in changeset.errors
true
"""
def errors_on(struct, data) do
struct.__struct__.changeset(struct, data)
|> Ecto.Changeset.traverse_errors(&HelloPhoenix.ErrorHelpers.translate_error/1)
|> Enum.flat_map(fn {key, errors} -> for msg <- errors, do: {key, msg} end)
end
end
| 27.575758 | 84 | 0.69011 |
f7fcd97cb4e440cef60c6c0d0fe3b08525d9906c | 838 | ex | Elixir | lib/ingram_marketplace/model/report_item.ex | fbettag/ingram_marketplace.ex | 1c63d391707058fb8cf58fdefd54e2ade97acf4b | [
"MIT"
] | null | null | null | lib/ingram_marketplace/model/report_item.ex | fbettag/ingram_marketplace.ex | 1c63d391707058fb8cf58fdefd54e2ade97acf4b | [
"MIT"
] | null | null | null | lib/ingram_marketplace/model/report_item.ex | fbettag/ingram_marketplace.ex | 1c63d391707058fb8cf58fdefd54e2ade97acf4b | [
"MIT"
] | null | null | null | defmodule Ingram.Marketplace.Model.ReportItem do
@moduledoc """
A rated data report generated and exported during a specific period.
"""
@derive [Poison.Encoder]
defstruct [
:id,
:name,
:status,
:type,
:format,
:creationDate,
:startDate,
:endDate,
:downloadUrl
]
@type t :: %__MODULE__{
:id => String.t() | nil,
:name => String.t() | nil,
:status => String.t() | nil,
:type => String.t() | nil,
:format => String.t() | nil,
:creationDate => DateTime.t() | nil,
:startDate => DateTime.t() | nil,
:endDate => DateTime.t() | nil,
:downloadUrl => String.t() | nil
}
end
# ReportItem has no nested structs to transform, so decoding is the identity.
defimpl Poison.Decoder, for: Ingram.Marketplace.Model.ReportItem do
  def decode(value, _options), do: value
end
| 22.648649 | 70 | 0.554893 |
f7fcde8916eded5d2442e5eb0e98991a11fdeef1 | 1,773 | ex | Elixir | clients/composer/lib/google_api/composer/v1beta1/model/web_server_config.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/composer/lib/google_api/composer/v1beta1/model/web_server_config.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/composer/lib/google_api/composer/v1beta1/model/web_server_config.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Composer.V1beta1.Model.WebServerConfig do
  @moduledoc """
  The configuration settings for the Airflow web server App Engine instance.
  ## Attributes
  *   `machineType` (*type:* `String.t`, *default:* `nil`) - Optional. Machine type on which Airflow web server is running. It has to be one of: composer-n1-webserver-2, composer-n1-webserver-4 or composer-n1-webserver-8. If not specified, composer-n1-webserver-2 will be used. Value custom is returned only in response, if Airflow web server parameters were manually changed to a non-standard values.
  """
  # Auto-generated model; `field/1` declares JSON (de)serialization for the attribute.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :machineType => String.t()
        }
  field(:machineType)
end
defimpl Poison.Decoder, for: GoogleApi.Composer.V1beta1.Model.WebServerConfig do
  # Delegate to the decode helper generated by GoogleApi.Gax.ModelBase.
  def decode(value, options),
    do: GoogleApi.Composer.V1beta1.Model.WebServerConfig.decode(value, options)
end

defimpl Poison.Encoder, for: GoogleApi.Composer.V1beta1.Model.WebServerConfig do
  # All generated models share the ModelBase encoding logic.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 37.723404 | 401 | 0.753525 |
f7fcdf37164b6a479eec468d785ac9efe858b601 | 412 | exs | Elixir | mix.exs | nicholasbair/const | c951b8d768e50268819dd061f066797f4432882b | [
"Apache-2.0"
] | null | null | null | mix.exs | nicholasbair/const | c951b8d768e50268819dd061f066797f4432882b | [
"Apache-2.0"
] | 3 | 2019-11-07T02:22:21.000Z | 2020-04-20T21:07:52.000Z | mix.exs | nicholasbair/const | c951b8d768e50268819dd061f066797f4432882b | [
"Apache-2.0"
] | null | null | null | defmodule Const.MixProject do
use Mix.Project
def project do
[
app: :const,
version: "0.1.0",
elixir: "~> 1.9",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
def application do
[
extra_applications: [:logger]
]
end
defp deps do
[
{:jason, "~> 1.1"},
{:dialyxir, "~> 0.5", only: [:dev], runtime: false},
]
end
end
| 15.259259 | 58 | 0.504854 |
f7fcf0464cd8d0a58a8082d38bbede872ea2fd5f | 2,749 | ex | Elixir | lib/chaperon/action/websocket/client.ex | ideo/chaperon | 6298e96ae2b8f39a1d729b7363f4a451bc4c3a6e | [
"MIT"
] | null | null | null | lib/chaperon/action/websocket/client.ex | ideo/chaperon | 6298e96ae2b8f39a1d729b7363f4a451bc4c3a6e | [
"MIT"
] | null | null | null | lib/chaperon/action/websocket/client.ex | ideo/chaperon | 6298e96ae2b8f39a1d729b7363f4a451bc4c3a6e | [
"MIT"
] | null | null | null | defmodule Chaperon.Action.WebSocket.Client do
@moduledoc """
Implements Chaperon's WebSocket client (behavior of WebSockex WS library).
"""
use WebSockex
require Logger
  defmodule State do
    @moduledoc """
    WebSocket client process state.

    * `messages` - FIFO queue of frames received but not yet consumed
    * `awaiting_clients` - FIFO queue of pids blocked waiting for a frame
    * `log_prefix` - per-session prefix used in log output
    """

    defstruct messages: EQ.new(),
              awaiting_clients: EQ.new(),
              log_prefix: nil
  end
alias __MODULE__.State
  # Connects to `url` and starts the WebSockex client process for `session`.
  # The session id is baked into the log prefix for traceability.
  def start_link(session, url) do
    WebSockex.start_link(url, __MODULE__, %State{log_prefix: "#{session.id} [WS Client] |"})
  end
@spec send_frame(pid, WebSockex.frame()) :: :ok
def send_frame(pid, frame = {:text, _}) do
WebSockex.send_frame(pid, frame)
end
def send_frame(pid, frame) do
WebSockex.send_frame(pid, frame)
end
  # WebSockex callback: buffer the incoming frame, or hand it straight to any
  # callers already blocked in recv_message/2.
  def handle_frame(msg, state) do
    Logger.debug(fn -> "#{state.log_prefix} Received Frame" end)

    if EQ.empty?(state.awaiting_clients) do
      # Nobody is waiting: queue the frame for a later recv_message/2 call.
      state = update_in(state.messages, &EQ.push(&1, msg))
      {:ok, state}
    else
      # Deliver this frame to every waiting caller, then clear the wait queue.
      state.awaiting_clients
      |> EQ.to_list()
      |> Enum.each(&send(&1, {:next_frame, msg}))

      {:ok, put_in(state.awaiting_clients, EQ.new())}
    end
  end
def handle_ping(:ping, state) do
{:reply, {:ping, "pong"}, state}
end
def handle_ping({:ping, msg}, state) do
{:reply, {:ping, msg}, state}
end
def handle_pong(:pong, state) do
{:ok, state}
end
def handle_pong({:pong, _}, state) do
{:ok, state}
end
  # A close we initiated ourselves: just log it.
  def handle_disconnect(%{reason: {:local, reason}}, state) do
    Logger.debug(fn ->
      "#{state.log_prefix} Local close with reason: #{inspect(reason)}"
    end)

    {:ok, state}
  end

  # Any other disconnect falls back to WebSockex's default handling.
  def handle_disconnect(disconnect_map, state) do
    super(disconnect_map, state)
  end

  # The TLS transport closed underneath us: shut the client down.
  def handle_info({:ssl_closed, _info}, state) do
    {:close, state}
  end

  # A caller (see recv_message/2) asked for the next frame: deliver a queued
  # one immediately, or remember the caller until a frame arrives.
  def handle_info({:next_frame, pid}, state) do
    case EQ.pop(state.messages) do
      {{:value, msg}, remaining} ->
        state = put_in(state.messages, remaining)
        send(pid, {:next_frame, msg})
        {:ok, state}

      {:empty, _} ->
        state = update_in(state.awaiting_clients, &EQ.push(&1, pid))
        {:ok, state}
    end
  end

  # close/1 was called: terminate the WebSocket connection.
  def handle_info(:close, state) do
    {:close, state}
  end
  # Requests the next frame from the client process and blocks until it
  # arrives. With an integer `timeout` (milliseconds) it gives up after that
  # long and returns {:error, {:timeout, timeout}}; with the default `nil`
  # it waits indefinitely.
  def recv_message(pid, timeout \\ nil) do
    # Ask the WebSockex process for the next frame, then await the response.
    send(pid, {:next_frame, self()})

    case timeout do
      nil ->
        receive do
          {:next_frame, msg} ->
            msg
        end

      timeout when is_integer(timeout) ->
        receive do
          {:next_frame, msg} ->
            msg
        after
          timeout ->
            # NOTE(review): after a timeout the frame may still arrive later
            # and sit unhandled in the caller's mailbox — confirm callers
            # tolerate this.
            {:error, {:timeout, timeout}}
        end
    end
  end
def close(pid) do
send(pid, :close)
end
end
| 22.169355 | 92 | 0.603856 |
f7fcfe4ddeb379dd7ee24d27af01e4730e72f53e | 504 | ex | Elixir | lib/edgedb/protocol/datatypes/uint8.ex | nsidnev/edgedb-elixir | bade2b9daba2e83bfaa5915b2addb74f41610968 | [
"MIT"
] | 30 | 2021-05-19T08:54:44.000Z | 2022-03-11T22:52:25.000Z | lib/edgedb/protocol/datatypes/uint8.ex | nsidnev/edgedb-elixir | bade2b9daba2e83bfaa5915b2addb74f41610968 | [
"MIT"
] | 3 | 2021-11-17T21:26:01.000Z | 2022-03-12T09:49:25.000Z | lib/edgedb/protocol/datatypes/uint8.ex | nsidnev/edgedb-elixir | bade2b9daba2e83bfaa5915b2addb74f41610968 | [
"MIT"
] | 3 | 2021-08-29T14:55:41.000Z | 2022-03-12T01:30:35.000Z | defmodule EdgeDB.Protocol.Datatypes.UInt8 do
use EdgeDB.Protocol.Datatype
  # Inclusive bounds of an unsigned 8-bit integer.
  @uint8_max 0xFF
  @uint8_min 0x0

  # Guard-safe range check; also usable by callers in their own guards.
  defguard is_uint8(number)
           when is_integer(number) and @uint8_min <= number and number <= @uint8_max

  defdatatype(type: non_neg_integer())

  @impl EdgeDB.Protocol.Datatype
  def encode_datatype(number) when is_uint8(number) do
    # `uint8` is a binary modifier provided by the protocol macros.
    <<number::uint8>>
  end

  @impl EdgeDB.Protocol.Datatype
  def decode_datatype(<<number::uint8, rest::binary>>) do
    # Return the decoded value plus the unconsumed remainder of the buffer.
    {number, rest}
  end
end
| 22.909091 | 84 | 0.722222 |
f7fcffe4b1c30f3ea5348513e09a339a1efce028 | 1,489 | exs | Elixir | test/vintage_net_qmi/asu_calculator_test.exs | nerves-networking/vintage_net_qmi | 228baec658befc83ec72e24a8ec9b003c8df668c | [
"Apache-2.0"
] | 3 | 2021-05-28T00:43:55.000Z | 2022-02-05T00:45:35.000Z | test/vintage_net_qmi/asu_calculator_test.exs | nerves-networking/vintage_net_qmi | 228baec658befc83ec72e24a8ec9b003c8df668c | [
"Apache-2.0"
] | 12 | 2021-06-30T18:37:16.000Z | 2022-02-11T12:40:36.000Z | test/vintage_net_qmi/asu_calculator_test.exs | nerves-networking/vintage_net_qmi | 228baec658befc83ec72e24a8ec9b003c8df668c | [
"Apache-2.0"
] | null | null | null | defmodule VintageNetQMI.ASUCalculatorTest do
use ExUnit.Case
alias VintageNetQMI.ASUCalculator
test "computes gsm dbm" do
assert ASUCalculator.from_gsm_asu(2).dbm == -109
assert ASUCalculator.from_gsm_asu(9).dbm == -95
assert ASUCalculator.from_gsm_asu(15).dbm == -83
assert ASUCalculator.from_gsm_asu(30).dbm == -53
assert ASUCalculator.from_gsm_asu(99).dbm == -113
# Bad values
assert ASUCalculator.from_gsm_asu(-100).dbm == -113
assert ASUCalculator.from_gsm_asu(31).dbm == -53
end
test "computes gsm bars" do
assert ASUCalculator.from_gsm_asu(2).bars == 1
assert ASUCalculator.from_gsm_asu(9).bars == 1
assert ASUCalculator.from_gsm_asu(14).bars == 2
assert ASUCalculator.from_gsm_asu(15).bars == 3
assert ASUCalculator.from_gsm_asu(30).bars == 4
assert ASUCalculator.from_gsm_asu(99).bars == 0
end
test "computes lte asu" do
# Really, the only important thing here is that -113 goes to 99
# since 99 is a special number.
assert ASUCalculator.from_lte_rssi(-113).asu == 99
assert ASUCalculator.from_lte_rssi(-112).asu == 0
assert ASUCalculator.from_lte_rssi(0).asu == 56
end
test "computes lte bars" do
assert ASUCalculator.from_lte_rssi(-64).bars == 4
assert ASUCalculator.from_lte_rssi(-74).bars == 3
assert ASUCalculator.from_lte_rssi(-84).bars == 2
assert ASUCalculator.from_lte_rssi(-112).bars == 1
assert ASUCalculator.from_lte_rssi(-113).bars == 0
end
end
| 34.627907 | 67 | 0.718603 |
f7fd27cfddee9e9b8483c529ba72907915e0de07 | 106 | ex | Elixir | lib/egcovac/repo.ex | karembadawy/egcovac | a1ddb339656d41b29ea098cd8be6c4934dec6eee | [
"MIT"
] | null | null | null | lib/egcovac/repo.ex | karembadawy/egcovac | a1ddb339656d41b29ea098cd8be6c4934dec6eee | [
"MIT"
] | null | null | null | lib/egcovac/repo.ex | karembadawy/egcovac | a1ddb339656d41b29ea098cd8be6c4934dec6eee | [
"MIT"
] | null | null | null | defmodule Egcovac.Repo do
  # Ecto repository for the :egcovac OTP app, backed by PostgreSQL.
  # Connection settings are read at runtime from `config :egcovac, Egcovac.Repo`.
  use Ecto.Repo,
    otp_app: :egcovac,
    adapter: Ecto.Adapters.Postgres
end
| 17.666667 | 35 | 0.726415 |
f7fd2c81ca615f9bbca70a88c2ee6603d6097bb9 | 3,704 | ex | Elixir | clients/big_query/lib/google_api/big_query/v2/model/table_list_tables.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/big_query/lib/google_api/big_query/v2/model/table_list_tables.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/big_query/lib/google_api/big_query/v2/model/table_list_tables.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.BigQuery.V2.Model.TableListTables do
  @moduledoc """
  A single table entry within a BigQuery table list.

  ## Attributes

  *   `clustering` (*type:* `GoogleApi.BigQuery.V2.Model.Clustering.t`, *default:* `nil`) - [Beta] Clustering specification for this table, if configured.
  *   `creationTime` (*type:* `String.t`, *default:* `nil`) - The time when this table was created, in milliseconds since the epoch.
  *   `expirationTime` (*type:* `String.t`, *default:* `nil`) - [Optional] The time when this table expires, in milliseconds since the epoch. If not present, the table will persist indefinitely. Expired tables will be deleted and their storage reclaimed.
  *   `friendlyName` (*type:* `String.t`, *default:* `nil`) - The user-friendly name for this table.
  *   `id` (*type:* `String.t`, *default:* `nil`) - An opaque ID of the table
  *   `kind` (*type:* `String.t`, *default:* `bigquery#table`) - The resource type.
  *   `labels` (*type:* `map()`, *default:* `nil`) - The labels associated with this table. You can use these to organize and group your tables.
  *   `tableReference` (*type:* `GoogleApi.BigQuery.V2.Model.TableReference.t`, *default:* `nil`) - A reference uniquely identifying the table.
  *   `timePartitioning` (*type:* `GoogleApi.BigQuery.V2.Model.TimePartitioning.t`, *default:* `nil`) - The time-based partitioning specification for this table, if configured.
  *   `type` (*type:* `String.t`, *default:* `nil`) - The type of table. Possible values are: TABLE, VIEW.
  *   `view` (*type:* `GoogleApi.BigQuery.V2.Model.TableListTablesView.t`, *default:* `nil`) - Additional details for a view.
  """

  # ModelBase supplies the `field` macro plus the shared Poison helpers.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :clustering => GoogleApi.BigQuery.V2.Model.Clustering.t(),
          :creationTime => String.t(),
          :expirationTime => String.t(),
          :friendlyName => String.t(),
          :id => String.t(),
          :kind => String.t(),
          :labels => map(),
          :tableReference => GoogleApi.BigQuery.V2.Model.TableReference.t(),
          :timePartitioning => GoogleApi.BigQuery.V2.Model.TimePartitioning.t(),
          :type => String.t(),
          :view => GoogleApi.BigQuery.V2.Model.TableListTablesView.t()
        }

  field(:clustering, as: GoogleApi.BigQuery.V2.Model.Clustering)
  field(:creationTime)
  field(:expirationTime)
  field(:friendlyName)
  field(:id)
  field(:kind)
  field(:labels, type: :map)
  field(:tableReference, as: GoogleApi.BigQuery.V2.Model.TableReference)
  field(:timePartitioning, as: GoogleApi.BigQuery.V2.Model.TimePartitioning)
  field(:type)
  field(:view, as: GoogleApi.BigQuery.V2.Model.TableListTablesView)
end
defimpl Poison.Decoder, for: GoogleApi.BigQuery.V2.Model.TableListTables do
  # Delegate to the decode helper generated by GoogleApi.Gax.ModelBase.
  def decode(value, options),
    do: GoogleApi.BigQuery.V2.Model.TableListTables.decode(value, options)
end

defimpl Poison.Encoder, for: GoogleApi.BigQuery.V2.Model.TableListTables do
  # All generated models share the ModelBase encoding logic.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 48.103896 | 254 | 0.698704 |
f7fd6bfd880da55bb23890a7f1fca141d45c8c72 | 729 | ex | Elixir | apps/rest_api/lib/routers/public.ex | lcpojr/watcher_ex | bd5a9210b5b41a6c9b5d4255de19fc6967d29fb7 | [
"Apache-2.0"
] | 9 | 2020-10-13T14:11:37.000Z | 2021-08-12T18:40:08.000Z | apps/rest_api/lib/routers/public.ex | lcpojr/watcher_ex | bd5a9210b5b41a6c9b5d4255de19fc6967d29fb7 | [
"Apache-2.0"
] | 28 | 2020-10-04T14:43:48.000Z | 2021-12-07T16:54:22.000Z | apps/rest_api/lib/routers/public.ex | lcpojr/watcher_ex | bd5a9210b5b41a6c9b5d4255de19fc6967d29fb7 | [
"Apache-2.0"
] | 3 | 2020-11-25T20:59:47.000Z | 2021-08-30T10:36:58.000Z | defmodule RestAPI.Routers.Public do
@moduledoc false
use RestAPI.Router
alias RestAPI.Controllers.Public
alias RestAPI.Plugs.{Authentication, Authorization, Tracker}
  # Base pipeline: accept JSON/urlencoded requests and track them.
  pipeline :rest_api do
    plug :accepts, ["json", "urlencoded"]
    plug Tracker
  end

  # Authorization check for "public" clients (see RestAPI.Plugs.Authorization).
  pipeline :authorized do
    plug Authorization, type: "public"
  end

  # Authentication check (see RestAPI.Plugs.Authentication).
  pipeline :authenticated do
    plug Authentication
  end

  scope "/v1", Public do
    pipe_through :rest_api

    scope "/auth/protocol/openid-connect" do
      # Token issuing and revocation go through the base pipeline only.
      post "/token", Auth, :token
      post "/revoke", Auth, :revoke

      scope "/authorize" do
        # Authorization additionally requires an authenticated, authorized caller.
        pipe_through :authenticated
        pipe_through :authorized

        post "/", Auth, :authorize
      end
    end
  end
end
| 19.184211 | 62 | 0.670782 |
f7fdb0fd1bec493bec910b61bd762fc347749423 | 4,097 | exs | Elixir | test/skogsra/env_test.exs | davorbadrov/skogsra | 785a7b9cbe92c1e96fae0381b9087843dc46926e | [
"MIT"
] | null | null | null | test/skogsra/env_test.exs | davorbadrov/skogsra | 785a7b9cbe92c1e96fae0381b9087843dc46926e | [
"MIT"
] | null | null | null | test/skogsra/env_test.exs | davorbadrov/skogsra | 785a7b9cbe92c1e96fae0381b9087843dc46926e | [
"MIT"
] | null | null | null | defmodule Skogsra.EnvTest do
use ExUnit.Case
alias Skogsra.Env
describe "new/2" do
test "adds default options" do
%Env{options: options} = Env.new(nil, :app, :key, [])
assert options[:skip_system] == false
assert options[:skip_config] == false
assert options[:required] == false
assert options[:cached] == true
end
test "converts single key to a list" do
assert %Env{keys: [:key]} = Env.new(nil, :app, :key, [])
end
test "sets namespace" do
assert %Env{namespace: Test} = Env.new(nil, :app, :key, namespace: Test)
end
end
describe "skip_system?/1" do
test "gets default value for skip system if not set" do
env = Env.new(nil, :app, :key, [])
assert not Env.skip_system?(env)
end
test "gets value for skip system if set" do
env = Env.new(nil, :app, :key, skip_system: true)
assert Env.skip_system?(env)
end
end
describe "skip_config?/1" do
test "gets default value for skip config if not set" do
env = Env.new(nil, :app, :key, [])
assert not Env.skip_config?(env)
end
test "gets value for skip config if set" do
env = Env.new(nil, :app, :key, skip_config: true)
assert Env.skip_config?(env)
end
end
describe "os_env/1" do
test "when os_env defined, returns it" do
env = Env.new(nil, :app, [:a, :b], os_env: "FOO")
assert "FOO" == Env.os_env(env)
end
test "when namespace is nil, is not present" do
env = Env.new(nil, :app, [:a, :b], [])
assert "APP_A_B" == Env.os_env(env)
end
test "when namespace is not nil, is present" do
env = Env.new(My.Custom.Namespace, :app, [:a, :b], [])
assert "MY_CUSTOM_NAMESPACE_APP_A_B" == Env.os_env(env)
end
end
describe "type/1" do
test "gets default value type if none is defined" do
env = Env.new(nil, :app, :key, default: 42)
assert :integer = Env.type(env)
end
test "gets type when defined" do
env = Env.new(nil, :app, :key, type: :integer)
assert :integer = Env.type(env)
end
end
describe "default/1" do
test "gets default value if set" do
env = Env.new(nil, :app, :key, default: 42)
assert 42 = Env.default(env)
end
end
describe "required?/1" do
test "gets default value for required if not set" do
env = Env.new(nil, :app, :key, [])
assert not Env.required?(env)
end
test "gets value for required if set" do
env = Env.new(nil, :app, :key, required: true)
assert Env.required?(env)
end
end
describe "cached?/1" do
test "gets default value for cached if not set" do
env = Env.new(nil, :app, :key, [])
assert Env.cached?(env)
end
test "gets value for cached if set" do
env = Env.new(nil, :app, :key, cached: false)
assert not Env.cached?(env)
end
end
describe "gen_namespace/1" do
test "when nil, then is empty" do
env = Env.new(nil, :app, :key, [])
assert "" == Env.gen_namespace(env)
end
test "when not nil, then converts it to binary" do
env = Env.new(My.Custom.Namespace, :app, :key, [])
assert "MY_CUSTOM_NAMESPACE_" == Env.gen_namespace(env)
end
end
describe "gen_app_name/1" do
test "transforms app_name to binary" do
env = Env.new(nil, :app, [:a, :b], [])
assert "APP" == Env.gen_app_name(env)
end
end
describe "gen_keys/1" do
test "transforms keys to binary" do
env = Env.new(nil, :app, [:a, :b], [])
assert "A_B" == Env.gen_keys(env)
end
end
describe "get_type/1" do
test "when nil" do
assert :binary == Env.get_type(nil)
end
test "when binary" do
assert :binary == Env.get_type("foo")
end
test "when integer" do
assert :integer == Env.get_type(42)
end
test "when float" do
assert :float == Env.get_type(42.0)
end
test "when atom" do
assert :atom == Env.get_type(:atom)
end
test "when other" do
assert :binary == Env.get_type([])
end
end
end
| 22.888268 | 78 | 0.59629 |
f7fdcd2181a112732cf70c8ecde1f5c544b8e447 | 2,563 | ex | Elixir | lib/nerves_init_ec2/ssh_console.ex | cogini/nerves_init_ec2 | 8f68b069db48f6aa32c6b3ad2c73c1756c9afeb3 | [
"Apache-2.0"
] | 3 | 2018-08-05T12:48:49.000Z | 2018-08-09T07:44:35.000Z | lib/nerves_init_ec2/ssh_console.ex | cogini/nerves_init_ec2 | 8f68b069db48f6aa32c6b3ad2c73c1756c9afeb3 | [
"Apache-2.0"
] | 1 | 2018-08-04T23:55:54.000Z | 2018-08-05T23:16:23.000Z | lib/nerves_init_ec2/ssh_console.ex | cogini/nerves_init_ec2 | 8f68b069db48f6aa32c6b3ad2c73c1756c9afeb3 | [
"Apache-2.0"
] | null | null | null | defmodule NervesInitEc2.SSHConsole do
@moduledoc """
SSH IEx console.
"""
use GenServer
require Logger
  @doc false
  def start_link(opts) do
    # Registered under the module name: there is a single console server.
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  def init([opts]) do
    # Logger.debug("#{__MODULE__}: opts: #{inspect opts}")
    # Subscribe to SystemRegistry so we hear about authorized-key updates
    # (handled in handle_info/2 below).
    SystemRegistry.register()
    init_daemon(opts.ssh_console_port, opts.ssh_authorized_keys)
  end

  # Stop the SSH daemon (if one was started) when the server shuts down.
  def terminate(_reason, %{daemon_ref: ref}), do: :ssh.stop_daemon(ref)
  def terminate(_reason, _state), do: :ok
  # Starts the SSH daemon unless no authorized keys are configured yet, in
  # which case we keep running and wait for keys to arrive via SystemRegistry.
  @spec init_daemon(non_neg_integer, list(binary)) :: {:ok, Map.t}
  defp init_daemon(port, []) do
    Logger.debug("authorized_keys not defined, waiting for metadata")
    {:ok, %{keys: [], port: port}}
  end

  defp init_daemon(port, keys) do
    Logger.debug("Starting SSH console on port #{port}, keys #{inspect keys}")

    case start_daemon(port, keys) do
      {:ok, ref} ->
        {:ok, %{daemon_ref: ref, keys: keys, port: port}}

      {:error, reason} ->
        # Keep the server alive without a console; a later key update may succeed.
        Logger.warn("Could not start SSH console: #{reason}")
        {:ok, %{keys: keys, port: port}}
    end
  end
@spec start_daemon(non_neg_integer, list(binary)) :: {:ok, Map.t} | {:error, atom}
defp start_daemon(port, config_keys) do
string_keys = Enum.join(config_keys, "\n")
auth_keys = :public_key.ssh_decode(string_keys, :auth_keys)
cb_opts = [authorized_keys: auth_keys]
:ssh.daemon(port, [
{:id_string, :random},
{:key_cb, {Nerves.Firmware.SSH.Keys, cb_opts}},
{:system_dir, Nerves.Firmware.SSH.Application.system_dir()},
{:shell, {Elixir.IEx, :start, []}}
])
end
  # SystemRegistry pushes the whole global registry on change; pick out the
  # SSH authorized keys and restart the daemon if they differ.
  def handle_info({:system_registry, :global, registry}, state) do
    keys = get_in(registry, [:config, :ssh, :authorized_keys])
    restart_daemon(keys, state)
  end

  # No keys in the update: nothing to do.
  def restart_daemon(nil, state), do: {:noreply, state}

  # Keys unchanged: keep the current daemon running.
  def restart_daemon(new_keys, %{keys: keys} = state) when new_keys == keys do
    {:noreply, state}
  end

  # Keys changed while a daemon is running: stop it, then start a fresh one.
  def restart_daemon(new_keys, %{daemon_ref: ref, port: port} = state) do
    Logger.debug("Stopping SSH console #{inspect ref}")
    :ssh.stop_daemon(ref)
    Logger.debug("Starting SSH console on port #{port}, keys #{inspect new_keys}")
    {:ok, ref} = start_daemon(port, new_keys)
    {:noreply, %{state | daemon_ref: ref, keys: new_keys}}
  end

  # Keys arrived and no daemon was running yet: start one now.
  def restart_daemon(new_keys, %{port: port} = state) do
    Logger.debug("Starting SSH console on port #{port}, keys #{inspect new_keys}")
    {:ok, ref} = start_daemon(port, new_keys)
    {:noreply, Map.merge(state, %{daemon_ref: ref, keys: new_keys})}
  end
end
| 32.858974 | 84 | 0.660554 |
f7fdcd2a7fd54e93a1f7ec209e906f7c58d9ef53 | 3,862 | ex | Elixir | clients/service_user/lib/google_api/service_user/v1/model/monitored_resource_descriptor.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/service_user/lib/google_api/service_user/v1/model/monitored_resource_descriptor.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/service_user/lib/google_api/service_user/v1/model/monitored_resource_descriptor.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceUser.V1.Model.MonitoredResourceDescriptor do
  @moduledoc """
  An object that describes the schema of a MonitoredResource object using a
  type name and a set of labels. For example, the monitored resource
  descriptor for Google Compute Engine VM instances has a type of
  `"gce_instance"` and specifies the use of the labels `"instance_id"` and
  `"zone"` to identify particular VM instances.

  Different APIs can support different monitored resource types. APIs generally
  provide a `list` method that returns the monitored resource descriptors used
  by the API.

  ## Attributes

  *   `description` (*type:* `String.t`, *default:* `nil`) - Optional. A detailed description of the monitored resource type that might
      be used in documentation.
  *   `displayName` (*type:* `String.t`, *default:* `nil`) - Optional. A concise name for the monitored resource type that might be
      displayed in user interfaces. It should be a Title Cased Noun Phrase,
      without any article or other determiners. For example,
      `"Google Cloud SQL Database"`.
  *   `labels` (*type:* `list(GoogleApi.ServiceUser.V1.Model.LabelDescriptor.t)`, *default:* `nil`) - Required. A set of labels used to describe instances of this monitored
      resource type. For example, an individual Google Cloud SQL database is
      identified by values for the labels `"database_id"` and `"zone"`.
  *   `launchStage` (*type:* `String.t`, *default:* `nil`) - Optional. The launch stage of the monitored resource definition.
  *   `name` (*type:* `String.t`, *default:* `nil`) - Optional. The resource name of the monitored resource descriptor:
      `"projects/{project_id}/monitoredResourceDescriptors/{type}"` where
      {type} is the value of the `type` field in this object and
      {project_id} is a project ID that provides API-specific context for
      accessing the type. APIs that do not use project information can use the
      resource name format `"monitoredResourceDescriptors/{type}"`.
  *   `type` (*type:* `String.t`, *default:* `nil`) - Required. The monitored resource type. For example, the type
      `"cloudsql_database"` represents databases in Google Cloud SQL.
      The maximum length of this value is 256 characters.
  """

  # ModelBase supplies the `field` macro plus the shared Poison helpers.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :description => String.t(),
          :displayName => String.t(),
          :labels => list(GoogleApi.ServiceUser.V1.Model.LabelDescriptor.t()),
          :launchStage => String.t(),
          :name => String.t(),
          :type => String.t()
        }

  field(:description)
  field(:displayName)
  field(:labels, as: GoogleApi.ServiceUser.V1.Model.LabelDescriptor, type: :list)
  field(:launchStage)
  field(:name)
  field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceUser.V1.Model.MonitoredResourceDescriptor do
  # Delegate to the decode helper generated by GoogleApi.Gax.ModelBase.
  def decode(value, options),
    do: GoogleApi.ServiceUser.V1.Model.MonitoredResourceDescriptor.decode(value, options)
end

defimpl Poison.Encoder, for: GoogleApi.ServiceUser.V1.Model.MonitoredResourceDescriptor do
  # All generated models share the ModelBase encoding logic.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 46.53012 | 172 | 0.721129 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.