hexsha
stringlengths
40
40
size
int64
2
991k
ext
stringclasses
2 values
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
4
208
max_stars_repo_name
stringlengths
6
106
max_stars_repo_head_hexsha
stringlengths
40
40
max_stars_repo_licenses
sequence
max_stars_count
int64
1
33.5k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
4
208
max_issues_repo_name
stringlengths
6
106
max_issues_repo_head_hexsha
stringlengths
40
40
max_issues_repo_licenses
sequence
max_issues_count
int64
1
16.3k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
4
208
max_forks_repo_name
stringlengths
6
106
max_forks_repo_head_hexsha
stringlengths
40
40
max_forks_repo_licenses
sequence
max_forks_count
int64
1
6.91k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
2
991k
avg_line_length
float64
1
36k
max_line_length
int64
1
977k
alphanum_fraction
float64
0
1
9ec3d4eddbdcb346de639f1897db00ddcbdad943
878
exs
Elixir
test/test_helper.exs
BKStephens/phoenix_liveview_stock_tracker
7cbf5e546f797a33d3e47b91fd75768bc18d8a5c
[ "MIT" ]
null
null
null
test/test_helper.exs
BKStephens/phoenix_liveview_stock_tracker
7cbf5e546f797a33d3e47b91fd75768bc18d8a5c
[ "MIT" ]
null
null
null
test/test_helper.exs
BKStephens/phoenix_liveview_stock_tracker
7cbf5e546f797a33d3e47b91fd75768bc18d8a5c
[ "MIT" ]
null
null
null
ExUnit.start() Mox.defmock(PhoenixLiveviewStockTracker.AlphaVantageApiClientMock, for: PhoenixLiveviewStockTracker.AlphaVantageApiClientBehaviour ) Application.put_env( :phoenix_liveview_stock_tracker, :alpha_vantage_api_client, PhoenixLiveviewStockTracker.AlphaVantageApiClientMock ) Mox.defmock(HttpMock, for: HTTPoison.Base) Application.put_env( :phoenix_liveview_stock_tracker, :http_client, HttpMock ) defmodule TestHelper do def wait_for_mailbox_to_drain(pid, max_milli \\ 5000, time_elapsed \\ 0) do if {:message_queue_len, 0} == :erlang.process_info(pid, :message_queue_len) do :ok else if time_elapsed > max_milli do {:error, "Timeout"} else time_to_sleep = 10 Process.sleep(time_to_sleep) wait_for_mailbox_to_drain(pid, max_milli, time_elapsed + time_to_sleep) end end end end
24.388889
82
0.750569
9ec3f3a4d427880e3999f17e7451e6eabd74254f
995
exs
Elixir
ds_backend/config/config.exs
ohr486/d-screen
5a70c4f599e63c07bcefc7867bbd4d98d7cc4be7
[ "MIT" ]
null
null
null
ds_backend/config/config.exs
ohr486/d-screen
5a70c4f599e63c07bcefc7867bbd4d98d7cc4be7
[ "MIT" ]
null
null
null
ds_backend/config/config.exs
ohr486/d-screen
5a70c4f599e63c07bcefc7867bbd4d98d7cc4be7
[ "MIT" ]
null
null
null
# This file is responsible for configuring your application # and its dependencies with the aid of the Mix.Config module. # # This configuration file is loaded before any dependency and # is restricted to this project. # General application configuration use Mix.Config # Configures the endpoint config :ds_backend, DsBackendWeb.Endpoint, url: [host: "localhost"], secret_key_base: "mgKvHw2+Kxdyq5VymOozLya2o1sg7taX0+ywK11rgGbyuV0c4fMbjogswfpvZHfg", render_errors: [view: DsBackendWeb.ErrorView, accepts: ~w(html json), layout: false], pubsub_server: DsBackend.PubSub, live_view: [signing_salt: "ywpZcrKM"] # Configures Elixir's Logger config :logger, :console, format: "$time $metadata[$level] $message\n", metadata: [:request_id] # Use Jason for JSON parsing in Phoenix config :phoenix, :json_library, Jason # Import environment specific config. This must remain at the bottom # of this file so it overrides the configuration defined above. import_config "#{Mix.env()}.exs"
34.310345
87
0.773869
9ec45d128877ac8dd7f9fb9a6704d5d689362efc
152
exs
Elixir
config/prod.exs
jimydotorg/ex_binance
fe16a12e5c3da9ae6e551bcb8daa1257061079cb
[ "MIT" ]
1
2018-01-09T00:05:34.000Z
2018-01-09T00:05:34.000Z
config/prod.exs
jimydotorg/ex_binance
fe16a12e5c3da9ae6e551bcb8daa1257061079cb
[ "MIT" ]
null
null
null
config/prod.exs
jimydotorg/ex_binance
fe16a12e5c3da9ae6e551bcb8daa1257061079cb
[ "MIT" ]
null
null
null
use Mix.Config config :ex_binance, api_key: System.get_env("BINANCE_API_KEY"), api_secret: System.get_env("BINANCE_API_SECRET")
30.4
68
0.684211
9ec491e2b9856357493f00fec986f130c5c2bbad
5,855
ex
Elixir
lib/ace/http/service.ex
CharlesOkwuagwu/Ace
e39bfbf5a99dde225d3adcf680885e0bd71a86a4
[ "MIT" ]
null
null
null
lib/ace/http/service.ex
CharlesOkwuagwu/Ace
e39bfbf5a99dde225d3adcf680885e0bd71a86a4
[ "MIT" ]
null
null
null
lib/ace/http/service.ex
CharlesOkwuagwu/Ace
e39bfbf5a99dde225d3adcf680885e0bd71a86a4
[ "MIT" ]
null
null
null
defmodule Ace.HTTP.Service do @moduledoc """ Run a `Raxx.Server` application for HTTP/1.x and HTTP/2 clients **NOTE:** Ace services are served over a secure transport layer TLS(SSL), therefore `:cert` + `:key` or `:certfile` + `:keyfile` are required options. Starting a service will start and manage a cohort of endpoint process. The number of awaiting endpoint processes is set by the acceptors option. Each endpoint process manages communicate to a single connected client. Using HTTP/1.1 pipelining of HTTP/2 multiplexing one connection may be used for multiple HTTP exchanges. An HTTP exchange consisting of one request from the client and one response from the server. Each exchange is isolated in a dedicated worker process. Raxx specifies early abortion of an exchange can be achieved by causing the worker process to exit. """ use GenServer require Logger @socket_options [ # Received packets are delivered as a binary("string"). {:mode, :binary}, # Handle packets as soon as they are available. {:packet, :raw}, # Set the socket to execute in passive mode, it must be prompted to read data. {:active, false}, # it is possible for the process to complete before the kernel has released the associated network resource, and this port cannot be bound to another process until the kernel has decided that it is done. # A detailed explaination is given at http://hea-www.harvard.edu/~fine/Tech/addrinuse.html # This setting is a security vulnerability only on multi-user machines. # It is NOT a vulnerability from outside the machine. {:reuseaddr, true}, {:alpn_preferred_protocols, ["h2", "http/1.1"]} ] @doc """ Start a HTTP web service. ## Options * `:cleartext` - Serve over TCP rather than TLS(ssl), will not support HTTP/2. * `:certfile` - the certificate. * `:keyfile` - the private key used to sign the certificate request. * `:cert` - the certificate. * `:key` - the private key used to sign the certificate request. * `:port` - the port to run the server on. Defaults to port 8443. * `:name` - name to register the spawned endpoint under. The supported values are the same as GenServers. * `:acceptors` - The number of servers simultaneously waiting for a connection. Defaults to 50. """ def start_link(app = {module, config}, options) do case Ace.HTTP2.Settings.for_server(options) do {:ok, _settings} -> service_options = Keyword.take(options, [:name]) GenServer.start_link(__MODULE__, {app, options}, service_options) {:error, reason} -> {:error, reason} end # module.module_info[:attributes] # |> Keyword.get(:behaviour, []) # |> Enum.member?(Raxx.Server) # case Ace.Application.is_implemented?(mod) do # true -> # :ok # false -> # Logger.warn("#{__MODULE__}: #{mod} does not implement Ace.Application behaviour.") # end # TODO test that module implements `Raxx.Server` end @doc """ Fetch the port number of a running service. **OS assigned ports:** If an endpoint is started with port number `0` it will be assigned a port by the underlying system. This can be used to start many endpoints simultaneously. It can be useful running parallel tests. """ def port(endpoint) do GenServer.call(endpoint, :port) end ## SERVER CALLBACKS @impl GenServer def init({app, options}) do port = case Keyword.fetch(options, :port) do {:ok, port} when is_integer(port) -> port _ -> raise "#{__MODULE__} must be started with a port to listen too." 
end acceptors = Keyword.get(options, :acceptors, 100) listen_socket = case Keyword.fetch(options, :cleartext) do {:ok, true} -> tcp_options = Keyword.take(@socket_options ++ options, [:mode, :packet, :active, :reuseaddr]) {:ok, listen_socket} = :gen_tcp.listen(port, tcp_options) {:ok, port} = :inet.port(listen_socket) Logger.info("Serving cleartext using HTTP/1 on port #{port}") {:tcp, listen_socket} _ -> ssl_options = Keyword.take(@socket_options ++ options, [ :mode, :packet, :active, :reuseaddr, :alpn_preferred_protocols, :cert, :key, :certfile, :keyfile ]) {:ok, listen_socket} = :ssl.listen(port, ssl_options) {:ok, {_, port}} = :ssl.sockname(listen_socket) Logger.info("Serving securely using HTTP/1 and HTTP/2 on port #{port}") listen_socket end {:ok, worker_supervisor} = Supervisor.start_link( [{Ace.HTTP.Worker, app}], strategy: :simple_one_for_one, max_restarts: 5000 ) {:ok, endpoint_supervisor} = Supervisor.start_link( [{Ace.HTTP.Server, {worker_supervisor, options}}], strategy: :simple_one_for_one, max_restarts: 5000 ) # DEBT reduce restarts {:ok, governor_supervisor} = Supervisor.start_link( [{Ace.Governor, {endpoint_supervisor, listen_socket}}], strategy: :simple_one_for_one, max_restarts: 5000 ) for _index <- 1..acceptors do Supervisor.start_child(governor_supervisor, []) end {:ok, {listen_socket, worker_supervisor, endpoint_supervisor, governor_supervisor}} end @impl GenServer def handle_call(:port, _from, state = {{:tcp, listen_socket}, _, _, _}) do {:reply, :inet.port(listen_socket), state} end def handle_call(:port, _from, state = {listen_socket, _, _, _}) do {:ok, {_, port}} = :ssl.sockname(listen_socket) {:reply, {:ok, port}, state} end end
31.820652
207
0.640478
9ec4c43eb7daae136ddee5469445a13e516afb62
794
exs
Elixir
test/rocketpay_web/controllers/users_controller_test.exs
leandroslc/rocketpay
92e807998ca48e842a30f10be681481271905691
[ "MIT" ]
null
null
null
test/rocketpay_web/controllers/users_controller_test.exs
leandroslc/rocketpay
92e807998ca48e842a30f10be681481271905691
[ "MIT" ]
null
null
null
test/rocketpay_web/controllers/users_controller_test.exs
leandroslc/rocketpay
92e807998ca48e842a30f10be681481271905691
[ "MIT" ]
null
null
null
defmodule RocketpayWeb.UsersControllerTest do use RocketpayWeb.ConnCase, async: true describe "create/2" do test "should create user", %{conn: conn} do params = %{ "name" => "Test User", "age" => "20", "email" => "[email protected]", "password" => "Pass$123", "nickname" => "test-user" } response = conn |> post(Routes.users_path(conn, :create, params)) |> json_response(:created) assert %{ "message" => "User created", "user" => %{ "id" => _id, "name" => "Test User", "nickname" => "test-user", "account" => %{ "id" => _account_id, "balance" => "0.0" } } } = response end end end
23.352941
57
0.469773
9ec4cad724eb6f43fd828b94b6ff952184c74d32
1,589
ex
Elixir
lib/phelmx_web.ex
ilkka/phoenix-elm-graphql-starter
fd88adf0e471eb97feb98a32026c2a1581e55490
[ "Apache-2.0" ]
null
null
null
lib/phelmx_web.ex
ilkka/phoenix-elm-graphql-starter
fd88adf0e471eb97feb98a32026c2a1581e55490
[ "Apache-2.0" ]
null
null
null
lib/phelmx_web.ex
ilkka/phoenix-elm-graphql-starter
fd88adf0e471eb97feb98a32026c2a1581e55490
[ "Apache-2.0" ]
null
null
null
defmodule PhelmxWeb do @moduledoc """ The entrypoint for defining your web interface, such as controllers, views, channels and so on. This can be used in your application as: use PhelmxWeb, :controller use PhelmxWeb, :view The definitions below will be executed for every view, controller, etc, so keep them short and clean, focused on imports, uses and aliases. Do NOT define functions inside the quoted expressions below. Instead, define any helper function in modules and import those modules here. """ def controller do quote do use Phoenix.Controller, namespace: PhelmxWeb import Plug.Conn import PhelmxWeb.Router.Helpers import PhelmxWeb.Gettext end end def view do quote do use Phoenix.View, root: "lib/phelmx_web/templates", namespace: PhelmxWeb # Import convenience functions from controllers import Phoenix.Controller, only: [get_flash: 2, view_module: 1] # Use all HTML functionality (forms, tags, etc) use Phoenix.HTML import PhelmxWeb.Router.Helpers import PhelmxWeb.ErrorHelpers import PhelmxWeb.Gettext end end def router do quote do use Phoenix.Router import Plug.Conn import Phoenix.Controller end end def channel do quote do use Phoenix.Channel import PhelmxWeb.Gettext end end @doc """ When used, dispatch to the appropriate controller/view/etc. """ defmacro __using__(which) when is_atom(which) do apply(__MODULE__, which, []) end end
23.367647
69
0.684078
9ec4d88e088915c9cd865087bdd1d4d842ae4e93
1,355
ex
Elixir
lib/kronky/validation_message.ex
mirego/kronky
cfc786bd87c1968d4186407fb8683f2bee3457c2
[ "MIT" ]
1
2019-03-21T12:57:00.000Z
2019-03-21T12:57:00.000Z
lib/kronky/validation_message.ex
mirego/kronky
cfc786bd87c1968d4186407fb8683f2bee3457c2
[ "MIT" ]
1
2018-09-19T10:58:11.000Z
2018-09-19T10:58:11.000Z
lib/kronky/validation_message.ex
mirego/kronky
cfc786bd87c1968d4186407fb8683f2bee3457c2
[ "MIT" ]
8
2018-02-18T10:41:04.000Z
2019-08-19T13:28:45.000Z
defmodule Kronky.ValidationMessage do @moduledoc """ Stores validation message information. ## Fields ### :field The input field that the error applies to. The field can be used to identify which field the error message should be displayed next to in the presentation layer. If there are multiple errors to display for a field, multiple validation messages will be in the result. This field may be nil in cases where an error cannot be applied to a specific field. ### :message A friendly error message, appropriate for display to the end user. The message is interpolated to include the appropriate variables. Example: `"Username must be at least 10 characters"` ### :template A template used to generate the error message, with placeholders for option substiution. Example: `"Username must be at least %{count} characters"` ### :code A unique error code for the type of validation that failed. This field must be provided. See `Kronky.ChangesetParser.to_code/1` for built in codes corresponding to most Ecto validations. ### :options A Keyword List of substitutions to be applied to a validation message template. ### :key Deprecated, use :field instead """ @enforce_keys [:code] defstruct field: nil, key: nil, code: nil, options: [], template: "is invalid", message: "is invalid" end
31.511628
103
0.737269
9ec4dba445b89732fee0913cfd0ed952a8232e2b
8,137
ex
Elixir
lib/aws/signature.ex
kianmeng/aws-elixir
07017efc4cd9f5c245f7851736204ac91e15ec18
[ "Apache-2.0" ]
null
null
null
lib/aws/signature.ex
kianmeng/aws-elixir
07017efc4cd9f5c245f7851736204ac91e15ec18
[ "Apache-2.0" ]
null
null
null
lib/aws/signature.ex
kianmeng/aws-elixir
07017efc4cd9f5c245f7851736204ac91e15ec18
[ "Apache-2.0" ]
null
null
null
defmodule AWS.Signature do @moduledoc false # Implements the Signature algorithm v4. # See: https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html alias AWS.Client alias AWS.Util # https://docs.aws.amazon.com/general/latest/gr/sigv4_changes.html @default_region_for_global_services "us-east-1" @doc """ Generate headers with an AWS signature version 4 for the specified request using the specified time. """ def sign_v4(client, now, method, url, headers, body) do long_date = NaiveDateTime.to_iso8601(now, :basic) <> "Z" short_date = Date.to_iso8601(now, :basic) region = client.region || @default_region_for_global_services headers = headers |> add_date_header(long_date) |> add_content_hash(body) |> add_security_token(client) canonical_request = canonical_request(method, url, headers, body) hashed_canonical_request = Util.sha256_hexdigest(canonical_request) credential_scope = credential_scope(short_date, region, client.service) signing_key = signing_key(client, short_date) string_to_sign = string_to_sign(long_date, credential_scope, hashed_canonical_request) signature = Util.hmac_sha256_hexdigest(signing_key, string_to_sign) signed_headers = signed_headers(headers) authorization = authorization(client.access_key_id, credential_scope, signed_headers, signature) add_authorization_header(headers, authorization) end @doc """ Generate headers with an AWS signature version 4 for the specified request using the specified time that can be transformed into a query string. """ def sign_v4_query(client, now, method, url, headers, body) do long_date = NaiveDateTime.to_iso8601(now, :basic) <> "Z" short_date = Date.to_iso8601(now, :basic) headers = add_date_header(headers, long_date) canonical_request = canonical_request(method, url, headers, body) hashed_canonical_request = Util.sha256_hexdigest(canonical_request) region = client.region || @default_region_for_global_services credential_scope = credential_scope(short_date, region, client.service) signing_key = signing_key(client, short_date) string_to_sign = string_to_sign(long_date, credential_scope, hashed_canonical_request) signature = Util.hmac_sha256_hexdigest(signing_key, string_to_sign) signed_headers = signed_headers(headers) credential = Enum.join( [client.access_key_id, short_date, region, client.service, "aws4_request"], "/" ) result = [ {"X-Amz-Algorithm", "AWS4-HMAC-SHA256"}, {"X-Amz-Credential", credential}, {"X-Amz-Date", long_date}, {"X-Amz-SignedHeaders", signed_headers}, {"X-Amz-Signature", signature} ] if expiry = :proplists.get_value("X-Amz-Expires", headers, nil) do [{"X-Amz-Expires", expiry} | result] else result end end defp add_date_header(headers, date) do [{"X-Amz-Date", date} | headers] end # Add an X-Amz-Content-SHA256 header which is the hash of the payload. # This header is required for S3 when using the v4 signature. Adding it # in requests for all services does not cause any issues. defp add_content_hash(headers, body) do [{"X-Amz-Content-SHA256", AWS.Util.sha256_hexdigest(body)} | headers] end # Add an `X-Amz-Security-Token` if credentials configurations are configured for it defp add_security_token(headers, %AWS.Client{session_token: nil}), do: headers defp add_security_token(headers, %AWS.Client{session_token: session_token}), do: [{"X-Amz-Security-Token", session_token} | headers] defp add_authorization_header(headers, authorization) do [{"Authorization", authorization} | headers] end # Generate an AWS4-HMAC-SHA256 authorization signature. 
defp authorization(access_key_id, credential_scope, signed_headers, signature) do Enum.join( [ "AWS4-HMAC-SHA256 ", "Credential=", access_key_id, "/", credential_scope, ", ", "SignedHeaders=", signed_headers, ", ", "Signature=", signature ], "" ) end # Convert a list of headers to canonical header format. Leading and trailing # whitespace around header names and values is stripped, header names are # lowercased, and headers are newline-joined in alphabetical order (with a # trailing newline). defp canonical_headers(headers) do headers |> Enum.map(fn {name, value} -> name = String.downcase(name) |> String.trim() value = String.trim(value) {name, value} end) |> Enum.sort(fn {a, _}, {b, _} -> a <= b end) |> Enum.map(fn {name, value} -> [name, ":", value, "\n"] end) |> Enum.join() end # Process and merge request values into a canonical request for AWS signature # version 4. defp canonical_request(method, url, headers, body) when is_atom(method) do Atom.to_string(method) |> String.upcase() |> canonical_request(url, headers, body) end defp canonical_request(method, url, headers, body) do {canonical_url, canonical_query_string} = split_url(url) canonical_headers = canonical_headers(headers) signed_headers = signed_headers(headers) payload_hash = AWS.Util.sha256_hexdigest(body) Enum.join( [ method, canonical_url, canonical_query_string, canonical_headers, signed_headers, payload_hash ], "\n" ) end # Generate a credential scope from a short date in `YYMMDD` format, a region # identifier and a service identifier. defp credential_scope(short_date, region, service) do Enum.join([short_date, region, service, "aws4_request"], "/") end # Convert a list of headers to canonicals signed header format. Leading and # trailing whitespace around names is stripped, header names are lowercased, # and header names are semicolon-joined in alphabetical order. @spec signed_headers([{binary(), binary()}]) :: binary() defp signed_headers(headers) do headers |> Enum.map(fn {name, _value} -> name |> String.downcase() |> String.trim() end) |> Enum.sort() |> Enum.join(";") end # Generate a signing key from a secret access key, a short date in `YYMMDD` # format, a region identifier and a service identifier. defp signing_key(%Client{} = client, short_date) do ("AWS4" <> client.secret_access_key) |> AWS.Util.hmac_sha256(short_date) |> AWS.Util.hmac_sha256(client.region || @default_region_for_global_services) |> AWS.Util.hmac_sha256(client.service) |> AWS.Util.hmac_sha256("aws4_request") end # Strip the query string from the URL, if one if present, and return the URL # and the normalized query string as separate values. defp split_url(url) do url = URI.parse(url) {uri_encode(url.path), normalize_query(url.query)} end # Copied from https://github.com/ex-aws/ex_aws/blob/623478ed321ffc6c07fdd7236a2f0e03f1cbd517/lib/ex_aws/request/url.ex#L108 def uri_encode(url), do: URI.encode(url, &valid_path_char?/1) # Space character defp valid_path_char?(?\ ), do: false defp valid_path_char?(?/), do: true defp valid_path_char?(c) do URI.char_unescaped?(c) && !URI.char_reserved?(c) end # Sort query params by name first, then by value (if present). Append "=" to # params with missing value. 
# Example: "foo=bar&baz" becomes "baz=&foo=bar" defp normalize_query(nil), do: "" defp normalize_query(""), do: "" defp normalize_query(query) do query |> String.split("&") |> Enum.map(&String.split(&1, "=")) |> Enum.sort(fn [a, _], [b, _] -> a <= b end) |> Enum.map_join("&", fn [key, value] -> key <> "=" <> value [key] -> key <> "=" end) end # Generate the text to sign from a long date in `YYMMDDTHHMMSSZ` format, a # credential scope and a hashed canonical request. defp string_to_sign(long_date, credential_scope, hashed_canonical_request) do Enum.join( ["AWS4-HMAC-SHA256", long_date, credential_scope, hashed_canonical_request], "\n" ) end end
33.485597
146
0.690304
9ec528539085c976f612a72a58792255c0ffe09c
6,339
ex
Elixir
apps/site/lib/site_web/views/mode_view.ex
noisecapella/dotcom
d5ef869412102d2230fac3dcc216f01a29726227
[ "MIT" ]
42
2019-05-29T16:05:30.000Z
2021-08-09T16:03:37.000Z
apps/site/lib/site_web/views/mode_view.ex
noisecapella/dotcom
d5ef869412102d2230fac3dcc216f01a29726227
[ "MIT" ]
872
2019-05-29T17:55:50.000Z
2022-03-30T09:28:43.000Z
apps/site/lib/site_web/views/mode_view.ex
noisecapella/dotcom
d5ef869412102d2230fac3dcc216f01a29726227
[ "MIT" ]
12
2019-07-01T18:33:21.000Z
2022-03-10T02:13:57.000Z
defmodule SiteWeb.ModeView do @moduledoc false use SiteWeb, :view alias Alerts.Match alias Plug.Conn alias Routes.Route alias Site.MapHelpers alias SiteWeb.PartialView alias SiteWeb.PartialView.SvgIconWithCircle def get_route_group(:commuter_rail = route_type, route_groups) do # Note that we do not sort the commuter rail routes by name as we # want to preserve the order supplied by the API, keeping Foxboro # last. route_groups[route_type] end def get_route_group(:the_ride, _) do [ {"MBTA Paratransit Program", cms_static_page_path(SiteWeb.Endpoint, "/accessibility/the-ride")} ] end def get_route_group(route_type, route_groups), do: route_groups[route_type] # Remove this once it is no longer being used in the content on any environment -- MSS 20200730 @spec fares_note(String) :: Phoenix.HTML.safe() | String.t() def fares_note(_mode) do "" end @doc """ Builds the header tag for a mode group. Adds a "view all" link for bus. """ @spec mode_group_header(atom, String.t(), boolean) :: Phoenix.HTML.Safe.t() def mode_group_header(mode, href, is_homepage?) do is_homepage? |> mode_group_header_tag() |> content_tag(mode_group_header_content(mode, href), class: "m-mode__header") end @spec mode_group_header_tag(boolean) :: :h2 | :h3 defp mode_group_header_tag(is_homepage?) defp mode_group_header_tag(true), do: :h3 defp mode_group_header_tag(false), do: :h2 @spec mode_group_header_content(atom, String.t()) :: [Phoenix.HTML.Safe.t()] defp mode_group_header_content(mode, href) do [ link( [ svg_icon_with_circle(%SvgIconWithCircle{icon: mode, aria_hidden?: true}), " ", Route.type_name(mode) ], to: href, class: "m-mode__name" ), view_all_link(mode, href) ] end @spec view_all_link(atom, String.t()) :: [Phoenix.HTML.Safe.t()] defp view_all_link(:bus, href) do [ link("View all bus routes", to: href, class: "c-call-to-action m-mode__view-all") ] end defp view_all_link(_, _) do [] end @spec grid_button_path(atom, Conn.t()) :: String.t() def grid_button_path(:the_ride, %Conn{} = conn) do cms_static_page_path(conn, "/accessibility/the-ride") end def grid_button_path(%Route{id: route_id}, %Conn{} = conn) do schedule_path(conn, :show, route_id) end @doc """ Returns the value to add as a modifier for the .c-grid-button class. """ @spec grid_button_class_modifier(atom | Route.t()) :: String.t() def grid_button_class_modifier(:the_ride) do "the-ride" end def grid_button_class_modifier(%Route{} = route) do route_to_class(route) end @doc """ Used to determine if the mode icon should be rendered on a mode button. The Ride icon is never shown. Subway icons are always rendered. Other modes rely on the :show_icon? boolean assign. 
""" @spec show_icon?(atom | Route.t(), boolean) :: boolean def show_icon?(:the_ride, _) do false end def show_icon?(%Route{type: type}, _) when type in [0, 1] do true end def show_icon?(_, bool) when bool in [true, false] do bool end @spec grid_button_text(atom | Route.t()) :: String.t() def grid_button_text(:the_ride) do "MBTA Paratransit Program" end def grid_button_text(%Route{name: name}) do break_text_at_slash(name) end # Returns true if there is a non-notice alert for the given route on `date` @spec has_alert?(Route.t() | :the_ride, [Alerts.Alert.t()], DateTime.t() | nil) :: boolean def has_alert?(:the_ride, _, _) do false end def has_alert?(%Route{} = route, alerts, date) do date = date || Util.now() entity = %Alerts.InformedEntity{route_type: route.type, route: route.id} Enum.any?(alerts, fn alert -> Alerts.Alert.is_high_severity_or_high_priority(alert) and Match.match([alert], entity, date) == [alert] end) end def map_buttons(types) do content_tag(:div, Enum.map(types, &map_button/1), class: "m-mode-hub__map-btns") end def map_button(type) do content_tag( :div, [ link( [ type |> MapHelpers.thumbnail() |> img_tag(class: "m-mode-hub__map-btn-thumbnail"), content_tag(:span, map_button_text(type), class: "m-mode-hub__map-btn-text"), fa("angle-right", class: "m-mode-hub__map-btn-caret") ], to: MapHelpers.map_pdf_url(type), class: "m-mode-hub__map-btn" ) ], class: "m-mode-hub__map-btn-wrapper" ) end def map_image(type) do content_tag(:div, [ link( [ type |> MapHelpers.image() |> img_tag(class: "m-mode-hub__map-image") ], to: MapHelpers.map_pdf_url(type) ) ]) end def map_button_text(:commuter_rail_zones), do: "Commuter Rail Zones Map" def map_button_text(:commuter_rail), do: "Commuter Rail Map" def map_button_text(:subway), do: "Subway Map" def map_button_text(:bus), do: "Bus Map" def map_button_text(:ferry), do: "Ferry Map" @spec grid_button_id(map) :: String.t() | nil defp grid_button_id(%{id_prefix: <<prefix::binary>>, index: idx}) do prefix <> "--" <> Integer.to_string(idx) end defp grid_button_id(_) do nil end @spec bus_filter_atom(atom) :: (Route.t() -> boolean) def bus_filter_atom(:sl), do: &Route.silver_line?/1 def bus_filter_atom(:ct), do: fn route -> route.name =~ "CT" end @spec bus_filter_range(integer, integer) :: (Route.t() -> boolean) def bus_filter_range(start, stop) do fn route -> case Integer.parse(route.name) do :error -> false {value, _} -> in_range?(start, stop, value) end end end @spec in_range?(integer, integer, integer) :: boolean defp in_range?(first, last, value) when value >= first and value <= last, do: true defp in_range?(_, _, _), do: false @spec mode_fare_card(Route.gtfs_route_type()) :: String.t() def mode_fare_card(:commuter_rail) do "paragraphs/multi-column/commuter-rail-fares" end def mode_fare_card(:ferry) do "paragraphs/multi-column/ferry-fares" end def mode_fare_card(:subway) do "paragraphs/multi-column/subway-fares" end def mode_fare_card(:bus) do "paragraphs/multi-column/bus-fares" end end
28.426009
97
0.659252
9ec52b20bac14f25d98468184cb16882b26e2b3e
1,301
ex
Elixir
lib/ex_chat_web/channels/user_socket.ex
embik/ex_chat
08d83fe2076a96f9dad647fe509daec301b4965a
[ "Apache-2.0" ]
1
2017-12-28T12:49:19.000Z
2017-12-28T12:49:19.000Z
lib/ex_chat_web/channels/user_socket.ex
embik/ex_chat
08d83fe2076a96f9dad647fe509daec301b4965a
[ "Apache-2.0" ]
null
null
null
lib/ex_chat_web/channels/user_socket.ex
embik/ex_chat
08d83fe2076a96f9dad647fe509daec301b4965a
[ "Apache-2.0" ]
null
null
null
defmodule ExChatWeb.UserSocket do use Phoenix.Socket use Guardian.Phoenix.Socket ## Channels channel "room:*", ExChatWeb.RoomChannel ## Transports transport :websocket, Phoenix.Transports.WebSocket # transport :longpoll, Phoenix.Transports.LongPoll # Socket params are passed from the client and can # be used to verify and authenticate a user. After # verification, you can put default assigns into # the socket that will be set for all channels, ie # # {:ok, assign(socket, :user_id, verified_user_id)} # # To deny connection, return `:error`. # # See `Phoenix.Token` documentation for examples in # performing token verification on connect. #def connect(%{"username" => username}, socket) do # {:ok, assign(socket, :username, username) } #end def connect(_params, _socket) do :error end # Socket id's are topics that allow you to identify all sockets for a given user: # # def id(socket), do: "user_socket:#{socket.assigns.user_id}" # # Would allow you to broadcast a "disconnect" event and terminate # all active sockets and channels for a given user: # # ExChatWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{}) # # Returning `nil` makes this socket anonymous. def id(_socket), do: nil end
30.97619
83
0.700231
9ec55fb1bcc33a2c2b5a01d24df8e5793ee9c00d
302
exs
Elixir
apps/ewallet_config/config/test.exs
jimpeebles/ewallet
ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405
[ "Apache-2.0" ]
null
null
null
apps/ewallet_config/config/test.exs
jimpeebles/ewallet
ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405
[ "Apache-2.0" ]
null
null
null
apps/ewallet_config/config/test.exs
jimpeebles/ewallet
ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405
[ "Apache-2.0" ]
null
null
null
use Mix.Config config :ewallet_config, EWalletConfig.Repo, adapter: Ecto.Adapters.Postgres, pool: Ecto.Adapters.SQL.Sandbox, url: {:system, "DATABASE_URL", "postgres://localhost/ewallet_test"}, migration_timestamps: [type: :naive_datetime_usec], queue_target: 1_000, queue_interval: 5_000
30.2
70
0.764901
9ec56a4a3ef6b1c88af6051159433436f1dac11f
1,323
exs
Elixir
config/prod.secret.exs
gratraw/home_appliances_store
36b5274294ef3e0124740c2f992695860884976d
[ "MIT" ]
null
null
null
config/prod.secret.exs
gratraw/home_appliances_store
36b5274294ef3e0124740c2f992695860884976d
[ "MIT" ]
null
null
null
config/prod.secret.exs
gratraw/home_appliances_store
36b5274294ef3e0124740c2f992695860884976d
[ "MIT" ]
null
null
null
# In this file, we load production configuration and secrets # from environment variables. You can also hardcode secrets, # although such is generally not recommended and you have to # remember to add this file to your .gitignore. use Mix.Config database_url = System.get_env("DATABASE_URL") || raise """ environment variable DATABASE_URL is missing. For example: ecto://USER:PASS@HOST/DATABASE """ config :home_appliances_store, HomeAppliancesStore.Repo, # ssl: true, url: database_url, pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10") secret_key_base = System.get_env("SECRET_KEY_BASE") || raise """ environment variable SECRET_KEY_BASE is missing. You can generate one by calling: mix phx.gen.secret """ config :home_appliances_store, HomeAppliancesStoreWeb.Endpoint, http: [ port: String.to_integer(System.get_env("PORT") || "4000"), transport_options: [socket_opts: [:inet6]] ], secret_key_base: secret_key_base # ## Using releases (Elixir v1.9+) # # If you are doing OTP releases, you need to instruct Phoenix # to start each relevant endpoint: # # config :home_appliances_store, HomeAppliancesStoreWeb.Endpoint, server: true # # Then you can assemble a release by calling `mix release`. # See `mix help release` for more information.
31.5
82
0.73545
9ec5739a72255fd64a03f84129e2c6d5263bbd6a
2,325
ex
Elixir
lib/epi_contacts/commcare/api.ex
RatioPBC/epi-contacts
6c43eea52cbfe2097f48b02e3d0c8fce3b46f1ee
[ "Apache-2.0" ]
null
null
null
lib/epi_contacts/commcare/api.ex
RatioPBC/epi-contacts
6c43eea52cbfe2097f48b02e3d0c8fce3b46f1ee
[ "Apache-2.0" ]
13
2021-06-29T04:35:41.000Z
2022-02-09T04:25:39.000Z
lib/epi_contacts/commcare/api.ex
RatioPBC/epi-contacts
6c43eea52cbfe2097f48b02e3d0c8fce3b46f1ee
[ "Apache-2.0" ]
null
null
null
defmodule EpiContacts.Commcare.Api do @moduledoc """ This module provides an interface to the Commcare API. """ require Logger @timeout 20_000 @recv_timeout 45_000 def get_case(case_id, domain) do commcare_api_case_url(domain, case_id) |> get() |> parse_response() end def post_case(xml_body, domain) do xml_body |> post(commcare_api_post_url(domain)) |> parse_response() end defp get(url) do http_client().get( url, headers() ) end defp post(xml_body, url) do http_client().post( url, xml_body, headers(), timeout: @timeout, recv_timeout: @recv_timeout ) end defp commcare_api_case_url(commcare_domain, case_id) do "https://www.commcarehq.org/a/#{commcare_domain}/api/v0.5/case/#{case_id}/?format=json&child_cases__full=true" end defp commcare_api_post_url(commcare_domain) do "https://www.commcarehq.org/a/#{commcare_domain}/receiver/" end defp headers do [Authorization: "ApiKey #{commcare_api_token()}"] end defp commcare_api_token, do: Application.fetch_env!(:epi_contacts, :commcare_api_token) defp http_client, do: Application.fetch_env!(:epi_contacts, :http_client) def post_success?(body) do body |> Floki.parse_document!() |> Floki.find(~S(openrosaresponse[xmlns="http://openrosa.org/http/response"] message[nature="submit_success"])) |> Floki.text() |> String.trim() |> Kernel.==("√") end defp parse_response({:ok, %{status_code: 201, body: body}} = response) do if post_success?(body) do :ok else Logger.warn("bad response from commcare: #{inspect(body)}") {:error, {:commcare_post_error, response}} end end defp parse_response({:ok, %{status_code: 200, body: body}}), do: {:ok, Jason.decode!(body)} defp parse_response({:ok, %{status_code: 400}}), do: {:error, :commcare_data_error} defp parse_response({:ok, %{status_code: 401}}), do: {:error, :commcare_authorization_error} defp parse_response({:ok, %{status_code: 403}}), do: {:error, :commcare_forbidden} defp parse_response({:ok, %{status_code: 404}}), do: {:error, :not_found} defp parse_response({:error, %HTTPoison.Error{reason: :timeout}}), do: {:error, :timeout} defp parse_response({:error, _} = response), do: response end
28.703704
115
0.674409
9ec578e2eb9dd9726d42334b51fb1bfe12e33ff1
558
exs
Elixir
test/views/error_view_test.exs
slurmulon/thing
21791e7a9579d41de28a0a6131218aed3679d033
[ "Apache-2.0" ]
null
null
null
test/views/error_view_test.exs
slurmulon/thing
21791e7a9579d41de28a0a6131218aed3679d033
[ "Apache-2.0" ]
null
null
null
test/views/error_view_test.exs
slurmulon/thing
21791e7a9579d41de28a0a6131218aed3679d033
[ "Apache-2.0" ]
null
null
null
defmodule Thing.ErrorViewTest do use Thing.ConnCase, async: true # Bring render/3 and render_to_string/3 for testing custom views import Phoenix.View test "renders 404.html" do assert render_to_string(Thing.ErrorView, "404.html", []) == "Page not found" end test "render 500.html" do assert render_to_string(Thing.ErrorView, "500.html", []) == "Server internal error" end test "render any other" do assert render_to_string(Thing.ErrorView, "505.html", []) == "Server internal error" end end
25.363636
66
0.670251
9ec5848a8b87e76f237b5f88b7d3ab4cbd6070ad
2,251
exs
Elixir
apps/ewallet_db/test/ewallet_db/blockchain_wallet_test.exs
AndonMitev/EWallet
898cde38933d6f134734528b3e594eedf5fa50f3
[ "Apache-2.0" ]
322
2018-02-28T07:38:44.000Z
2020-05-27T23:09:55.000Z
apps/ewallet_db/test/ewallet_db/blockchain_wallet_test.exs
AndonMitev/EWallet
898cde38933d6f134734528b3e594eedf5fa50f3
[ "Apache-2.0" ]
643
2018-02-28T12:05:20.000Z
2020-05-22T08:34:38.000Z
apps/ewallet_db/test/ewallet_db/blockchain_wallet_test.exs
AndonMitev/EWallet
898cde38933d6f134734528b3e594eedf5fa50f3
[ "Apache-2.0" ]
63
2018-02-28T10:57:06.000Z
2020-05-27T23:10:38.000Z
# Copyright 2018-2019 OmiseGO Pte Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. defmodule EWalletDB.BlockchainWalletTest do use EWalletDB.SchemaCase, async: true import EWalletDB.Factory alias EWalletDB.{BlockchainWallet} describe "BlockchainWallet factory" do test_has_valid_factory(BlockchainWallet) end describe "insert/1" do test_insert_generate_uuid(BlockchainWallet, :uuid) test_insert_generate_timestamps(BlockchainWallet) test_insert_field_length(BlockchainWallet, :address) test_insert_field_length(BlockchainWallet, :name) test_insert_field_length(BlockchainWallet, :public_key) test_insert_prevent_duplicate(BlockchainWallet, :address, "0x123") test_insert_prevent_duplicate(BlockchainWallet, :name, "A name") test_insert_prevent_duplicate(BlockchainWallet, :public_key, "0x321") test "insert successfuly when type is valid" do {res_1, _wallet} = :blockchain_wallet |> params_for(%{type: "hot"}) |> BlockchainWallet.insert() {res_2, _wallet} = :blockchain_wallet |> params_for(%{type: "cold"}) |> BlockchainWallet.insert() assert res_1 == :ok assert res_2 == :ok end test "fails to insert when type is invalid" do {res, _wallet} = :blockchain_wallet |> params_for(%{type: "invalid_type"}) |> BlockchainWallet.insert() assert res == :error end end describe "get_by/2" do test_schema_get_by_allows_search_by(BlockchainWallet, :address) test_schema_get_by_allows_search_by(BlockchainWallet, :name) test_schema_get_by_allows_search_by(BlockchainWallet, :public_key) test_schema_get_by_allows_search_by(BlockchainWallet, :type) end end
33.102941
74
0.733452
9ec5887b0733eac58319d4edbc7d324fc2c2a293
256
ex
Elixir
test/support/mocks.ex
f0lio/edgedb-elixir
b285bd8037b0b951aabfa1d1733889880f8bfd66
[ "MIT" ]
30
2021-05-19T08:54:44.000Z
2022-03-11T22:52:25.000Z
test/support/mocks.ex
f0lio/edgedb-elixir
b285bd8037b0b951aabfa1d1733889880f8bfd66
[ "MIT" ]
3
2021-11-17T21:26:01.000Z
2022-03-12T09:49:25.000Z
test/support/mocks.ex
f0lio/edgedb-elixir
b285bd8037b0b951aabfa1d1733889880f8bfd66
[ "MIT" ]
3
2021-08-29T14:55:41.000Z
2022-03-12T01:30:35.000Z
Mox.defmock(Tests.Support.Mocks.FileMock, for: Tests.Support.Mocks.Behaviours.File) Mox.defmock(Tests.Support.Mocks.SystemMock, for: Tests.Support.Mocks.Behaviours.System) Mox.defmock(Tests.Support.Mocks.PathMock, for: Tests.Support.Mocks.Behaviours.Path)
64
87
0.824219
9ec595fb9075c17ba142f2c4dc5cf67f8c9a0881
6,715
ex
Elixir
lib/cloudinary/transformation/layer.ex
h-ikeda/cloudinary-elixir
5e70aedb6d1e51839f1e21c49b40293036b99efd
[ "MIT" ]
1
2021-05-23T09:17:44.000Z
2021-05-23T09:17:44.000Z
lib/cloudinary/transformation/layer.ex
h-ikeda/cloudinary-elixir
5e70aedb6d1e51839f1e21c49b40293036b99efd
[ "MIT" ]
44
2020-05-15T03:36:36.000Z
2022-03-23T21:39:11.000Z
lib/cloudinary/transformation/layer.ex
h-ikeda/cloudinary-elixir
5e70aedb6d1e51839f1e21c49b40293036b99efd
[ "MIT" ]
null
null
null
defmodule Cloudinary.Transformation.Layer do @moduledoc """ The overlay/underlay transformations. """ defguardp is_font_size(font_size) when is_integer(font_size) and font_size >= 0 @typedoc """ Any type of overlay/underlay options. The `t:String.t/0` is treated as a public ID of the overlay image. ## Example iex> #{__MODULE__}.to_url_string(overlay: "badge") "l_badge" """ @type t :: String.t() | fetch | lut | text | subtitles | video @doc """ Converts the overlay/underlay options to an URL string. """ @spec to_url_string(t) :: String.t() def to_url_string(id) when is_binary(id), do: String.replace(id, "/", ":") @typedoc """ The overlay image coming from a remote server. ## Official documentation * https://cloudinary.com/documentation/image_transformation_reference#overlay_parameter ## Example iex> #{__MODULE__}.to_url_string(%{url: "http://example.com/path/to/remote/image.jpg"}) "fetch:aHR0cDovL2V4YW1wbGUuY29tL3BhdGgvdG8vcmVtb3RlL2ltYWdlLmpwZw==" """ @type fetch :: %{url: String.t()} def to_url_string(%{url: url}) when is_binary(url), do: "fetch:#{Base.url_encode64(url)}" @typedoc """ Applying 3D look up tables to the image. ## Official documentation * https://cloudinary.com/documentation/image_transformations#applying_3d_luts_to_images ## Example iex> #{__MODULE__}.to_url_string(%{lut: "iwltbap_aspen.3dl"}) "lut:iwltbap_aspen.3dl" """ @type lut :: %{lut: String.t()} def to_url_string(%{lut: id}) when is_binary(id), do: "lut:#{id}" @typedoc """ The overlay video specified by the public ID. ## Official documentation * https://cloudinary.com/documentation/video_transformation_reference#adding_overlays_to_videos * https://cloudinary.com/documentation/video_manipulation_and_delivery#adding_video_overlays ## Example iex> #{__MODULE__}.to_url_string(%{video: "dog"}) "video:dog" """ @type video :: %{video: String.t()} def to_url_string(%{video: id}) when is_binary(id), do: "video:#{id}" @typedoc """ Adding subtitles to the video. ## Official documentation * https://cloudinary.com/documentation/video_manipulation_and_delivery#adding_subtitles ## Example iex> #{__MODULE__}.to_url_string(%{subtitles: "sample_sub_en.srt"}) "subtitles:sample_sub_en.srt" iex> #{__MODULE__}.to_url_string(%{subtitles: "sample_sub_en.srt", font_family: "arial", font_size: 20}) "subtitles:arial_20:sample_sub_en.srt" """ @type subtitles :: %{ required(:subtitles) => String.t(), optional(:font_family) => String.t(), optional(:font_size) => non_neg_integer } def to_url_string(%{subtitles: id, font_family: font_family, font_size: font_size}) when is_binary(id) and is_binary(font_family) and is_font_size(font_size) do "subtitles:#{font_family}_#{font_size}:#{id}" end def to_url_string(%{subtitles: id}) when is_binary(id), do: "subtitles:#{id}" @typedoc """ Adding a text captions to the image. The font styles should be specified by the options `:font_family`, `:font_size`, and optional style parameters, or a public ID of predefined text image with the `:predefined` option. 
## Official documentation * https://cloudinary.com/documentation/image_transformations#adding_text_captions * https://cloudinary.com/documentation/video_manipulation_and_delivery#adding_text_captions ## Example iex> #{__MODULE__}.to_url_string(%{text: "Flowers", font_family: "Arial", font_size: 80}) "text:Arial_80:Flowers" iex> #{__MODULE__}.to_url_string(%{text: "Stylish Text/", font_family: "verdana", font_size: 75, font_weight: :bold, text_decoration: :underline, letter_spacing: 14}) "text:verdana_75_bold_underline_letter_spacing_14:Stylish%20Text%2F" iex> #{__MODULE__}.to_url_string(%{text: "Stylish text", predefined: "sample_text_style"}) "text:sample_text_style:Stylish%20Text" """ @type text :: %{text: String.t(), predefined: String.t()} | %{ required(:text) => String.t(), required(:font_family) => String.t(), required(:font_size) => non_neg_integer, optional(:font_weight) => :normal | :bold | :thin | :light, optional(:font_style) => :normal | :italic, optional(:text_decoration) => :normal | :underline | :strikethrough, optional(:text_align) => :left | :center | :right | :end | :start | :justify, optional(:stroke) => :none | :stroke, optional(:letter_spacing) => number, optional(:line_spacing) => number, optional(:font_antialias) => :none | :gray | :subpixel | :fast | :good | :best, optional(:font_hinting) => :none | :slight | :medium | :full } def to_url_string(%{text: text, predefined: id}) when is_binary(text) and is_binary(id) do "text:#{id}:#{escape(text)}" end def to_url_string(%{text: text, font_family: font_family, font_size: font_size} = options) when is_binary(text) and is_binary(font_family) and is_font_size(font_size) do text_style = options |> Map.take([ :font_weight, :font_style, :text_decoration, :text_align, :stroke, :letter_spacing, :line_spacing, :font_antialias, :font_hinting ]) |> Enum.map(fn {:font_weight, font_weight} when font_weight in [:normal, :bold, :thin, :light] -> font_weight {:font_style, font_style} when font_style in [:normal, :italic] -> font_style {:text_decoration, decoration} when decoration in [:normal, :underline, :strikethrough] -> decoration {:text_align, align} when align in [:left, :center, :right, :end, :start, :justify] -> align {:stroke, stroke} when stroke in [:none, :stroke] -> stroke {:letter_spacing, letter_spacing} when is_number(letter_spacing) -> "letter_spacing_#{letter_spacing}" {:line_spacing, line_spacing} when is_number(line_spacing) -> "line_spacing_#{line_spacing}" {:font_antialias, aalias} when aalias in [:none, :gray, :subpixel, :fast, :good, :best] -> "antialias_#{aalias}" {:font_hinting, hinting} when hinting in [:none, :slight, :medium, :full] -> "hinting_#{hinting}" end) "text:#{Enum.join([font_family, font_size | text_style], "_")}:#{escape(text)}" end @spec escape(String.t()) :: String.t() defp escape(text) do text |> URI.encode(&(&1 not in [?,, ?%])) |> URI.encode(&URI.char_unreserved?/1) end end
39.733728
172
0.651229
9ec5a26f456d896676e2f6763cfa8d26db722f96
411
ex
Elixir
day_05/lib/day05.ex
simon-wolf/advent-of-code-2019
571d30f156a2beeeb49a52a2f0223fff5051e7b3
[ "MIT" ]
null
null
null
day_05/lib/day05.ex
simon-wolf/advent-of-code-2019
571d30f156a2beeeb49a52a2f0223fff5051e7b3
[ "MIT" ]
null
null
null
day_05/lib/day05.ex
simon-wolf/advent-of-code-2019
571d30f156a2beeeb49a52a2f0223fff5051e7b3
[ "MIT" ]
null
null
null
defmodule Day05 do @moduledoc """ Documentation for Day05. """ alias Day05.Coder def air_conditioning_test() do input_value = 1 "./intcode.txt" |> Aoc.get_file_list_values() |> Coder.execute_program(input_value) end def radiator_controller_test() do input_value = 5 "./intcode.txt" |> Aoc.get_file_list_values() |> Coder.execute_program(input_value) end end
17.125
41
0.673966
9ec5a71632babd2bd6d4cae218afcdb7276a4144
3,741
ex
Elixir
lib/yuki_helper/donwload.ex
penqen/yuki_helper
9952dd4711fc2dc98d8cdc41bd758b9b271ea036
[ "MIT" ]
null
null
null
lib/yuki_helper/donwload.ex
penqen/yuki_helper
9952dd4711fc2dc98d8cdc41bd758b9b271ea036
[ "MIT" ]
1
2021-05-13T16:28:58.000Z
2021-05-13T16:28:58.000Z
lib/yuki_helper/donwload.ex
penqen/yuki_helper
9952dd4711fc2dc98d8cdc41bd758b9b271ea036
[ "MIT" ]
null
null
null
defmodule YukiHelper.Download do @moduledoc """ Provides a module related to downloading teastcases. """ alias YukiHelper.{Config, Problem, Api.Yukicoder} alias YukiHelper.Exceptions.DownloadError @typedoc """ Two types of testcase file, input file and output file. """ @type filetype() :: :in | :out @typedoc """ Filename of the testcase. """ @type filename() :: String.t() @typedoc """ A list of filename of the testcase. """ @type filename_list() :: [filename()] @typedoc """ Data of the response body. """ @type data() :: String.t() @doc """ Gets a list of testcase for the specified problem. """ @spec get_testcases(Config.t(), Problem.no(), keyword()) :: {:ok, filename_list()} | {:error, term()} def get_testcases(config, no, opts \\ []) do path = if Keyword.get(opts, :problem_id), do: "/problems/#{no}/file/in", else: "/problems/no/#{no}/file/in" headers = Config.headers!(config) options = Config.options!(config) with res <- Yukicoder.get!(path, headers, options), 200 <- Map.get(res, :status_code), body <- Map.get(res, :body) do {:ok, body} else 404 -> { :error, %DownloadError{ path: path, status: 404, description: "a target was not found" } } code -> { :error, %DownloadError{ path: path, status: code, description: "an unexpected error has occurred" } } end end @spec get_testcases!(Config.t(), Problem.no(), keyword()) :: filename_list() def get_testcases!(config, no, opts \\ []) do case get_testcases(config, no, opts) do {:ok, body} -> body {:error, err} -> Mix.raise err end end @doc """ Downloads the specified testcase for the problem. """ @spec get_testcase(Config.t(), Problem.no(), filename(), filetype(), keyword()) :: {:ok, data()} | {:error, term()} def get_testcase(config, no, filename, type, opts \\ []) do path = if Keyword.get(opts, :problem_id), do: "/problems/#{no}/file/#{type}/#{filename}", else: "/problems/no/#{no}/file/#{type}/#{filename}" headers = Config.headers!(config) options = Config.options!(config) with res <- Yukicoder.get!(path, headers, options), 200 <- Map.get(res, :status_code), body <- Map.get(res, :body) do body = if is_number(body), do: "#{body}\n", else: body {:ok, body} else 404 -> { :error, %DownloadError{ path: path, status: 404, description: "a target was not found" } } code -> { :error, %DownloadError{ path: path, status: code, description: "an unexpected error has occurred" } } end end @spec get_testcase!(Config.t(), Problem.no(), filename(), filetype(), keyword()) :: data() def get_testcase!(config, no, filename, type, opts \\ []) do case get_testcase(config, no, filename, type, opts) do {:ok, body} -> body {:error, err} -> Mix.raise err end end @doc """ Returns whetheir testcases have already been downloaded or not. """ @spec download_tastcases?(filename_list(), Config.t(), Problem.no()) :: boolean() def download_tastcases?(testcase_list, config, no) do root = Path.expand(Problem.problem_path(config, no)) Enum.reduce(testcase_list, true, fn file, download? -> Enum.reduce([:in, :out], download?, fn filetype, download? -> download? && File.exists?(Path.join([root, "#{filetype}", file])) end) end) end end
28.12782
117
0.565357
9ec5a872df7bfcf0f716cf54b831aa72b74ca954
2,125
ex
Elixir
clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1_explicit_content_annotation.ex
hauptbenutzer/elixir-google-api
7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2
[ "Apache-2.0" ]
null
null
null
clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1_explicit_content_annotation.ex
hauptbenutzer/elixir-google-api
7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2
[ "Apache-2.0" ]
null
null
null
clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1_explicit_content_annotation.ex
hauptbenutzer/elixir-google-api
7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2
[ "Apache-2.0" ]
1
2020-11-10T16:58:27.000Z
2020-11-10T16:58:27.000Z
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an &quot;AS IS&quot; BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This class is auto generated by the swagger code generator program. # https://github.com/swagger-api/swagger-codegen.git # Do not edit the class manually. defmodule GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1ExplicitContentAnnotation do @moduledoc """ Explicit content annotation (based on per-frame visual signals only). If no explicit content has been detected in a frame, no annotations are present for that frame. ## Attributes - frames ([GoogleCloudVideointelligenceV1ExplicitContentFrame]): All video frames where explicit content was detected. Defaults to: `null`. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :frames => list( GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1ExplicitContentFrame.t() ) } field( :frames, as: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1ExplicitContentFrame, type: :list ) end defimpl Poison.Decoder, for: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1ExplicitContentAnnotation do def decode(value, options) do GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1ExplicitContentAnnotation.decode( value, options ) end end defimpl Poison.Encoder, for: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1ExplicitContentAnnotation do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
34.274194
167
0.760471
9ec5ed1f0a44ffb51454df649ae563ffcef31996
781
ex
Elixir
lib/tpuserve/driver.ex
seanmor5/tpuserve
0a0a5fb74457b3893d87cb52bf98a660f8d1190d
[ "Apache-2.0" ]
null
null
null
lib/tpuserve/driver.ex
seanmor5/tpuserve
0a0a5fb74457b3893d87cb52bf98a660f8d1190d
[ "Apache-2.0" ]
null
null
null
lib/tpuserve/driver.ex
seanmor5/tpuserve
0a0a5fb74457b3893d87cb52bf98a660f8d1190d
[ "Apache-2.0" ]
null
null
null
defmodule TPUServe.Driver do require Logger use GenServer alias __MODULE__, as: Driver @name __MODULE__ defstruct [:ref] # TODO: Is driver thread safe? def start_link(_) do Logger.info("Starting TPUServe Driver") {:ok, driver_ref} = TPUServe.NIF.init_driver() driver = %Driver{ref: driver_ref} :persistent_term.put({__MODULE__, :driver}, driver) GenServer.start_link(__MODULE__, :ok, name: __MODULE__) end def fetch! do :persistent_term.get({__MODULE__, :driver}, nil) || GenServer.call(@name, :fetch, :infinity) end @impl true def init(:ok) do {:ok, :unused_state} end @impl true def handle_call(:fetch, _from, _state) do driver = :persistent_term.get({__MODULE__, :driver}, nil) {:reply, driver} end end
22.314286
96
0.681178
9ec5f2a82825d2281e054183c28fe2e6bc5e3ff9
584
ex
Elixir
apps/neoscan_cache/lib/neoscan_cache/ets_process.ex
vincentgeneste/neo-scan
4a654575331eeb3eb12d4fd61696a7bd6dbca3ce
[ "MIT" ]
1
2019-12-16T17:21:21.000Z
2019-12-16T17:21:21.000Z
apps/neoscan_cache/lib/neoscan_cache/ets_process.ex
vincentgeneste/neo-scan
4a654575331eeb3eb12d4fd61696a7bd6dbca3ce
[ "MIT" ]
null
null
null
apps/neoscan_cache/lib/neoscan_cache/ets_process.ex
vincentgeneste/neo-scan
4a654575331eeb3eb12d4fd61696a7bd6dbca3ce
[ "MIT" ]
null
null
null
defmodule NeoscanCache.EtsProcess do use GenServer # Callbacks def start_link do GenServer.start_link(__MODULE__, :ok, name: __MODULE__) end @impl true def init(:ok) do {:ok, %{}} end def create_table(name) do GenServer.call(__MODULE__, {:create, name}) end @impl true def handle_call({:create, name}, _from, state) do unless name in :ets.all() do :ets.new(name, [ :set, :named_table, :public, read_concurrency: true, write_concurrency: true ]) end {:reply, :ok, state} end end
17.176471
59
0.606164
9ec60c3e6f747ad2e77c9454e61dd5d337c8196a
1,615
ex
Elixir
lib/cforum/jobs/user_cleanup_job.ex
jrieger/cforum_ex
61f6ce84708cb55bd0feedf69853dae64146a7a0
[ "MIT" ]
16
2019-04-04T06:33:33.000Z
2021-08-16T19:34:31.000Z
lib/cforum/jobs/user_cleanup_job.ex
jrieger/cforum_ex
61f6ce84708cb55bd0feedf69853dae64146a7a0
[ "MIT" ]
294
2019-02-10T11:10:27.000Z
2022-03-30T04:52:53.000Z
lib/cforum/jobs/user_cleanup_job.ex
jrieger/cforum_ex
61f6ce84708cb55bd0feedf69853dae64146a7a0
[ "MIT" ]
10
2019-02-10T10:39:24.000Z
2021-07-06T11:46:05.000Z
defmodule Cforum.Jobs.UserCleanupJob do use Oban.Worker, queue: :background, max_attempts: 5 import Ecto.Query, warn: false require Logger alias Cforum.Repo alias Cforum.System alias Cforum.Users.User @impl Oban.Worker def perform(_) do cleanup_unconfirmed_users() cleanup_users_wo_posts() cleanup_inactive_users() Cachex.clear(:cforum) :ok end defp cleanup_inactive_users() do from(user in User, where: fragment("? < NOW() - INTERVAL '30 days'", user.inactivity_notification_sent_at)) |> delete_users() end defp cleanup_unconfirmed_users() do from(user in User, where: is_nil(user.confirmed_at), where: fragment("? + interval '24 hours'", user.confirmation_sent_at) <= ^DateTime.utc_now() ) |> delete_users() end defp cleanup_users_wo_posts() do from(user in User, where: (is_nil(user.last_visit) and fragment("? + interval '30 days' <= NOW()", user.created_at)) or fragment("? + interval '30 days'", user.last_visit) <= ^DateTime.utc_now(), where: fragment( "NOT EXISTS(SELECT message_id FROM messages WHERE messages.user_id = ? and messages.deleted = false)", user.user_id ) ) |> delete_users() end defp delete_users(query) do Repo.transaction( fn -> query |> Repo.stream() |> Enum.each(fn user -> Logger.info("Automatically deleting user #{user.username}") System.audited("autodestroy", nil, fn -> Repo.delete(user) end) end) end, timeout: :infinity ) end end
25.634921
112
0.637152
9ec63bdd2a580642a342d06556f1e32914ef1430
1,706
ex
Elixir
apps/dockup/lib/dockup/backends/compose/deploy_job.ex
rudydydy/dockup
0d05d1ef65cc5523800bd852178361521cd3e7d8
[ "MIT" ]
null
null
null
apps/dockup/lib/dockup/backends/compose/deploy_job.ex
rudydydy/dockup
0d05d1ef65cc5523800bd852178361521cd3e7d8
[ "MIT" ]
null
null
null
apps/dockup/lib/dockup/backends/compose/deploy_job.ex
rudydydy/dockup
0d05d1ef65cc5523800bd852178361521cd3e7d8
[ "MIT" ]
null
null
null
defmodule Dockup.Backends.Compose.DeployJob do
  require Logger

  alias Dockup.{
    DefaultCallback,
    Project,
    Backends.Compose.Container,
    Backends.Compose.DockerComposeConfig
  }

  def spawn_process(%{id: id, git_url: repository, branch: branch}, callback) do
    spawn(fn -> perform(id, repository, branch, callback) end)
  end

  def perform(deployment_id, repository, branch, callback \\ DefaultCallback, deps \\ []) do
    project = deps[:project] || Project
    container = deps[:container] || Container
    docker_compose_config = deps[:docker_compose_config] || DockerComposeConfig

    project_id = to_string(deployment_id)
    project.clone_repository(project_id, repository, branch)
    urls = docker_compose_config.rewrite_variables(project_id)
    container.start_containers(project_id)
    callback.set_log_url(deployment_id, log_url(project_id))
    callback.update_status(deployment_id, "waiting_for_urls")
    urls = project.wait_till_up(urls, project_id)
    callback.set_urls(deployment_id, urls)
    callback.update_status(deployment_id, "started")
  rescue
    exception ->
      stacktrace = System.stacktrace
      message = Exception.message(exception)
      handle_error_message(callback, deployment_id, message)
      reraise(exception, stacktrace)
  end

  defp log_url(project_id) do
    base_domain = Application.fetch_env!(:dockup, :base_domain)
    "logio.#{base_domain}/#?projectName=#{project_id}"
  end

  defp handle_error_message(callback, deployment_id, message) do
    message = "An error occurred when deploying #{deployment_id}: #{message}"
    Logger.error message
    callback.update_status(deployment_id, "failed")
  end
end
32.188679
80
0.732708
9ec647af0ec15261f78b43fae890d2e9a1bcc99e
1,248
ex
Elixir
test/support/conn_case.ex
Harmful-Alchemist/FunRetro
6b53c16adb2c233e5338799732a5a5c2fe10acaf
[ "MIT" ]
null
null
null
test/support/conn_case.ex
Harmful-Alchemist/FunRetro
6b53c16adb2c233e5338799732a5a5c2fe10acaf
[ "MIT" ]
null
null
null
test/support/conn_case.ex
Harmful-Alchemist/FunRetro
6b53c16adb2c233e5338799732a5a5c2fe10acaf
[ "MIT" ]
null
null
null
defmodule FunRetroWeb.ConnCase do @moduledoc """ This module defines the test case to be used by tests that require setting up a connection. Such tests rely on `Phoenix.ConnTest` and also import other functionality to make it easier to build common data structures and query the data layer. Finally, if the test case interacts with the database, we enable the SQL sandbox, so changes done to the database are reverted at the end of every test. If you are using PostgreSQL, you can even run database tests asynchronously by setting `use FunRetroWeb.ConnCase, async: true`, although this option is not recommended for other databases. """ use ExUnit.CaseTemplate using do quote do # Import conveniences for testing with connections import Plug.Conn import Phoenix.ConnTest import FunRetroWeb.ConnCase alias FunRetroWeb.Router.Helpers, as: Routes # The default endpoint for testing @endpoint FunRetroWeb.Endpoint end end setup tags do :ok = Ecto.Adapters.SQL.Sandbox.checkout(FunRetro.Repo) unless tags[:async] do Ecto.Adapters.SQL.Sandbox.mode(FunRetro.Repo, {:shared, self()}) end {:ok, conn: Phoenix.ConnTest.build_conn()} end end
28.363636
70
0.725962
9ec65425a05ffde549e76153e9aecb235b893815
2,210
ex
Elixir
project/grep/lib/grep/search.ex
MickeyOoh/Exercises
3b34e7fdab4a09e0269d20c68531b4fb75bb7f16
[ "MIT" ]
null
null
null
project/grep/lib/grep/search.ex
MickeyOoh/Exercises
3b34e7fdab4a09e0269d20c68531b4fb75bb7f16
[ "MIT" ]
1
2018-06-19T18:59:41.000Z
2018-06-19T18:59:41.000Z
project/grep/lib/grep/search.ex
MickeyOoh/Exercises
3b34e7fdab4a09e0269d20c68531b4fb75bb7f16
[ "MIT" ]
null
null
null
defmodule Grep.Search do
  @moduledoc """
  Documentation for Grep.
  """
  @switch [help: :boolean, line_num: :boolean, file_name: :boolean,
           case: :boolean, invert: :boolean, entire: :boolean]
  @alias [n: :line_num, l: :file_name, i: :case, v: :invert, x: :entire, h: :help]

  def parse_args(argv) do
    # parse(argv(), options()) :: {parsed(), argv(), errors()}
    {opts, args, _errors} = OptionParser.parse(argv, switches: @switch, aliases: @alias)

    case {opts[:help], args} do
      {true, _} ->
        :help

      {_, [pattern | files]} ->
        # search chars -> regex -> function
        fnregex = mk_regex(opts, pattern)
        # single file?, print line numbers?
        outfmt = line_formatter(length(files) == 1, opts[:line_num] != nil)

        if opts[:file_name] == true do
          {:files, fnregex, files, outfmt}
        else
          {:lines, fnregex, files, outfmt}
        end

      _ ->
        :help
    end
  end

  defp mk_regex(opts, pattern) do
    pat = if opts[:entire], do: "^#{pattern}$", else: pattern
    cas = if opts[:case], do: "i", else: ""
    regex = Regex.compile!(pat, cas)
    # The -v flag inverts the match, so flip the result when :invert was given.
    invert = Keyword.get_values(opts, :invert) != []
    fn line -> Regex.match?(regex, line) != invert end
  end

  defp line_formatter(single_file?, print_nums?) do
    # f: file name, ix: line num, l: chars of line
    fn {f, ix, l} ->
      case {single_file?, print_nums?} do
        {true, true} -> "#{ix}:#{l}"
        {false, true} -> "#{f}:#{ix}:#{l}"
        {true, false} -> "#{l}"
        {false, false} -> "#{f}:#{l}"
      end
    end
  end

  defp matching_lines(fnregex, file) do
    File.stream!(file)
    |> Enum.map(fn l -> String.trim_trailing(l, "\n") end)
    |> Enum.with_index(1)
    |> Enum.map(fn {l, i} -> {file, i, l} end)
    |> Enum.filter(fn {_, _, l} -> fnregex.(l) end)
  end

  def process_files(fnregex, files, _) do
    files
    |> Enum.filter(fn x -> !Enum.empty?(matching_lines(fnregex, x)) end)
  end

  def process_lines(fnregex, files, outfmt) do
    files
    |> Enum.flat_map(fn x -> matching_lines(fnregex, x) end)
    |> Enum.map(outfmt)
  end
end
30.694444
74
0.561991
9ec6643a7a7a911dc5ce9b7b966eb111e69ec441
19,759
ex
Elixir
lib/gnat.ex
mkaput/nats.ex
5c1ac83087e64fc3021093f1f7a7d494f81ff903
[ "MIT" ]
null
null
null
lib/gnat.ex
mkaput/nats.ex
5c1ac83087e64fc3021093f1f7a7d494f81ff903
[ "MIT" ]
null
null
null
lib/gnat.ex
mkaput/nats.ex
5c1ac83087e64fc3021093f1f7a7d494f81ff903
[ "MIT" ]
null
null
null
# State transitions: # :waiting_for_message => receive PING, send PONG => :waiting_for_message # :waiting_for_message => receive MSG... -> :waiting_for_message defmodule Gnat do use GenServer require Logger alias Gnat.{Command, Parsec} @type t :: GenServer.server() @type headers :: [{binary(), iodata()}] # A message received from NATS will be delivered to your process in this form. # Please note that the `:reply_to` and `:headers` keys are optional. # They will only be present if the message was received from the NATS server with # headers or a reply_to topic @type message :: %{ gnat: t(), topic: String.t(), body: String.t(), sid: non_neg_integer(), reply_to: String.t(), headers: headers() } @type sent_message :: {:msg, message()} @default_connection_settings %{ host: 'localhost', port: 4222, tcp_opts: [:binary], connection_timeout: 3_000, ssl_opts: [], tls: false, inbox_prefix: "_INBOX.", } @request_sid 0 @doc """ Starts a connection to a nats broker ``` {:ok, gnat} = Gnat.start_link(%{host: '127.0.0.1', port: 4222}) # if the server requires TLS you can start a connection with: {:ok, gnat} = Gnat.start_link(%{host: '127.0.0.1', port: 4222, tls: true}) # if the server requires TLS and a client certificate you can start a connection with: {:ok, gnat} = Gnat.start_link(%{tls: true, ssl_opts: [certfile: "client-cert.pem", keyfile: "client-key.pem"]}) # you can customize default "_INBOX." inbox prefix with: {:ok, gnat} = Gnat.start_link(%{host: '127.0.0.1', port: 4222, inbox_prefix: "my_prefix._INBOX."}) ``` You can also pass arbitrary SSL or TCP options in the `tcp_opts` and `ssl_opts` keys. If you pass custom TCP options please include `:binary`. Gnat uses binary matching to parse messages. The final `opts` argument will be passed to the `GenServer.start_link` call so you can pass things like `[name: :gnat_connection]`. """ @spec start_link(map(), keyword()) :: GenServer.on_start def start_link(connection_settings \\ %{}, opts \\ []) do GenServer.start_link(__MODULE__, connection_settings, opts) end @doc """ Gracefully shuts down a connection ``` {:ok, gnat} = Gnat.start_link() :ok = Gnat.stop(gnat) ``` """ @spec stop(t()) :: :ok def stop(pid), do: GenServer.call(pid, :stop) @doc """ Subscribe to a topic Supported options: * queue_group: a string that identifies which queue group you want to join By default each subscriber will receive a copy of every message on the topic. When a queue_group is supplied messages will be spread among the subscribers in the same group. (see [nats queueing](https://nats.io/documentation/concepts/nats-queueing/)) The subscribed process will begin receiving messages with a structure of `t:sent_message/0` ``` {:ok, gnat} = Gnat.start_link() {:ok, subscription} = Gnat.sub(gnat, self(), "topic") receive do {:msg, %{topic: "topic", body: body}} -> IO.puts "Received: \#\{body\}" end ``` """ @spec sub(t(), pid(), String.t, keyword()) :: {:ok, non_neg_integer()} | {:ok, String.t} | {:error, String.t} def sub(pid, subscriber, topic, opts \\ []) do start = :erlang.monotonic_time() result = GenServer.call(pid, {:sub, subscriber, topic, opts}) latency = :erlang.monotonic_time() - start :telemetry.execute([:gnat, :sub], %{latency: latency}, %{topic: topic}) result end @doc """ Publish a message ``` {:ok, gnat} = Gnat.start_link() :ok = Gnat.pub(gnat, "characters", "Ron Swanson") ``` If you want to provide a reply address to receive a response you can pass it as an option. [See request-response pattern](http://nats.io/documentation/concepts/nats-req-rep/). 
``` {:ok, gnat} = Gnat.start_link() :ok = Gnat.pub(gnat, "characters", "Star Lord", reply_to: "me") ``` If you want to publish a message with headers you can pass the `:headers` key in the `opts` like this. ``` {:ok, gnat} = Gnat.start_link() :ok = Gnat.pub(gnat, "listen", "Yo", headers: [{"foo", "bar"}]) ``` Headers must be passed as a `t:headers()` value (a list of tuples). Sending and parsing headers has more overhead than typical nats messages (see [the Nats 2.2 release notes for details](https://docs.nats.io/whats_new_22#message-headers)), so only use them when they are really valuable. """ @spec pub(t(), String.t, binary(), keyword()) :: :ok def pub(pid, topic, message, opts \\ []) do start = :erlang.monotonic_time() opts = prepare_headers(opts) result = GenServer.call(pid, {:pub, topic, message, opts}) latency = :erlang.monotonic_time() - start :telemetry.execute([:gnat, :pub], %{latency: latency} , %{topic: topic}) result end @doc """ Send a request and listen for a response synchronously Following the nats [request-response pattern](http://nats.io/documentation/concepts/nats-req-rep/) this function generates a one-time topic to receive replies and then sends a message to the provided topic. Supported options: * receive_timeout: an integer number of milliseconds to wait for a response. Defaults to 60_000 * headers: a set of headers you want to send with the request (see `Gnat.pub/4`) ``` {:ok, gnat} = Gnat.start_link() case Gnat.request(gnat, "i_can_haz_cheezburger", "plZZZZ?!?!?") do {:ok, %{body: delicious_cheezburger}} -> :yum {:error, :timeout} -> :sad_cat end ``` """ @spec request(t(), String.t, binary(), keyword()) :: {:ok, message} | {:error, :timeout} def request(pid, topic, body, opts \\ []) do start = :erlang.monotonic_time() receive_timeout = Keyword.get(opts, :receive_timeout, 60_000) req = %{recipient: self(), body: body, topic: topic} opts = prepare_headers(opts) req = case Keyword.get(opts, :headers) do nil -> req headers -> Map.put(req, :headers, headers) end {:ok, subscription} = GenServer.call(pid, {:request, req}) response = receive_request_response(subscription, receive_timeout) :ok = unsub(pid, subscription) latency = :erlang.monotonic_time() - start :telemetry.execute([:gnat, :request], %{latency: latency}, %{topic: topic}) response end @doc """ Send a request and listen for multiple responses synchronously This function makes it easy to do a scatter-gather operation where you wait for a limited time and optionally a maximum number of replies. Supported options: * receive_timeout: an integer number of milliseconds to wait for responses. Defaults to 60_000 * max_messages: an integer number of messages to listen for. 
Defaults to -1 meaning unlimited * headers: a set of headers you want to send with the request (see `Gnat.pub/4`) ``` {:ok, gnat} = Gnat.start_link() {:ok, responses} = Gnat.request_multi(gnat, "i_can_haz_fries", "plZZZZZ!?!?", max_messages: 5) Enum.count(responses) #=> 5 ``` """ @spec request_multi(t(), String.t(), binary(), keyword()) :: {:ok, list(message())} def request_multi(pid, topic, body, opts \\ []) do start = :erlang.monotonic_time() receive_timeout_ms = Keyword.get(opts, :receive_timeout, 60_000) expiration = System.monotonic_time(:millisecond) + receive_timeout_ms max_messages = Keyword.get(opts, :max_messages, -1) req = %{recipient: self(), body: body, topic: topic} opts = prepare_headers(opts) req = case Keyword.get(opts, :headers) do nil -> req headers -> Map.put(req, :headers, headers) end {:ok, subscription} = GenServer.call(pid, {:request, req}) responses = receive_multi_request_responses(subscription, expiration, max_messages) :ok = unsub(pid, subscription) latency = :erlang.monotonic_time() - start :telemetry.execute([:gnat, :request_multi], %{latency: latency}, %{topic: topic}) {:ok, responses} end @doc """ Unsubscribe from a topic Supported options: * max_messages: number of messages to be received before automatically unsubscribed This correlates to the [UNSUB](http://nats.io/documentation/internals/nats-protocol/#UNSUB) command in the nats protocol. By default the unsubscribe is affected immediately, but an optional `max_messages` value can be provided which will allow `max_messages` to be received before affecting the unsubscribe. This is especially useful for [request response](http://nats.io/documentation/concepts/nats-req-rep/) patterns. ``` {:ok, gnat} = Gnat.start_link() {:ok, subscription} = Gnat.sub(gnat, self(), "my_inbox") :ok = Gnat.unsub(gnat, subscription) # OR :ok = Gnat.unsub(gnat, subscription, max_messages: 2) ``` """ @spec unsub(t(), non_neg_integer() | String.t, keyword()) :: :ok def unsub(pid, sid, opts \\ []) do start = :erlang.monotonic_time() result = GenServer.call(pid, {:unsub, sid, opts}) :telemetry.execute([:gnat, :unsub], %{latency: :erlang.monotonic_time() - start}) result end @doc """ Ping the NATS server This correlates to the [PING](http://nats.io/documentation/internals/nats-protocol/#PINGPONG) command in the NATS protocol. If the NATS server responds with a PONG message this function will return `:ok` ``` {:ok, gnat} = Gnat.start_link() :ok = Gnat.ping(gnat) ``` """ @deprecated "Pinging is handled internally by the connection, this functionality will be removed" def ping(pid) do GenServer.call(pid, {:ping, self()}) receive do :pong -> :ok after 3_000 -> {:error, "No PONG response after 3 sec"} end end @doc "get the number of active subscriptions" @spec active_subscriptions(t()) :: {:ok, non_neg_integer()} def active_subscriptions(pid) do GenServer.call(pid, :active_subscriptions) end @impl GenServer def init(connection_settings) do connection_settings = Map.merge(@default_connection_settings, connection_settings) case Gnat.Handshake.connect(connection_settings) do {:ok, socket} -> parser = Parsec.new request_inbox_prefix = Map.fetch!(connection_settings, :inbox_prefix) <> "#{nuid()}." 
state = %{socket: socket, connection_settings: connection_settings, next_sid: 1, receivers: %{}, parser: parser, request_receivers: %{}, request_inbox_prefix: request_inbox_prefix} state = create_request_subscription(state) {:ok, state} {:error, reason} -> {:stop, reason} end end @impl GenServer def handle_info({:tcp, socket, data}, %{socket: socket}=state) do data_packets = receive_additional_tcp_data(socket, [data], 10) new_state = Enum.reduce(data_packets, state, fn(data, %{parser: parser}=state) -> {new_parser, messages} = Parsec.parse(parser, data) new_state = %{state | parser: new_parser} Enum.reduce(messages, new_state, &process_message/2) end) {:noreply, new_state} end def handle_info({:ssl, socket, data}, state) do handle_info({:tcp, socket, data}, state) end def handle_info({:tcp_closed, _}, state) do {:stop, "connection closed", state} end def handle_info({:ssl_closed, _}, state) do {:stop, "connection closed", state} end def handle_info({:tcp_error, _, reason}, state) do {:stop, "tcp transport error #{inspect(reason)}", state} end def handle_info(other, state) do Logger.error "#{__MODULE__} received unexpected message: #{inspect other}" {:noreply, state} end @impl GenServer def handle_call(:stop, _from, state) do socket_close(state) {:stop, :normal, :ok, state} end def handle_call({:sub, receiver, topic, opts}, _from, %{next_sid: sid}=state) do sub = Command.build(:sub, topic, sid, opts) :ok = socket_write(state, sub) next_state = add_subscription_to_state(state, sid, receiver) |> Map.put(:next_sid, sid + 1) {:reply, {:ok, sid}, next_state} end def handle_call({:pub, topic, message, opts}, from, state) do commands = [Command.build(:pub, topic, message, opts)] froms = [from] {commands, froms} = receive_additional_pubs(commands, froms, 10) :ok = socket_write(state, commands) Enum.each(froms, fn(from) -> GenServer.reply(from, :ok) end) {:noreply, state} end def handle_call({:request, request}, _from, state) do inbox = make_new_inbox(state) new_state = %{state | request_receivers: Map.put(state.request_receivers, inbox, request.recipient)} pub = case request do %{headers: headers} -> Command.build(:pub, request.topic, request.body, headers: headers, reply_to: inbox) _ -> Command.build(:pub, request.topic, request.body, reply_to: inbox) end :ok = socket_write(new_state, [pub]) {:reply, {:ok, inbox}, new_state} end # When the SID is a string, it's a topic, which is used as a key in the request receiver map. 
def handle_call({:unsub, topic, _opts}, _from, state) when is_binary(topic) do if Map.has_key?(state.request_receivers, topic) do request_receivers = Map.delete(state.request_receivers, topic) new_state = %{state | request_receivers: request_receivers} {:reply, :ok, new_state} else {:reply, :ok, state} end end def handle_call({:unsub, sid, opts}, _from, %{receivers: receivers}=state) do case Map.has_key?(receivers, sid) do false -> {:reply, :ok, state} true -> command = Command.build(:unsub, sid, opts) :ok = socket_write(state, command) state = cleanup_subscription_from_state(state, sid, opts) {:reply, :ok, state} end end def handle_call({:ping, pinger}, _from, state) do :ok = socket_write(state, "PING\r\n") {:reply, :ok, Map.put(state, :pinger, pinger)} end def handle_call(:active_subscriptions, _from, state) do active_subscriptions = Enum.count(state.receivers) {:reply, {:ok, active_subscriptions}, state} end defp create_request_subscription(%{request_inbox_prefix: request_inbox_prefix}=state) do # Example: "_INBOX.Jhf7AcTGP3x4dAV9.*" wildcard_inbox_topic = request_inbox_prefix <> "*" sub = Command.build(:sub, wildcard_inbox_topic, @request_sid, []) :ok = socket_write(state, [sub]) add_subscription_to_state(state, @request_sid, self()) end defp make_new_inbox(%{request_inbox_prefix: prefix}), do: prefix <> nuid() defp nuid(), do: :crypto.strong_rand_bytes(12) |> Base.encode64 defp prepare_headers(opts) do if Keyword.has_key?(opts, :headers) do headers = :cow_http.headers(Keyword.get(opts, :headers)) Keyword.put(opts, :headers, headers) else opts end end defp socket_close(%{socket: socket, connection_settings: %{tls: true}}), do: :ssl.close(socket) defp socket_close(%{socket: socket}), do: :gen_tcp.close(socket) defp socket_write(%{socket: socket, connection_settings: %{tls: true}}, iodata) do :ssl.send(socket, iodata) end defp socket_write(%{socket: socket}, iodata), do: :gen_tcp.send(socket, iodata) defp add_subscription_to_state(%{receivers: receivers}=state, sid, pid) do receivers = Map.put(receivers, sid, %{recipient: pid, unsub_after: :infinity}) %{state | receivers: receivers} end defp cleanup_subscription_from_state(%{receivers: receivers}=state, sid, []) do receivers = Map.delete(receivers, sid) %{state | receivers: receivers} end defp cleanup_subscription_from_state(%{receivers: receivers}=state, sid, [max_messages: n]) do receivers = put_in(receivers, [sid, :unsub_after], n) %{state | receivers: receivers} end defp process_message({:msg, topic, @request_sid, reply_to, body}, state) do if Map.has_key?(state.request_receivers, topic) do send state.request_receivers[topic], {:msg, %{topic: topic, body: body, reply_to: reply_to, gnat: self()}} state else Logger.error "#{__MODULE__} got a response for a request, but that is no longer registered" state end end defp process_message({:msg, topic, sid, reply_to, body}, state) do unless is_nil(state.receivers[sid]) do :telemetry.execute([:gnat, :message_received], %{count: 1}, %{topic: topic}) send state.receivers[sid].recipient, {:msg, %{topic: topic, body: body, reply_to: reply_to, sid: sid, gnat: self()}} update_subscriptions_after_delivering_message(state, sid) else Logger.error "#{__MODULE__} got message for sid #{sid}, but that is no longer registered" state end end defp process_message({:hmsg, topic, @request_sid, reply_to, headers, body}, state) do if Map.has_key?(state.request_receivers, topic) do send state.request_receivers[topic], {:msg, %{topic: topic, body: body, reply_to: reply_to, gnat: self(), headers: headers}} state 
else Logger.error "#{__MODULE__} got a response for a request, but that is no longer registered" state end end defp process_message({:hmsg, topic, sid, reply_to, headers, body}, state) do unless is_nil(state.receivers[sid]) do :telemetry.execute([:gnat, :message_received], %{count: 1}, %{topic: topic}) send state.receivers[sid].recipient, {:msg, %{topic: topic, body: body, reply_to: reply_to, sid: sid, gnat: self(), headers: headers}} update_subscriptions_after_delivering_message(state, sid) else Logger.error "#{__MODULE__} got message for sid #{sid}, but that is no longer registered" state end end defp process_message(:ping, state) do socket_write(state, "PONG\r\n") state end defp process_message(:pong, state) do send state.pinger, :pong state end defp process_message({:error, message}, state) do :error_logger.error_report([ type: :gnat_error_from_broker, message: message, ]) state end defp receive_additional_pubs(commands, froms, 0), do: {commands, froms} defp receive_additional_pubs(commands, froms, how_many_more) do receive do {:"$gen_call", from, {:pub, topic, message, opts}} -> commands = [Command.build(:pub, topic, message, opts) | commands] froms = [from | froms] receive_additional_pubs(commands, froms, how_many_more - 1) after 0 -> {commands, froms} end end defp receive_additional_tcp_data(_socket, packets, 0), do: Enum.reverse(packets) defp receive_additional_tcp_data(socket, packets, n) do receive do {:tcp, ^socket, data} -> receive_additional_tcp_data(socket, [data | packets], n - 1) after 0 -> Enum.reverse(packets) end end defp update_subscriptions_after_delivering_message(%{receivers: receivers}=state, sid) do receivers = case get_in(receivers, [sid, :unsub_after]) do :infinity -> receivers 1 -> Map.delete(receivers, sid) n -> put_in(receivers, [sid, :unsub_after], n - 1) end %{state | receivers: receivers} end defp receive_multi_request_responses(_sub, _exp, 0), do: [] defp receive_multi_request_responses(subscription, expiration, max_messages) do timeout = expiration - :erlang.monotonic_time(:millisecond) cond do timeout < 1 -> [] true -> case receive_request_response(subscription, timeout) do {:error, :timeout} -> [] {:ok, msg} -> [msg | receive_multi_request_responses(subscription, expiration, max_messages - 1)] end end end defp receive_request_response(subscription, timeout) do receive do {:msg, %{topic: ^subscription}=msg} -> {:ok, msg} after timeout -> {:error, :timeout} end end end
36.795158
140
0.665115
9ec6692cd8123ff527829af1bfcb621ff86c253c
2,739
ex
Elixir
lib/web/controllers/admin/shop_item_controller.ex
stevegrossi/ex_venture
e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa
[ "MIT" ]
2
2019-05-14T11:36:44.000Z
2020-07-01T08:54:04.000Z
lib/web/controllers/admin/shop_item_controller.ex
nickwalton/ex_venture
d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb
[ "MIT" ]
null
null
null
lib/web/controllers/admin/shop_item_controller.ex
nickwalton/ex_venture
d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb
[ "MIT" ]
1
2021-01-29T14:12:40.000Z
2021-01-29T14:12:40.000Z
defmodule Web.Admin.ShopItemController do use Web.AdminController alias Web.Item alias Web.Room alias Web.Shop def new(conn, %{"shop_id" => shop_id}) do shop = Shop.get(shop_id) room = Room.get(shop.room_id) changeset = Shop.new_item(shop) conn |> assign(:items, Item.all()) |> assign(:shop, shop) |> assign(:room, room) |> assign(:changeset, changeset) |> render("new.html") end def create(conn, %{"shop_id" => shop_id, "item" => %{"id" => item_id}, "shop_item" => params}) do shop = Shop.get(shop_id) item = Item.get(item_id) case Shop.add_item(shop, item, params) do {:ok, shop_item} -> conn |> put_flash(:info, "Item added to #{shop.name}!") |> redirect(to: shop_path(conn, :show, shop_item.shop_id)) {:error, changeset} -> room = Room.get(shop.room_id) conn |> put_flash(:error, "There was an issue adding the item to the shop. Please try again.") |> assign(:items, Item.all()) |> assign(:shop, shop) |> assign(:room, room) |> assign(:changeset, changeset) |> render("new.html") end end def edit(conn, %{"id" => id}) do shop_item = Shop.get_item(id) shop = Shop.get(shop_item.shop_id) room = Room.get(shop.room_id) changeset = Shop.edit_item(shop_item) conn |> assign(:items, Item.all()) |> assign(:shop_item, shop_item) |> assign(:shop, shop) |> assign(:room, room) |> assign(:changeset, changeset) |> render("edit.html") end def update(conn, %{"id" => id, "shop_item" => params}) do case Shop.update_item(id, params) do {:ok, shop_item} -> conn |> put_flash(:info, "Item updated!") |> redirect(to: shop_path(conn, :show, shop_item.shop_id)) {:error, changeset} -> shop_item = Shop.get_item(id) shop = Shop.get(shop_item.shop_id) room = Room.get(shop.room_id) conn |> put_flash(:error, "There was an issue updating the item. Please try again.") |> assign(:items, Item.all()) |> assign(:shop_item, shop_item) |> assign(:shop, shop) |> assign(:room, room) |> assign(:changeset, changeset) |> render("edit.html") end end def delete(conn, %{"id" => id}) do case Shop.delete_item(id) do {:ok, shop_item} -> conn |> put_flash(:info, "Item deleted!") |> redirect(to: shop_path(conn, :show, shop_item.shop_id)) _ -> conn |> put_flash( :error, "There was an issue deleting the item from the shop. Please try again." ) |> redirect(to: dashboard_path(conn, :index)) end end end
27.666667
99
0.571742
9ec66fbba594645dbdc2d659895e16eb5d4f3d2f
1,102
exs
Elixir
day_23/solution.exs
polarfoxgirl/advent-of-code-2020
549794d5aefca5d413b97a29244b228b470383bb
[ "Unlicense" ]
null
null
null
day_23/solution.exs
polarfoxgirl/advent-of-code-2020
549794d5aefca5d413b97a29244b228b470383bb
[ "Unlicense" ]
null
null
null
day_23/solution.exs
polarfoxgirl/advent-of-code-2020
549794d5aefca5d413b97a29244b228b470383bb
[ "Unlicense" ]
null
null
null
{:ok, input} = File.read("test_input")

defmodule CrabCups do
  defp find_insert_after_candidates(current, max_value) do
    cond do
      current > 4 -> (current - 1)..(current - 4)
      current > 1 -> Enum.concat((current - 1)..1, max_value..(max_value - 4 + current))
      current == 1 -> max_value..(max_value - 3)
    end
  end

  defp do_round(cups) do
    [current, r1, r2, r3 | tail] = cups

    insert_after =
      find_insert_after_candidates(current, 9)
      |> Enum.find(fn x -> x not in [r1, r2, r3] end)

    insert_pos = Enum.find_index(tail, fn x -> x == insert_after end)
    {tail1, tail2} = Enum.split(tail, insert_pos + 1)
    tail1 ++ [r1, r2, r3] ++ tail2 ++ [current]
  end

  def solve(input) do
    # Trim the trailing newline so every grapheme is a digit.
    cups = input |> String.trim() |> String.graphemes() |> Enum.map(&String.to_integer/1)

    result_cups = Enum.reduce(1..100, cups, fn _, acc -> do_round(acc) end)

    i_1 = Enum.find_index(result_cups, fn x -> x == 1 end)
    {tail, [1 | head]} = Enum.split(result_cups, i_1)
    result = head ++ tail

    IO.puts("Result: #{Enum.join(result)}")
  end
end

CrabCups.solve(input)
27.55
75
0.611615
9ec68099c7ed41ead1a7e64756a5c1b8b8f874fe
65
ex
Elixir
lib/servantex_web/views/page_view.ex
alisinabh/servantex
03bd4b7c1c47fc926a37b52670394f827c2d97ad
[ "Apache-2.0" ]
null
null
null
lib/servantex_web/views/page_view.ex
alisinabh/servantex
03bd4b7c1c47fc926a37b52670394f827c2d97ad
[ "Apache-2.0" ]
null
null
null
lib/servantex_web/views/page_view.ex
alisinabh/servantex
03bd4b7c1c47fc926a37b52670394f827c2d97ad
[ "Apache-2.0" ]
null
null
null
defmodule ServantexWeb.PageView do use ServantexWeb, :view end
16.25
34
0.815385
9ec6b114c0ff118eb5f6426b7ba7d4b43d3681e0
299
ex
Elixir
lib/quebo.ex
mlikoga/code-beer-6
249bb3c46b8d09461f3e7c28d5a88a9314c53658
[ "MIT" ]
null
null
null
lib/quebo.ex
mlikoga/code-beer-6
249bb3c46b8d09461f3e7c28d5a88a9314c53658
[ "MIT" ]
null
null
null
lib/quebo.ex
mlikoga/code-beer-6
249bb3c46b8d09461f3e7c28d5a88a9314c53658
[ "MIT" ]
null
null
null
defmodule Quebo do def shorten(url) do if !url do raise "errrrrrouu!" end "que.bo/" <> Base.encode64(url) end def decode(short_url) do { :ok, response } = short_url |> String.split("que.bo/") |> List.last() |> Base.decode64() response end end
14.95
35
0.561873
9ec6cd356ab5eb0939485545f5cd7fe70ab787f4
2,935
exs
Elixir
test/integration/crontab_test.exs
wtfleming/oban
1ffc4d1f25bb5dfb737b9e19e54ab0324e189ea5
[ "Apache-2.0" ]
null
null
null
test/integration/crontab_test.exs
wtfleming/oban
1ffc4d1f25bb5dfb737b9e19e54ab0324e189ea5
[ "Apache-2.0" ]
null
null
null
test/integration/crontab_test.exs
wtfleming/oban
1ffc4d1f25bb5dfb737b9e19e54ab0324e189ea5
[ "Apache-2.0" ]
null
null
null
defmodule Oban.Integration.CrontabTest do use Oban.Case import Ecto.Query, only: [select: 2] @moduletag :integration test "cron jobs are enqueued on startup" do oban_opts = [ repo: Repo, queues: [default: 5], crontab: [ {"* * * * *", Worker, args: worker_args(1)}, {"59 23 31 12 0", Worker, args: worker_args(2)}, {"* * * * *", Worker, args: worker_args(3)}, {"* * * * *", Worker, args: worker_args(4), queue: "alpha"} ] ] start_supervised!({Oban, oban_opts}) assert_receive {:ok, 1} refute_receive {:ok, 2} assert_receive {:ok, 3} refute_receive {:ok, 4} :ok = stop_supervised(Oban) end test "cron jobs are not enqueued twice within the same minute" do oban_opts = [ repo: Repo, queues: [default: 5], crontab: [{"* * * * *", Worker, args: worker_args(1)}] ] start_supervised!({Oban, oban_opts}) assert_receive {:ok, 1} :ok = stop_supervised(Oban) start_supervised!({Oban, oban_opts}) refute_receive {:ok, 1} :ok = stop_supervised(Oban) end test "cron jobs are only enqueued once between nodes" do base_opts = [ repo: Repo, queues: [default: 5], crontab: [{"* * * * *", Worker, args: worker_args(1)}] ] start_supervised!({Oban, Keyword.put(base_opts, :name, ObanA)}, id: ObanA) start_supervised!({Oban, Keyword.put(base_opts, :name, ObanB)}, id: ObanB) assert_receive {:ok, 1} assert 1 == Job |> select(count()) |> Repo.one() :ok = stop_supervised(ObanA) :ok = stop_supervised(ObanB) end test "cron jobs are scheduled within a prefix" do base_opts = [ repo: Repo, queues: [default: 5], crontab: [{"* * * * *", Worker, args: worker_args(1)}] ] start_supervised!({Oban, base_opts ++ [name: ObanA, prefix: "public"]}, id: ObanA) start_supervised!({Oban, base_opts ++ [name: ObanB, prefix: "private"]}, id: ObanB) assert_receive {:ok, 1} assert_receive {:ok, 1} assert 1 == Job |> select(count()) |> Repo.one(prefix: "public") assert 1 == Job |> select(count()) |> Repo.one(prefix: "private") :ok = stop_supervised(ObanA) :ok = stop_supervised(ObanB) end test "cron jobs are scheduled using the configured timezone" do {:ok, %DateTime{hour: chi_hour}} = DateTime.now("America/Chicago") {:ok, %DateTime{hour: utc_hour}} = DateTime.now("Etc/UTC") oban_opts = [ repo: Repo, queues: [default: 5], timezone: "America/Chicago", crontab: [ {"* #{chi_hour} * * *", Worker, args: worker_args(1)}, {"* #{utc_hour} * * *", Worker, args: worker_args(2)} ] ] start_supervised!({Oban, oban_opts}) assert_receive {:ok, 1} refute_receive {:ok, 2} :ok = stop_supervised(Oban) end defp worker_args(ref) do %{ref: ref, action: "OK", bin_pid: Worker.pid_to_bin()} end end
25.745614
87
0.590801
9ec7165fb201d6c94509cf8a19c33dcbcf51f520
629
exs
Elixir
test/chess_app/web/views/error_view_test.exs
leobessa/exchess
289819d183f3001dddf56810c36298fa669c3a06
[ "MIT" ]
3
2017-06-02T20:47:07.000Z
2018-05-25T11:17:12.000Z
test/chess_app/web/views/error_view_test.exs
leobessa/exchess
289819d183f3001dddf56810c36298fa669c3a06
[ "MIT" ]
null
null
null
test/chess_app/web/views/error_view_test.exs
leobessa/exchess
289819d183f3001dddf56810c36298fa669c3a06
[ "MIT" ]
null
null
null
defmodule ChessApp.Web.ErrorViewTest do use ChessApp.Web.ConnCase, async: true # Bring render/3 and render_to_string/3 for testing custom views import Phoenix.View test "renders 404.json" do assert render(ChessApp.Web.ErrorView, "404.json", []) == %{errors: %{detail: "Page not found"}} end test "render 500.json" do assert render(ChessApp.Web.ErrorView, "500.json", []) == %{errors: %{detail: "Internal server error"}} end test "render any other" do assert render(ChessApp.Web.ErrorView, "505.json", []) == %{errors: %{detail: "Internal server error"}} end end
28.590909
66
0.651828
9ec71a6c4dff3972512e07d74e020041b8bbe9f1
20,605
ex
Elixir
lib/elixir/lib/calendar/time.ex
ReefTrip/elixir
43a4dca94ab605a0a5795d43e43e5671f4a49071
[ "Apache-2.0" ]
1
2018-11-12T22:34:19.000Z
2018-11-12T22:34:19.000Z
lib/elixir/lib/calendar/time.ex
ReefTrip/elixir
43a4dca94ab605a0a5795d43e43e5671f4a49071
[ "Apache-2.0" ]
null
null
null
lib/elixir/lib/calendar/time.ex
ReefTrip/elixir
43a4dca94ab605a0a5795d43e43e5671f4a49071
[ "Apache-2.0" ]
null
null
null
defmodule Time do @moduledoc """ A Time struct and functions. The Time struct contains the fields hour, minute, second and microseconds. New times can be built with the `new/4` function or using the `~T` (see `Kernel.sigil_T/2`) sigil: iex> ~T[23:00:07.001] ~T[23:00:07.001] Both `new/4` and sigil return a struct where the time fields can be accessed directly: iex> time = ~T[23:00:07.001] iex> time.hour 23 iex> time.microsecond {1000, 3} The functions on this module work with the `Time` struct as well as any struct that contains the same fields as the `Time` struct, such as `NaiveDateTime` and `DateTime`. Such functions expect `t:Calendar.time/0` in their typespecs (instead of `t:t/0`). Developers should avoid creating the Time structs directly and instead rely on the functions provided by this module as well as the ones in third-party calendar libraries. ## Comparing times Comparisons in Elixir using `==/2`, `>/2`, `</2` and similar are structural and based on the `Time` struct fields. For proper comparison between times, use the `compare/2` function. """ @enforce_keys [:hour, :minute, :second] defstruct [:hour, :minute, :second, microsecond: {0, 0}, calendar: Calendar.ISO] @type t :: %__MODULE__{ hour: Calendar.hour(), minute: Calendar.minute(), second: Calendar.second(), microsecond: Calendar.microsecond(), calendar: Calendar.calendar() } @parts_per_day 86_400_000_000 @doc """ Returns the current time in UTC. ## Examples iex> time = Time.utc_now() iex> time.hour >= 0 true """ @doc since: "1.4.0" @spec utc_now(Calendar.calendar()) :: t def utc_now(calendar \\ Calendar.ISO) do {:ok, _, time, microsecond} = Calendar.ISO.from_unix(:os.system_time(), :native) {hour, minute, second} = time iso_time = %Time{ hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: Calendar.ISO } convert!(iso_time, calendar) end @doc """ Builds a new time. Expects all values to be integers. Returns `{:ok, time}` if each entry fits its appropriate range, returns `{:error, reason}` otherwise. Note a time may have 60 seconds in case of leap seconds. Microseconds can also be given with a precision, which must be an integer between 0 and 6. 
## Examples iex> Time.new(0, 0, 0, 0) {:ok, ~T[00:00:00.000000]} iex> Time.new(23, 59, 59, 999_999) {:ok, ~T[23:59:59.999999]} iex> Time.new(23, 59, 60, 999_999) {:ok, ~T[23:59:60.999999]} # Time with microseconds and their precision iex> Time.new(23, 59, 60, {10_000, 2}) {:ok, ~T[23:59:60.01]} iex> Time.new(24, 59, 59, 999_999) {:error, :invalid_time} iex> Time.new(23, 60, 59, 999_999) {:error, :invalid_time} iex> Time.new(23, 59, 61, 999_999) {:error, :invalid_time} iex> Time.new(23, 59, 59, 1_000_000) {:error, :invalid_time} # Invalid precision Time.new(23, 59, 59, {999_999, 10}) {:error, :invalid_time} """ @spec new( Calendar.hour(), Calendar.minute(), Calendar.second(), Calendar.microsecond() | integer, Calendar.calendar() ) :: {:ok, t} | {:error, atom} def new(hour, minute, second, microsecond \\ {0, 0}, calendar \\ Calendar.ISO) def new(hour, minute, second, microsecond, calendar) when is_integer(microsecond) do new(hour, minute, second, {microsecond, 6}, calendar) end def new(hour, minute, second, {microsecond, precision}, calendar) when is_integer(hour) and is_integer(minute) and is_integer(second) and is_integer(microsecond) and is_integer(precision) do case calendar.valid_time?(hour, minute, second, {microsecond, precision}) do true -> time = %Time{ hour: hour, minute: minute, second: second, microsecond: {microsecond, precision}, calendar: calendar } {:ok, time} false -> {:error, :invalid_time} end end @doc """ Converts the given `time` to a string. ### Examples iex> Time.to_string(~T[23:00:00]) "23:00:00" iex> Time.to_string(~T[23:00:00.001]) "23:00:00.001" iex> Time.to_string(~T[23:00:00.123456]) "23:00:00.123456" iex> Time.to_string(~N[2015-01-01 23:00:00.001]) "23:00:00.001" iex> Time.to_string(~N[2015-01-01 23:00:00.123456]) "23:00:00.123456" """ @spec to_string(Calendar.time()) :: String.t() def to_string(time) def to_string(%{ hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: calendar }) do calendar.time_to_string(hour, minute, second, microsecond) end @doc """ Parses the extended "Local time" format described by [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601). Time zone offset may be included in the string but they will be simply discarded as such information is not included in times. As specified in the standard, the separator "T" may be omitted if desired as there is no ambiguity within this function. Time representations with reduced accuracy are not supported. Note that while ISO 8601 allows times to specify 24:00:00 as the zero hour of the next day, this notation is not supported by Elixir. 
## Examples iex> Time.from_iso8601("23:50:07") {:ok, ~T[23:50:07]} iex> Time.from_iso8601("23:50:07Z") {:ok, ~T[23:50:07]} iex> Time.from_iso8601("T23:50:07Z") {:ok, ~T[23:50:07]} iex> Time.from_iso8601("23:50:07,0123456") {:ok, ~T[23:50:07.012345]} iex> Time.from_iso8601("23:50:07.0123456") {:ok, ~T[23:50:07.012345]} iex> Time.from_iso8601("23:50:07.123Z") {:ok, ~T[23:50:07.123]} iex> Time.from_iso8601("2015:01:23 23-50-07") {:error, :invalid_format} iex> Time.from_iso8601("23:50:07A") {:error, :invalid_format} iex> Time.from_iso8601("23:50:07.") {:error, :invalid_format} iex> Time.from_iso8601("23:50:61") {:error, :invalid_time} """ @spec from_iso8601(String.t(), Calendar.calendar()) :: {:ok, t} | {:error, atom} def from_iso8601(string, calendar \\ Calendar.ISO) def from_iso8601(<<?T, rest::binary>>, calendar) do raw_from_iso8601(rest, calendar) end def from_iso8601(<<rest::binary>>, calendar) do raw_from_iso8601(rest, calendar) end [match_time, guard_time, read_time] = Calendar.ISO.__match_time__() defp raw_from_iso8601(string, calendar) do with <<unquote(match_time), rest::binary>> <- string, true <- unquote(guard_time), {microsec, rest} <- Calendar.ISO.parse_microsecond(rest), {_offset, ""} <- Calendar.ISO.parse_offset(rest) do {hour, min, sec} = unquote(read_time) with {:ok, utc_time} <- new(hour, min, sec, microsec, Calendar.ISO) do convert(utc_time, calendar) end else _ -> {:error, :invalid_format} end end @doc """ Parses the extended "Local time" format described by [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601). Raises if the format is invalid. ## Examples iex> Time.from_iso8601!("23:50:07,123Z") ~T[23:50:07.123] iex> Time.from_iso8601!("23:50:07.123Z") ~T[23:50:07.123] iex> Time.from_iso8601!("2015:01:23 23-50-07") ** (ArgumentError) cannot parse "2015:01:23 23-50-07" as time, reason: :invalid_format """ @spec from_iso8601!(String.t(), Calendar.calendar()) :: t def from_iso8601!(string, calendar \\ Calendar.ISO) do case from_iso8601(string, calendar) do {:ok, value} -> value {:error, reason} -> raise ArgumentError, "cannot parse #{inspect(string)} as time, reason: #{inspect(reason)}" end end @doc """ Converts the given time to [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601). By default, `Time.to_iso8601/2` returns times formatted in the "extended" format, for human readability. It also supports the "basic" format through passing the `:basic` option. ### Examples iex> Time.to_iso8601(~T[23:00:13]) "23:00:13" iex> Time.to_iso8601(~T[23:00:13.001]) "23:00:13.001" iex> Time.to_iso8601(~T[23:00:13.001], :basic) "230013.001" iex> Time.to_iso8601(~N[2010-04-17 23:00:13]) "23:00:13" """ @spec to_iso8601(Calendar.time(), :extended | :basic) :: String.t() def to_iso8601(time, format \\ :extended) def to_iso8601(%{calendar: Calendar.ISO} = time, format) when format in [:extended, :basic] do %{ hour: hour, minute: minute, second: second, microsecond: microsecond } = time Calendar.ISO.time_to_iso8601(hour, minute, second, microsecond, format) end def to_iso8601(%{calendar: _} = time, format) when format in [:extended, :basic] do time |> convert!(Calendar.ISO) |> to_iso8601(format) end @doc """ Converts given `time` to an Erlang time tuple. WARNING: Loss of precision may occur, as Erlang time tuples only contain hours/minutes/seconds. 
## Examples iex> Time.to_erl(~T[23:30:15.999]) {23, 30, 15} iex> Time.to_erl(~N[2010-04-17 23:30:15.999]) {23, 30, 15} """ @spec to_erl(Calendar.time()) :: :calendar.time() def to_erl(time) do %{hour: hour, minute: minute, second: second} = convert!(time, Calendar.ISO) {hour, minute, second} end @doc """ Converts an Erlang time tuple to a `Time` struct. ## Examples iex> Time.from_erl({23, 30, 15}, {5000, 3}) {:ok, ~T[23:30:15.005]} iex> Time.from_erl({24, 30, 15}) {:error, :invalid_time} """ @spec from_erl(:calendar.time(), Calendar.microsecond(), Calendar.calendar()) :: {:ok, t} | {:error, atom} def from_erl(tuple, microsecond \\ {0, 0}, calendar \\ Calendar.ISO) def from_erl({hour, minute, second}, microsecond, calendar) do with {:ok, time} <- new(hour, minute, second, microsecond, Calendar.ISO), do: convert(time, calendar) end @doc """ Converts an Erlang time tuple to a `Time` struct. ## Examples iex> Time.from_erl!({23, 30, 15}) ~T[23:30:15] iex> Time.from_erl!({23, 30, 15}, {5000, 3}) ~T[23:30:15.005] iex> Time.from_erl!({24, 30, 15}) ** (ArgumentError) cannot convert {24, 30, 15} to time, reason: :invalid_time """ @spec from_erl!(:calendar.time(), Calendar.microsecond(), Calendar.calendar()) :: t def from_erl!(tuple, microsecond \\ {0, 0}, calendar \\ Calendar.ISO) do case from_erl(tuple, microsecond, calendar) do {:ok, value} -> value {:error, reason} -> raise ArgumentError, "cannot convert #{inspect(tuple)} to time, reason: #{inspect(reason)}" end end @doc """ Adds the `number` of `unit`s to the given `time`. This function accepts the `number` measured according to `Calendar.ISO`. The time is returned in the same calendar as it was given in. Note the result value represents the time of day, meaning that it is cyclic, for instance, it will never go over 24 hours for the ISO calendar. ## Examples iex> Time.add(~T[10:00:00], 27000) ~T[17:30:00.000000] iex> Time.add(~T[11:00:00.005], 2400) ~T[11:40:00.005000] iex> Time.add(~T[00:00:00], 86_399_999, :millisecond) ~T[23:59:59.999000] iex> Time.add(~T[17:10:05], 86400) ~T[17:10:05.000000] iex> Time.add(~T[23:00:00], -60) ~T[22:59:00.000000] """ @doc since: "1.6.0" @spec add(Calendar.time(), integer, System.time_unit()) :: t def add(%{calendar: calendar} = time, number, unit \\ :second) when is_integer(number) do number = System.convert_time_unit(number, unit, :microsecond) total = time_to_microseconds(time) + number parts = Integer.mod(total, @parts_per_day) {hour, minute, second, microsecond} = calendar.time_from_day_fraction({parts, @parts_per_day}) %Time{ hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: calendar } end defp time_to_microseconds(%{ calendar: Calendar.ISO, hour: 0, minute: 0, second: 0, microsecond: {0, _} }) do 0 end defp time_to_microseconds(time) do iso_days = {0, to_day_fraction(time)} Calendar.ISO.iso_days_to_unit(iso_days, :microsecond) end @doc """ Compares two time structs. Returns `:gt` if first time is later than the second and `:lt` for vice versa. If the two times are equal `:eq` is returned. 
## Examples iex> Time.compare(~T[16:04:16], ~T[16:04:28]) :lt iex> Time.compare(~T[16:04:16], ~T[16:04:16]) :eq iex> Time.compare(~T[16:04:16.01], ~T[16:04:16.001]) :gt This function can also be used to compare across more complex calendar types by considering only the time fields: iex> Time.compare(~N[1900-01-01 16:04:16], ~N[2015-01-01 16:04:16]) :eq iex> Time.compare(~N[2015-01-01 16:04:16], ~N[2015-01-01 16:04:28]) :lt iex> Time.compare(~N[2015-01-01 16:04:16.01], ~N[2000-01-01 16:04:16.001]) :gt """ @doc since: "1.4.0" @spec compare(Calendar.time(), Calendar.time()) :: :lt | :eq | :gt def compare(%{calendar: calendar} = time1, %{calendar: calendar} = time2) do %{hour: hour1, minute: minute1, second: second1, microsecond: {microsecond1, _}} = time1 %{hour: hour2, minute: minute2, second: second2, microsecond: {microsecond2, _}} = time2 case {{hour1, minute1, second1, microsecond1}, {hour2, minute2, second2, microsecond2}} do {first, second} when first > second -> :gt {first, second} when first < second -> :lt _ -> :eq end end def compare(time1, time2) do {parts1, ppd1} = to_day_fraction(time1) {parts2, ppd2} = to_day_fraction(time2) case {parts1 * ppd2, parts2 * ppd1} do {first, second} when first > second -> :gt {first, second} when first < second -> :lt _ -> :eq end end @doc """ Converts given `time` to a different calendar. Returns `{:ok, time}` if the conversion was successful, or `{:error, reason}` if it was not, for some reason. ## Examples Imagine someone implements `Calendar.Holocene`, a calendar based on the Gregorian calendar that adds exactly 10,000 years to the current Gregorian year: iex> Time.convert(~T[13:30:15], Calendar.Holocene) {:ok, %Time{calendar: Calendar.Holocene, hour: 13, minute: 30, second: 15, microsecond: {0, 0}}} """ @doc since: "1.5.0" @spec convert(Calendar.time(), Calendar.calendar()) :: {:ok, t} | {:error, atom} # Keep it multiline for proper function clause errors. def convert( %{ calendar: calendar, hour: hour, minute: minute, second: second, microsecond: microsecond }, calendar ) do time = %Time{ calendar: calendar, hour: hour, minute: minute, second: second, microsecond: microsecond } {:ok, time} end def convert(%{microsecond: {_, precision}} = time, calendar) do {hour, minute, second, {microsecond, _}} = time |> to_day_fraction() |> calendar.time_from_day_fraction() time = %Time{ calendar: calendar, hour: hour, minute: minute, second: second, microsecond: {microsecond, precision} } {:ok, time} end @doc """ Similar to `Time.convert/2`, but raises an `ArgumentError` if the conversion between the two calendars is not possible. ## Examples Imagine someone implements `Calendar.Holocene`, a calendar based on the Gregorian calendar that adds exactly 10,000 years to the current Gregorian year: iex> Time.convert!(~T[13:30:15], Calendar.Holocene) %Time{calendar: Calendar.Holocene, hour: 13, minute: 30, second: 15, microsecond: {0, 0}} """ @doc since: "1.5.0" @spec convert!(Calendar.time(), Calendar.calendar()) :: t def convert!(time, calendar) do case convert(time, calendar) do {:ok, value} -> value {:error, reason} -> raise ArgumentError, "cannot convert #{inspect(time)} to target calendar #{inspect(calendar)}, " <> "reason: #{inspect(reason)}" end end @doc """ Returns the difference between two times, considering only the hour, minute, second and microsecond. As with the `compare/2` function both `Time` structs and other structures containing time can be used. 
If for instance a `NaiveDateTime` or `DateTime` is passed, only the hour, month, second, and microsecond is considered. Any additional information about a date or time zone is ignored when calculating the difference. The answer can be returned in any `unit` available from `t:System.time_unit/0`. If the first unit is smaller than the second, a negative number is returned. This function returns the difference in seconds where seconds are measured according to `Calendar.ISO`. ## Examples iex> Time.diff(~T[00:29:12], ~T[00:29:10]) 2 # When passing a `NaiveDateTime` the date part is ignored. iex> Time.diff(~N[2017-01-01 00:29:12], ~T[00:29:10]) 2 # Two `NaiveDateTime` structs could have big differences in the date # but only the time part is considered. iex> Time.diff(~N[2017-01-01 00:29:12], ~N[1900-02-03 00:29:10]) 2 iex> Time.diff(~T[00:29:12], ~T[00:29:10], :microsecond) 2_000_000 iex> Time.diff(~T[00:29:10], ~T[00:29:12], :microsecond) -2_000_000 """ @doc since: "1.5.0" @spec diff(Calendar.time(), Calendar.time(), System.time_unit()) :: integer def diff(time1, time2, unit \\ :second) def diff( %{ calendar: Calendar.ISO, hour: hour1, minute: minute1, second: second1, microsecond: {microsecond1, @parts_per_day} }, %{ calendar: Calendar.ISO, hour: hour2, minute: minute2, second: second2, microsecond: {microsecond2, @parts_per_day} }, unit ) do total = (hour1 - hour2) * 3_600_000_000 + (minute1 - minute2) * 60_000_000 + (second1 - second2) * 1_000_000 + (microsecond1 - microsecond2) System.convert_time_unit(total, :microsecond, unit) end def diff(time1, time2, unit) do fraction1 = to_day_fraction(time1) fraction2 = to_day_fraction(time2) Calendar.ISO.iso_days_to_unit({0, fraction1}, unit) - Calendar.ISO.iso_days_to_unit({0, fraction2}, unit) end @doc """ Returns the given time with the microsecond field truncated to the given precision (`:microsecond`, `millisecond` or `:second`). The given time is returned unchanged if it already has lower precision than the given precision. ## Examples iex> Time.truncate(~T[01:01:01.123456], :microsecond) ~T[01:01:01.123456] iex> Time.truncate(~T[01:01:01.123456], :millisecond) ~T[01:01:01.123] iex> Time.truncate(~T[01:01:01.123456], :second) ~T[01:01:01] """ @doc since: "1.6.0" @spec truncate(t(), :microsecond | :millisecond | :second) :: t() def truncate(%Time{microsecond: microsecond} = time, precision) do %{time | microsecond: Calendar.truncate(microsecond, precision)} end ## Helpers defp to_day_fraction(%{ hour: hour, minute: minute, second: second, microsecond: {_, _} = microsecond, calendar: calendar }) do calendar.time_to_day_fraction(hour, minute, second, microsecond) end defimpl String.Chars do def to_string(time) do %{ hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: calendar } = time calendar.time_to_string(hour, minute, second, microsecond) end end defimpl Inspect do def inspect(%{calendar: Calendar.ISO} = time, _) do %{ hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: Calendar.ISO } = time "~T[" <> Calendar.ISO.time_to_string(hour, minute, second, microsecond) <> "]" end def inspect(time, opts) do Inspect.Any.inspect(time, opts) end end end
28.737796
102
0.621111
9ec7534a901d23b4f3aa98fd56348b6d3d495f20
3,413
ex
Elixir
lib/dovetail/deliver.ex
thusfresh/dovetail
f4a498f0db572484b6fe70a3885ce4a89e3a3492
[ "BSD-3-Clause" ]
2
2016-02-07T22:36:34.000Z
2016-02-09T13:33:01.000Z
lib/dovetail/deliver.ex
thusfresh/dovetail
f4a498f0db572484b6fe70a3885ce4a89e3a3492
[ "BSD-3-Clause" ]
1
2015-08-10T10:00:18.000Z
2015-08-10T15:44:21.000Z
lib/dovetail/deliver.ex
thusfresh/dovetail
f4a498f0db572484b6fe70a3885ce4a89e3a3492
[ "BSD-3-Clause" ]
null
null
null
defmodule Dovetail.Deliver do @moduledoc """ Use dovecot's LDA [deliver] to deliver mail. """ alias Dovetail.Config require Logger require EEx @timeout 5000 @default_exec_path Path.join(Config.dovecot_path(), "libexec/dovecot/deliver") @doc """ Deliver the email to the dovecot user. ## Options * `:exec_path :: String.t` the path to the deliver executable. """ @spec call(String.t, String.t, Keyword.t) :: :ok | {:error, any} def call(username, email, options \\ []) when is_binary(username) and is_binary(email) and is_list(options) do exec_path = Keyword.get(options, :exec_path, @default_exec_path) args = ["-c", Config.conf_path(), "-e", "-d", username] true = :erlang.open_port({:spawn_executable, exec_path}, [:use_stdio | [args: args]]) |> send_email(email) |> :erlang.port_close() :ok end # Date: Fri, 21 Nov 1997 09:55:06 -0600 @email_template """ From: <%= @from %> To: <%= @to %> Subject: <%= @subject %> Date: <%= @date %> Message-ID: <%= @message_id %> <%= @body %> """ EEx.function_from_string :def, :new_email, @email_template, [:assigns] def new_message_id do {:ok, host} = :inet.gethostname() "#{:erlang.unique_integer()}@#{host}.com" end # Private Functions defmodule DateTimeOffset do defstruct [:datetime, :offset] @type t :: %__MODULE__{datetime: :calendar.datetime, offset: integer} @spec now :: t def now do # For now, return universal time and an time zone adjust of 0 %__MODULE__{datetime: :calendar.universal_time(), offset: 0} end end @spec send_email(Port.t, String.t) :: Port.t defp send_email(port, email) do true = :erlang.port_command(port, email) port end end defimpl String.Chars, for: Dovetail.Deliver.DateTimeOffset do alias Dovetail.Deliver.DateTimeOffset def to_string(%DateTimeOffset{datetime: {{year, month, day} = date, time}, offset: 0}) do # Example: Wed Feb 10 11:23:57 2016 join([:calendar.day_of_the_week(date) |> weekday_to_string(), month_to_string(month), int_to_string(day), time_to_string(time), Integer.to_string(year)], " ") end @weekdays ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] for {weekday, index} <- Enum.with_index(@weekdays) do defp weekday_to_string(unquote(index + 1)), do: unquote(weekday) end @months ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"] for {month, index} <- Enum.with_index(@months) do defp month_to_string(unquote(index + 1)), do: unquote(month) end defp time_to_string({hours, minutes, seconds}) do join([int_to_string(hours), int_to_string(minutes), int_to_string(seconds)], ":") end @spec join([String.t], String.t) :: String.t defp join(strings, spacer) do join(Enum.reverse(strings), spacer, "") end defp join([], _spacer, acc), do: acc defp join([string], spacer, acc) do join([], spacer, string <> acc) end defp join([string | strings], spacer, acc) do join(strings, spacer, spacer <> string <> acc) end @spec int_to_string(integer, integer) :: String.t defp int_to_string(int, padding \\ 2) when is_integer(int) do Integer.to_string(int) |> String.rjust(padding, ?0) end end
27.524194
76
0.622912
9ec7542ba5a8cdfe2cece85bd96b640c4068a5b8
52
exs
Elixir
priv/repo/migrations/.formatter.exs
SquashConsulting/hopper
d68ac8b4749b2411959c2ba7be7cd9402a3e4b2b
[ "BSD-3-Clause" ]
1
2019-12-22T16:00:11.000Z
2019-12-22T16:00:11.000Z
priv/repo/migrations/.formatter.exs
SquashConsulting/hopper
d68ac8b4749b2411959c2ba7be7cd9402a3e4b2b
[ "BSD-3-Clause" ]
2
2021-03-10T02:31:42.000Z
2021-05-10T22:02:29.000Z
priv/repo/migrations/.formatter.exs
SquashConsulting/hopper
d68ac8b4749b2411959c2ba7be7cd9402a3e4b2b
[ "BSD-3-Clause" ]
null
null
null
[ inputs: ["*.exs"], import_deps: [:ecto_aql] ]
10.4
26
0.538462
9ec75ad8e71beef21f1e42defa46527a4568c069
3,885
ex
Elixir
lib/phoenix_live_view_collection/collection.ex
craft-ex/phoenix_live_view_collection
e0e05e6a5008dcd66d6121c54ec879b1d283c48a
[ "MIT" ]
4
2021-05-04T13:14:32.000Z
2021-09-04T12:16:44.000Z
lib/phoenix_live_view_collection/collection.ex
craft-ex/phoenix_live_view_collection
e0e05e6a5008dcd66d6121c54ec879b1d283c48a
[ "MIT" ]
null
null
null
lib/phoenix_live_view_collection/collection.ex
craft-ex/phoenix_live_view_collection
e0e05e6a5008dcd66d6121c54ec879b1d283c48a
[ "MIT" ]
null
null
null
defmodule LiveViewCollection.Collection do
  @moduledoc "Source of truth for collection entries"
  use GenServer
  require Logger

  @default_page_size 15

  @spec start_link(keyword()) :: GenServer.on_start()
  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  @spec all() :: [map()]
  def all do
    GenServer.call(__MODULE__, :all)
  end

  @spec count() :: non_neg_integer()
  def count do
    GenServer.call(__MODULE__, :count)
  end

  @spec count(String.t()) :: non_neg_integer()
  def count(search) do
    search = cast_search(search)
    GenServer.call(__MODULE__, {:count, search})
  end

  @spec pages(String.t()) :: pos_integer()
  def pages(search) do
    collection_count = count(search)
    pages = (collection_count / @default_page_size) |> Float.ceil() |> round()
    if pages <= 1, do: 1, else: pages
  end

  @spec fetch(Keyword.t()) :: [map()]
  def fetch(opts \\ []) do
    search = opts |> Keyword.get(:search) |> cast_search()
    page = opts |> Keyword.get(:page) |> cast_page()
    GenServer.call(__MODULE__, {:fetch, search, page})
  end

  defp cast_search(search) when is_binary(search) do
    String.trim(search)
  end

  defp cast_search(_), do: ""

  defp cast_page(page) when is_integer(page) and page >= 1, do: page
  defp cast_page(_), do: 1

  ## Callbacks

  @impl GenServer
  def init(opts) do
    if entries = Keyword.get(opts, :override_entries) do
      {:ok, entries}
    else
      send(self(), :load_collection)
      {:ok, []}
    end
  end

  @impl GenServer
  def handle_info(:load_collection, _state) do
    collection =
      with {:ok, collection} <- YamlElixir.read_from_file(Path.join(File.cwd!(), "collection.yml")),
           :ok <- Logger.debug(fn -> "[Collection] Loading #{length(collection)} entries from collection.yml" end),
           {:ok, collection} <- request_embeded_tweets(collection),
           :ok <- Logger.debug(fn -> "[Collection] Loaded #{length(collection)} entries from twitter" end) do
        collection
      else
        {:error, error} ->
          Logger.error(fn -> "[Collection] Error loading collection: #{inspect(error)}" end)
          []
      end

    {:noreply, collection}
  end

  defp request_embeded_tweets([]), do: {:ok, []}

  defp request_embeded_tweets(collection) do
    {:ok, collection |> Enum.map(&fetch_tweet(&1["tweet_url"])) |> Enum.reject(&is_nil/1)}
  end

  defp fetch_tweet(tweet_url) when is_nil(tweet_url) or tweet_url == "", do: nil

  defp fetch_tweet(tweet_url) when is_binary(tweet_url) do
    with {:ok, %{body: body, status_code: 200}} <-
           Mojito.request(
             method: :get,
             url: "https://publish.twitter.com/oembed?url=#{tweet_url}&omit_script=true&hide_thread=true&dnt=true"
           ),
         {:ok, tweet} <- Jason.decode(body) do
      tweet
    else
      _ ->
        Logger.error(fn -> "[Collection] Error fetching embeded tweet: #{tweet_url}" end)
        nil
    end
  end

  @impl GenServer
  def handle_call(:all, _from, state) do
    {:reply, state, state}
  end

  def handle_call(:count, _from, state) do
    {:reply, length(state), state}
  end

  def handle_call({:count, search}, _from, state) do
    count = state |> do_filter(search) |> length()
    {:reply, count, state}
  end

  def handle_call({:fetch, search, page}, _from, state) do
    result = state |> do_filter(search) |> do_paginate(page)
    {:reply, result, state}
  end

  defp do_filter(collection, ""), do: collection

  defp do_filter(collection, search) do
    {:ok, regex} = Regex.compile(search, "i")

    Enum.filter(collection, fn
      %{"html" => search} -> String.match?(search, regex)
      _ -> false
    end)
  end

  defp do_paginate(collection, page) do
    Enum.slice(collection, (page - 1) * @default_page_size, @default_page_size)
  end
end
26.793103
115
0.630888
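A minimal sketch of the pagination arithmetic used by `pages/1` and `do_paginate/2` in the collection.ex content above, assuming the same 15-entry page size; the module name `PaginationSketch` is hypothetical and not part of that repository.

# Standalone reduction of the pagination math shown above (hypothetical module).
defmodule PaginationSketch do
  @page_size 15

  # Number of pages needed for `count` entries, never less than 1.
  def pages(count) do
    pages = (count / @page_size) |> Float.ceil() |> round()
    if pages <= 1, do: 1, else: pages
  end

  # Entries belonging to `page` (1-based), mirroring the Enum.slice/3 call above.
  def paginate(entries, page) do
    Enum.slice(entries, (page - 1) * @page_size, @page_size)
  end
end

# Example: 31 entries fit on 3 pages; page 3 holds only the last entry.
# PaginationSketch.pages(31)                          #=> 3
# PaginationSketch.paginate(Enum.to_list(1..31), 3)   #=> [31]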
9ec786326a026954a43f1f03c04243a4eed8e027
202
ex
Elixir
lib/elixibop.ex
derrickp/elixibop
e84cee0d0ce1b194a7de91aaefbb0b9ff3c4c9df
[ "MIT" ]
null
null
null
lib/elixibop.ex
derrickp/elixibop
e84cee0d0ce1b194a7de91aaefbb0b9ff3c4c9df
[ "MIT" ]
null
null
null
lib/elixibop.ex
derrickp/elixibop
e84cee0d0ce1b194a7de91aaefbb0b9ff3c4c9df
[ "MIT" ]
null
null
null
defmodule Elixibop do
  @moduledoc """
  Documentation for Elixibop.
  """

  @doc """
  Hello world.

  ## Examples

      iex> Elixibop.hello()
      :world

  """
  def hello do
    :world
  end
end
10.631579
29
0.569307
9ec79e25768c6ef2c643dbee7c91218dffcdc38e
270
exs
Elixir
apps/snitch_api/config/test.exs
VeryBigThings/avia
7ce5d5b244ae0dfddc30c09c17efe27f1718a4c9
[ "MIT" ]
1
2021-04-08T22:29:19.000Z
2021-04-08T22:29:19.000Z
apps/snitch_api/config/test.exs
VeryBigThings/avia
7ce5d5b244ae0dfddc30c09c17efe27f1718a4c9
[ "MIT" ]
null
null
null
apps/snitch_api/config/test.exs
VeryBigThings/avia
7ce5d5b244ae0dfddc30c09c17efe27f1718a4c9
[ "MIT" ]
null
null
null
import Config

# We don't run a server during test. If one is required,
# you can enable the server option below.
config :snitch_api, SnitchApiWeb.Endpoint,
  http: [port: 4001],
  server: false

# Print only warnings and errors during test
config :logger, level: :warn
24.545455
56
0.744444
9ec7afeec7266bcb413ff4a83c7df2c1952eecfd
1,441
exs
Elixir
mix.exs
ashton314/excoveralls
58bcdd2d10af7c999cf1214cb79b4646ffde44aa
[ "MIT" ]
null
null
null
mix.exs
ashton314/excoveralls
58bcdd2d10af7c999cf1214cb79b4646ffde44aa
[ "MIT" ]
null
null
null
mix.exs
ashton314/excoveralls
58bcdd2d10af7c999cf1214cb79b4646ffde44aa
[ "MIT" ]
null
null
null
defmodule ExCoveralls.Mixfile do
  use Mix.Project

  def project do
    [
      app: :excoveralls,
      version: "0.13.1",
      elixir: "~> 1.3",
      elixirc_paths: elixirc_paths(Mix.env()),
      deps: deps(),
      description: description(),
      package: package(),
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env:
        cli_env_for(:test, [
          "coveralls",
          "coveralls.detail",
          "coveralls.html",
          "coveralls.json",
          "coveralls.post",
        ])
    ]
  end

  defp cli_env_for(env, tasks) do
    Enum.reduce(tasks, [], fn(key, acc) -> Keyword.put(acc, :"#{key}", env) end)
  end

  # Configuration for the OTP application
  def application do
    []
  end

  # Specifies which paths to compile per environment.
  defp elixirc_paths(:test), do: ["lib", "test/fixtures/test_missing.ex"]
  defp elixirc_paths(_), do: ["lib"]

  # Returns the list of dependencies in the format:
  # { :foobar, "~> 0.1", git: "https://github.com/elixir-lang/foobar.git" }
  def deps do
    [
      {:mock, "~> 0.2", only: :test},
      {:meck, "~> 0.8", only: :test},
      {:jason, "~> 1.0"},
      {:hackney, "~> 1.16"},
      {:ex_doc, "~> 0.21.0", only: :dev}
    ]
  end

  defp description do
    """
    Coverage report tool for Elixir with coveralls.io integration.
    """
  end

  defp package do
    [
      maintainers: ["parroty"],
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/parroty/excoveralls"}
    ]
  end
end
25.732143
94
0.591256
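A small sketch of what the `cli_env_for/2` reduce in the mix.exs above evaluates to; the `tasks` binding is a hypothetical stand-in for the list passed in `project/0`.

# Each task name becomes a keyword entry pointing at the :test environment.
tasks = ["coveralls", "coveralls.detail", "coveralls.html", "coveralls.json", "coveralls.post"]

preferred_cli_env =
  Enum.reduce(tasks, [], fn key, acc -> Keyword.put(acc, :"#{key}", :test) end)

# Keyword.put/3 prepends new pairs, so the result lists the tasks in reverse order:
# [{:"coveralls.post", :test}, {:"coveralls.json", :test}, {:"coveralls.html", :test},
#  {:"coveralls.detail", :test}, {:coveralls, :test}]
IO.inspect(preferred_cli_env)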
9ec7c781a48b946545c2822c9ffc5ed35fc38637
17,774
ex
Elixir
lib/elixir/lib/task/supervisor.ex
samgaw/elixir
92e8263102d95281a5aa7850b747636805f13fc9
[ "Apache-2.0" ]
null
null
null
lib/elixir/lib/task/supervisor.ex
samgaw/elixir
92e8263102d95281a5aa7850b747636805f13fc9
[ "Apache-2.0" ]
null
null
null
lib/elixir/lib/task/supervisor.ex
samgaw/elixir
92e8263102d95281a5aa7850b747636805f13fc9
[ "Apache-2.0" ]
null
null
null
defmodule Task.Supervisor do @moduledoc """ A task supervisor. This module defines a supervisor which can be used to dynamically supervise tasks. A task supervisor is started with no children, often under a supervisor and a name: children = [ {Task.Supervisor, name: MyApp.TaskSupervisor} ] Supervisor.start_link(children, strategy: :one_for_one) The options given in the child specification are documented in `start_link/1`. See the `Task` module for more examples. ## Name registration A `Task.Supervisor` is bound to the same name registration rules as a `GenServer`. Read more about them in the `GenServer` docs. """ @typedoc "Option values used by `start_link`" @type option :: DynamicSupervisor.option() | DynamicSupervisor.init_option() @doc false def child_spec(opts) when is_list(opts) do id = case Keyword.get(opts, :name, Task.Supervisor) do name when is_atom(name) -> name {:global, name} -> name {:via, _module, name} -> name end %{ id: id, start: {Task.Supervisor, :start_link, [opts]}, type: :supervisor } end @doc """ Starts a new supervisor. ## Examples A task supervisor is typically started under a supervision tree using the tuple format: {Task.Supervisor, name: MyApp.TaskSupervisor} You can also start it by calling `start_link/1` directly: Task.Supervisor.start_link(name: MyApp.TaskSupervisor) But this is recommended only for scripting and should be avoided in production code. Generally speaking, processes should always be started inside supervision trees. ## Options * `:name` - used to register a supervisor name, the supported values are described under the `Name Registration` section in the `GenServer` module docs; * `:max_restarts`, `:max_seconds`, and `:max_children` - as specified in `DynamicSupervisor`; This function could also receive `:restart` and `:shutdown` as options but those two options have been deprecated and it is now preferred to give them directly to `start_child` and `async`. """ @spec start_link([option]) :: Supervisor.on_start() def start_link(options \\ []) do {restart, options} = Keyword.pop(options, :restart) {shutdown, options} = Keyword.pop(options, :shutdown) if restart || shutdown do IO.warn( ":restart and :shutdown options in Task.Supervisor.start_link/1 " <> "are deprecated. Please pass those options on start_child/3 instead" ) end keys = [:max_children, :max_seconds, :max_restarts] {sup_opts, start_opts} = Keyword.split(options, keys) restart_and_shutdown = {restart || :temporary, shutdown || 5000} DynamicSupervisor.start_link(__MODULE__, {restart_and_shutdown, sup_opts}, start_opts) end @doc false def init({{_restart, _shutdown} = arg, options}) do Process.put(__MODULE__, arg) DynamicSupervisor.init([strategy: :one_for_one] ++ options) end @doc """ Starts a task that can be awaited on. The `supervisor` must be a reference as defined in `Supervisor`. The task will still be linked to the caller, see `Task.async/3` for more information and `async_nolink/2` for a non-linked variant. Raises an error if `supervisor` has reached the maximum number of children. ## Options * `:shutdown` - `:brutal_kill` if the tasks must be killed directly on shutdown or an integer indicating the timeout value, defaults to 5000 milliseconds. """ @spec async(Supervisor.supervisor(), (() -> any), Keyword.t()) :: Task.t() def async(supervisor, fun, options \\ []) do async(supervisor, :erlang, :apply, [fun, []], options) end @doc """ Starts a task that can be awaited on. The `supervisor` must be a reference as defined in `Supervisor`. 
The task will still be linked to the caller, see `Task.async/3` for more information and `async_nolink/2` for a non-linked variant. Raises an error if `supervisor` has reached the maximum number of children. ## Options * `:shutdown` - `:brutal_kill` if the tasks must be killed directly on shutdown or an integer indicating the timeout value, defaults to 5000 milliseconds. """ @spec async(Supervisor.supervisor(), module, atom, [term], Keyword.t()) :: Task.t() def async(supervisor, module, fun, args, options \\ []) do async(supervisor, :link, module, fun, args, options) end @doc """ Starts a task that can be awaited on. The `supervisor` must be a reference as defined in `Supervisor`. The task won't be linked to the caller, see `Task.async/3` for more information. Raises an error if `supervisor` has reached the maximum number of children. ## Options * `:shutdown` - `:brutal_kill` if the tasks must be killed directly on shutdown or an integer indicating the timeout value, defaults to 5000 milliseconds. ## Compatibility with OTP behaviours If you create a task using `async_nolink` inside an OTP behaviour like `GenServer`, you should match on the message coming from the task inside your `c:GenServer.handle_info/2` callback. The reply sent by the task will be in the format `{ref, result}`, where `ref` is the monitor reference held by the task struct and `result` is the return value of the task function. Keep in mind that, regardless of how the task created with `async_nolink` terminates, the caller's process will always receive a `:DOWN` message with the same `ref` value that is held by the task struct. If the task terminates normally, the reason in the `:DOWN` message will be `:normal`. ## Examples Typically, you use `async_nolink/3` when there is a reasonable expectation that the task may fail, and you don't want it to take down the caller. Let's see an example where a `GenServer` is meant to run a single task and track its status: defmodule MyApp.Server do use GenServer # ... def start_task do GenServer.call(__MODULE__, :start_task) end # In this case the task is already running, so we just return :ok. def handle_call(:start_task, _from, %{ref: ref} = state) when is_reference(ref) do {:reply, :ok, state} end # The task is not running yet, so let's start it. def handle_call(:start_task, _from, %{ref: nil} = state) do task = Task.Supervisor.async_nolink(MyApp.TaskSupervisor, fn -> ... end) # We return :ok and the server will continue running {:reply, :ok, %{state | ref: task.ref}} end # The task completed successfully def handle_info({ref, answer}, %{ref: ref} = state) do # We don't care about the DOWN message now, so let's demonitor and flush it Process.demonitor(ref, [:flush]) # Do something with the result and then return {:noreply, %{state | ref: nil}} end # The task failed def handle_info({:DOWN, ref, :process, _pid, _reason}, %{ref: ref} = state) do # Log and possibly restart the task... {:noreply, %{state | ref: nil}} end end """ @spec async_nolink(Supervisor.supervisor(), (() -> any), Keyword.t()) :: Task.t() def async_nolink(supervisor, fun, options \\ []) do async_nolink(supervisor, :erlang, :apply, [fun, []], options) end @doc """ Starts a task that can be awaited on. The `supervisor` must be a reference as defined in `Supervisor`. The task won't be linked to the caller, see `Task.async/3` for more information. Raises an error if `supervisor` has reached the maximum number of children. 
Note this function requires the task supervisor to have `:temporary` as the `:restart` option (the default), as `async_nolink/4` keeps a direct reference to the task which is lost if the task is restarted. """ @spec async_nolink(Supervisor.supervisor(), module, atom, [term], Keyword.t()) :: Task.t() def async_nolink(supervisor, module, fun, args, options \\ []) do async(supervisor, :nolink, module, fun, args, options) end @doc """ Returns a stream where the given function (`module` and `function`) is mapped concurrently on each element in `enumerable`. Each element will be prepended to the given `args` and processed by its own task. The tasks will be spawned under the given `supervisor` and linked to the caller process, similarly to `async/4`. When streamed, each task will emit `{:ok, value}` upon successful completion or `{:exit, reason}` if the caller is trapping exits. The order of results depends on the value of the `:ordered` option. The level of concurrency and the time tasks are allowed to run can be controlled via options (see the "Options" section below). If you find yourself trapping exits to handle exits inside the async stream, consider using `async_stream_nolink/6` to start tasks that are not linked to the calling process. ## Options * `:max_concurrency` - sets the maximum number of tasks to run at the same time. Defaults to `System.schedulers_online/0`. * `:ordered` - whether the results should be returned in the same order as the input stream. This option is useful when you have large streams and don't want to buffer results before they are delivered. This is also useful when you're using the tasks for side effects. Defaults to `true`. * `:timeout` - the maximum amount of time to wait (in milliseconds) without receiving a task reply (across all running tasks). Defaults to `5000`. * `:on_timeout` - what do to when a task times out. The possible values are: * `:exit` (default) - the process that spawned the tasks exits. * `:kill_task` - the task that timed out is killed. The value emitted for that task is `{:exit, :timeout}`. * `:shutdown` - `:brutal_kill` if the tasks must be killed directly on shutdown or an integer indicating the timeout value. Defaults to `5000` milliseconds. ## Examples Let's build a stream and then enumerate it: stream = Task.Supervisor.async_stream(MySupervisor, collection, Mod, :expensive_fun, []) Enum.to_list(stream) """ @doc since: "1.4.0" @spec async_stream(Supervisor.supervisor(), Enumerable.t(), module, atom, [term], keyword) :: Enumerable.t() def async_stream(supervisor, enumerable, module, function, args, options \\ []) when is_atom(module) and is_atom(function) and is_list(args) do build_stream(supervisor, :link, enumerable, {module, function, args}, options) end @doc """ Returns a stream that runs the given function `fun` concurrently on each element in `enumerable`. Each element in `enumerable` is passed as argument to the given function `fun` and processed by its own task. The tasks will be spawned under the given `supervisor` and linked to the caller process, similarly to `async/2`. See `async_stream/6` for discussion, options, and examples. 
""" @doc since: "1.4.0" @spec async_stream(Supervisor.supervisor(), Enumerable.t(), (term -> term), keyword) :: Enumerable.t() def async_stream(supervisor, enumerable, fun, options \\ []) when is_function(fun, 1) do build_stream(supervisor, :link, enumerable, fun, options) end @doc """ Returns a stream where the given function (`module` and `function`) is mapped concurrently on each element in `enumerable`. Each element in `enumerable` will be prepended to the given `args` and processed by its own task. The tasks will be spawned under the given `supervisor` and will not be linked to the caller process, similarly to `async_nolink/4`. See `async_stream/6` for discussion, options, and examples. """ @doc since: "1.4.0" @spec async_stream_nolink( Supervisor.supervisor(), Enumerable.t(), module, atom, [term], keyword ) :: Enumerable.t() def async_stream_nolink(supervisor, enumerable, module, function, args, options \\ []) when is_atom(module) and is_atom(function) and is_list(args) do build_stream(supervisor, :nolink, enumerable, {module, function, args}, options) end @doc """ Returns a stream that runs the given `function` concurrently on each element in `enumerable`. Each element in `enumerable` is passed as argument to the given function `fun` and processed by its own task. The tasks will be spawned under the given `supervisor` and will not be linked to the caller process, similarly to `async_nolink/2`. See `async_stream/6` for discussion and examples. """ @doc since: "1.4.0" @spec async_stream_nolink(Supervisor.supervisor(), Enumerable.t(), (term -> term), keyword) :: Enumerable.t() def async_stream_nolink(supervisor, enumerable, fun, options \\ []) when is_function(fun, 1) do build_stream(supervisor, :nolink, enumerable, fun, options) end @doc """ Terminates the child with the given `pid`. """ @spec terminate_child(Supervisor.supervisor(), pid) :: :ok | {:error, :not_found} def terminate_child(supervisor, pid) when is_pid(pid) do DynamicSupervisor.terminate_child(supervisor, pid) end @doc """ Returns all children PIDs. """ @spec children(Supervisor.supervisor()) :: [pid] def children(supervisor) do for {_, pid, _, _} <- DynamicSupervisor.which_children(supervisor), is_pid(pid), do: pid end @doc """ Starts a task as a child of the given `supervisor`. Task.Supervisor.start_child(MyTaskSupervisor, fn -> IO.puts "I am running in a task" end) Note that the spawned process is not linked to the caller, but only to the supervisor. This command is useful in case the task needs to perform side-effects (like I/O) and you have no interest on its results nor if it completes successfully. ## Options * `:restart` - the restart strategy, may be `:temporary` (the default), `:transient` or `:permanent`. `:temporary` means the task is never restarted, `:transient` means it is restarted if the exit is not `:normal`, `:shutdown` or `{:shutdown, reason}`. A `:permanent` restart strategy means it is always restarted. It defaults to `:temporary`. * `:shutdown` - `:brutal_kill` if the tasks must be killed directly on shutdown or an integer indicating the timeout value, defaults to 5000 milliseconds. """ @spec start_child(Supervisor.supervisor(), (() -> any), keyword) :: DynamicSupervisor.on_start_child() def start_child(supervisor, fun, options \\ []) do restart = options[:restart] shutdown = options[:shutdown] args = [get_owner(self()), get_callers(self()), {:erlang, :apply, [fun, []]}] start_child_with_spec(supervisor, args, restart, shutdown) end @doc """ Starts a task as a child of the given `supervisor`. 
Similar to `start_child/2` except the task is specified by the given `module`, `fun` and `args`. """ @spec start_child(Supervisor.supervisor(), module, atom, [term], keyword) :: DynamicSupervisor.on_start_child() def start_child(supervisor, module, fun, args, options \\ []) when is_atom(fun) and is_list(args) do restart = options[:restart] shutdown = options[:shutdown] args = [get_owner(self()), get_callers(self()), {module, fun, args}] start_child_with_spec(supervisor, args, restart, shutdown) end defp start_child_with_spec(supervisor, args, restart, shutdown) do # TODO: This only exists because we need to support reading restart/shutdown # from two different places. Remove this, the init function and the associated # clause in DynamicSupervisor on Elixir v2.0 # TODO: Once we do this, we can also make it so the task arguments are never # sent to the supervisor if the restart is temporary GenServer.call(supervisor, {:start_task, args, restart, shutdown}, :infinity) end defp get_owner(pid) do self_or_name = case Process.info(pid, :registered_name) do {:registered_name, name} when is_atom(name) -> name _ -> pid end {node(), self_or_name, pid} end defp get_callers(owner) do case :erlang.get(:"$callers") do [_ | _] = list -> [owner | list] _ -> [owner] end end defp async(supervisor, link_type, module, fun, args, options) do owner = self() args = [get_owner(owner), get_callers(owner), :monitor, {module, fun, args}] shutdown = options[:shutdown] case start_child_with_spec(supervisor, args, :temporary, shutdown) do {:ok, pid} -> if link_type == :link, do: Process.link(pid) ref = Process.monitor(pid) send(pid, {owner, ref}) %Task{pid: pid, ref: ref, owner: owner} {:error, :max_children} -> raise """ reached the maximum number of tasks for this task supervisor. The maximum number \ of tasks that are allowed to run at the same time under this supervisor can be \ configured with the :max_children option passed to Task.Supervisor.start_link/1\ """ end end defp build_stream(supervisor, link_type, enumerable, fun, options) do shutdown = options[:shutdown] &Task.Supervised.stream(enumerable, &1, &2, fun, options, fn [owner | _] = callers, mfa -> args = [get_owner(owner), callers, :monitor, mfa] case start_child_with_spec(supervisor, args, :temporary, shutdown) do {:ok, pid} -> if link_type == :link, do: Process.link(pid) {:ok, link_type, pid} {:error, :max_children} -> {:error, :max_children} end end) end end
36.052738
97
0.677844
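A minimal usage sketch for the `Task.Supervisor` API documented above, assuming a hypothetical supervisor name `MySketch.TaskSupervisor`; it exercises `async/2` plus `Task.await/1` and `async_stream/4` as described in the moduledoc.

# Start an unnamed-children task supervisor under a hypothetical name.
{:ok, _pid} = Task.Supervisor.start_link(name: MySketch.TaskSupervisor)

# async/2 + Task.await/1 for a single linked task.
task = Task.Supervisor.async(MySketch.TaskSupervisor, fn -> 1 + 1 end)
2 = Task.await(task)

# async_stream/4 maps a function over a collection with bounded concurrency;
# each element yields {:ok, result}, in input order by default.
results =
  MySketch.TaskSupervisor
  |> Task.Supervisor.async_stream([1, 2, 3], fn n -> n * n end, max_concurrency: 2)
  |> Enum.to_list()

[ok: 1, ok: 4, ok: 9] = results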
9ec8384137b0c633d5fb9e1325b547498433cad4
3,323
ex
Elixir
lib/pigeon/gcm_worker.ex
irybakov/pigeon
8b734de28b14dbeeeb71c9d1ca24840434e429b7
[ "MIT" ]
null
null
null
lib/pigeon/gcm_worker.ex
irybakov/pigeon
8b734de28b14dbeeeb71c9d1ca24840434e429b7
[ "MIT" ]
null
null
null
lib/pigeon/gcm_worker.ex
irybakov/pigeon
8b734de28b14dbeeeb71c9d1ca24840434e429b7
[ "MIT" ]
1
2020-05-22T11:58:11.000Z
2020-05-22T11:58:11.000Z
defmodule Pigeon.GCMWorker do @moduledoc """ Handles all FCM request and response parsing over an HTTP2 connection. """ use Pigeon.GenericH2Worker, ping_interval: 60_000 alias Pigeon.GCM.NotificationResponse require Logger def default_name, do: :gcm_default def host(config) do config[:endpoint] || "fcm.googleapis.com" end def port(config) do config[:port] || 443 end def socket_options(_config) do {:ok, []} end def encode_notification({_registration_ids, notification}) do notification end def req_headers(config, _notification) do [{"authorization", "key=#{config[:key]}"}, {"content-type", "application/json"}, {"accept", "application/json"}] end def req_path(_notification) do "/fcm/send" end defp parse_error(_notification, _headers, body) do {:ok, response} = Poison.decode(body) response["reason"] |> Macro.underscore |> String.to_existing_atom end defp parse_response({registration_ids, payload}, _headers, body) do result = Poison.decode! body parse_result(registration_ids, result) end def parse_result(ids, %{"results" => results}) do parse_result1(ids, results, %NotificationResponse{}) end def parse_result1([], [], result) do result end def parse_result1(regid, results, result) when is_binary(regid) do parse_result1([regid], results, result) end def parse_result1([regid | reg_res], [%{"message_id" => id, "registration_id" => new_regid} | rest_results], %NotificationResponse{ update: update} = resp) do new_updates = [{regid, new_regid} | update] parse_result1(reg_res, rest_results, %{resp | message_id: id, update: new_updates}) end def parse_result1([regid | reg_res], [%{"message_id" => id} | rest_results], %NotificationResponse{ok: ok} = resp) do parse_result1(reg_res, rest_results, %{resp | message_id: id, ok: [regid | ok]}) end def parse_result1([regid | reg_res], [%{"error" => "Unavailable"} | rest_results], %NotificationResponse{retry: retry} = resp) do parse_result1(reg_res, rest_results, %{resp | retry: [regid | retry]}) end def parse_result1([regid | reg_res], [%{"error" => invalid } | rest_results], %NotificationResponse{remove: remove} = resp) when invalid == "NotRegistered" or invalid == "InvalidRegistration" do parse_result1(reg_res, rest_results, %{resp | remove: [regid | remove]}) end def parse_result1([regid | reg_res] = regs, [%{"error" => error} | rest_results] = results, %NotificationResponse{error: regs_in_error} = resp) do case Map.has_key?(regs_in_error, error) do true -> parse_result1(reg_res, rest_results, %{resp | error: %{regs_in_error | error => regid}}) false -> # create map key if required. parse_result1(regs, results, %{resp | error: Map.merge(%{error => []}, regs_in_error)}) end end def error_msg(code, _error) do case code do "400" -> "Malformed JSON" "401" -> "Unauthorized" _ -> "Unknown" end end end
29.936937
106
0.617815
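A simplified sketch of the registration-id/result pairing that `parse_result1/3` performs in the worker above; `GcmResultSketch` and its bucket keys are hypothetical reductions of `%NotificationResponse{}`, covering only the ok/retry/remove cases.

# Pair each registration id with its FCM result map and bucket it.
defmodule GcmResultSketch do
  def classify(reg_ids, results) do
    reg_ids
    |> Enum.zip(results)
    |> Enum.reduce(%{ok: [], retry: [], remove: []}, fn
      {id, %{"message_id" => _}}, acc -> Map.update!(acc, :ok, &[id | &1])
      {id, %{"error" => "Unavailable"}}, acc -> Map.update!(acc, :retry, &[id | &1])
      {id, %{"error" => _}}, acc -> Map.update!(acc, :remove, &[id | &1])
    end)
  end
end

# Example:
# GcmResultSketch.classify(
#   ["reg1", "reg2", "reg3"],
#   [%{"message_id" => "1"}, %{"error" => "Unavailable"}, %{"error" => "NotRegistered"}]
# )
# #=> %{ok: ["reg1"], retry: ["reg2"], remove: ["reg3"]}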
9ec8510d144d50010b3916fd73bb41d919b326c1
1,999
ex
Elixir
clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p2beta1_color_info.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p2beta1_color_info.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p2beta1_color_info.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p2beta1ColorInfo do @moduledoc """ Color information consists of RGB channels, score, and the fraction of the image that the color occupies in the image. ## Attributes * `color` (*type:* `GoogleApi.Vision.V1.Model.Color.t`, *default:* `nil`) - RGB components of the color. * `pixelFraction` (*type:* `number()`, *default:* `nil`) - The fraction of pixels the color occupies in the image. Value in range [0, 1]. * `score` (*type:* `number()`, *default:* `nil`) - Image-specific score for this color. Value in range [0, 1]. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :color => GoogleApi.Vision.V1.Model.Color.t(), :pixelFraction => number(), :score => number() } field(:color, as: GoogleApi.Vision.V1.Model.Color) field(:pixelFraction) field(:score) end defimpl Poison.Decoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p2beta1ColorInfo do def decode(value, options) do GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p2beta1ColorInfo.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p2beta1ColorInfo do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
36.345455
118
0.721861
9ec8649eceaed70bc9570ecddf4d1eb7d5ec00fe
4,412
ex
Elixir
lib/ex_aws/request.ex
Costa-Silva/ex_aws
6a54c3ccf70f58d5c795a4726324d7ab4290d5e4
[ "MIT" ]
1
2021-01-05T11:56:32.000Z
2021-01-05T11:56:32.000Z
lib/ex_aws/request.ex
hetaoapps/ex_aws
1dcf28d13f0cfb384545dc7a068486874c74635a
[ "MIT" ]
null
null
null
lib/ex_aws/request.ex
hetaoapps/ex_aws
1dcf28d13f0cfb384545dc7a068486874c74635a
[ "MIT" ]
2
2020-12-29T13:52:35.000Z
2021-05-11T07:36:26.000Z
defmodule ExAws.Request do require Logger @moduledoc """ Makes requests to AWS. """ @type http_status :: pos_integer @type success_content :: %{body: binary, headers: [{binary, binary}]} @type success_t :: {:ok, success_content} @type error_t :: {:error, {:http_error, http_status, binary}} @type response_t :: success_t | error_t def request(http_method, url, data, headers, config, service) do body = case data do [] -> "{}" d when is_binary(d) -> d _ -> config[:json_codec].encode!(data) end request_and_retry(http_method, url, service, config, headers, body, {:attempt, 1}) end def request_and_retry(_method, _url, _service, _config, _headers, _req_body, {:error, reason}), do: {:error, reason} def request_and_retry(method, url, service, config, headers, req_body, {:attempt, attempt}) do full_headers = ExAws.Auth.headers(method, url, service, config, headers, req_body) with {:ok, full_headers} <- full_headers do safe_url = ExAws.Request.Url.sanitize(url, service) if config[:debug_requests] do Logger.debug( "ExAws: Request URL: #{inspect(safe_url)} HEADERS: #{inspect(full_headers)} BODY: #{ inspect(req_body) } ATTEMPT: #{attempt}" ) end case config[:http_client].request( method, safe_url, req_body, full_headers, Map.get(config, :http_opts, []) ) do {:ok, %{status_code: status} = resp} when status in 200..299 or status == 304 -> {:ok, resp} {:ok, %{status_code: status} = _resp} when status == 301 -> Logger.warn("ExAws: Received redirect, did you specify the correct region?") {:error, {:http_error, status, "redirected"}} {:ok, %{status_code: status} = resp} when status in 400..499 -> case client_error(resp, config[:json_codec]) do {:retry, reason} -> request_and_retry( method, url, service, config, headers, req_body, attempt_again?(attempt, reason, config) ) {:error, reason} -> {:error, reason} end {:ok, %{status_code: status} = resp} when status >= 500 -> body = Map.get(resp, :body) reason = {:http_error, status, body} request_and_retry( method, url, service, config, headers, req_body, attempt_again?(attempt, reason, config) ) {:error, %{reason: reason}} -> Logger.warn( "ExAws: HTTP ERROR: #{inspect(reason)} for URL: #{inspect(safe_url)} ATTEMPT: #{ attempt }" ) request_and_retry( method, url, service, config, headers, req_body, attempt_again?(attempt, reason, config) ) end end end def client_error(%{status_code: status, body: body} = error, json_codec) do case json_codec.decode(body) do {:ok, %{"__type" => error_type, "message" => message} = err} -> error_type |> String.split("#") |> case do [_, type] -> handle_aws_error(type, message) _ -> {:error, {:http_error, status, err}} end _ -> {:error, {:http_error, status, error}} end end def client_error(%{status_code: status} = error, _) do {:error, {:http_error, status, error}} end def handle_aws_error("ProvisionedThroughputExceededException" = type, message) do {:retry, {type, message}} end def handle_aws_error("ThrottlingException" = type, message) do {:retry, {type, message}} end def handle_aws_error(type, message) do {:error, {type, message}} end def attempt_again?(attempt, reason, config) do if attempt >= config[:retries][:max_attempts] do {:error, reason} else attempt |> backoff(config) {:attempt, attempt + 1} end end def backoff(attempt, config) do (config[:retries][:base_backoff_in_ms] * :math.pow(2, attempt)) |> min(config[:retries][:max_backoff_in_ms]) |> trunc |> :rand.uniform() |> :timer.sleep() end end
28.464516
97
0.55417
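A worked sketch of the exponential backoff computed by `backoff/2` in the request module above, with the `:rand.uniform/1` sleep left out so the arithmetic is visible; the base and maximum values are illustrative config choices, not guaranteed ExAws defaults.

# Cap for attempt N: base * 2^N, clamped to the configured maximum.
base_backoff_in_ms = 10
max_backoff_in_ms = 10_000

backoff_cap = fn attempt ->
  (base_backoff_in_ms * :math.pow(2, attempt))
  |> min(max_backoff_in_ms)
  |> trunc()
end

# Attempts 1..12 give caps of 20, 40, 80, ... until the 10_000 ms ceiling kicks in;
# the real code then sleeps :rand.uniform(cap) milliseconds.
Enum.map(1..12, backoff_cap)
#=> [20, 40, 80, 160, 320, 640, 1280, 2560, 5120, 10000, 10000, 10000]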
9ec86ee6981964e609039e142692b19e16d18606
294
exs
Elixir
acronym/acronym.exs
ravanscafi/exercism-elixir
0f5c8c923166a0a795c323c7e2d6ccc9da572fcf
[ "MIT" ]
null
null
null
acronym/acronym.exs
ravanscafi/exercism-elixir
0f5c8c923166a0a795c323c7e2d6ccc9da572fcf
[ "MIT" ]
null
null
null
acronym/acronym.exs
ravanscafi/exercism-elixir
0f5c8c923166a0a795c323c7e2d6ccc9da572fcf
[ "MIT" ]
null
null
null
defmodule Acronym do
  @doc """
  Generate an acronym from a string.

  "This is a string" => "TIAS"
  """
  @spec abbreviate(String.t()) :: String.t()
  def abbreviate(string) do
    String.split(string, ~r{\B[a-z]+|\W}, trim: true)
    |> Enum.reduce("", &(&2 <> String.upcase(&1)))
  end
end
24.5
53
0.595238
9ec89643a952445bffd09454bd52a04bd811b1ff
1,139
exs
Elixir
config/config.exs
molbrown/currency_formatter
35bd8779700d1508a9f8cde8301f71a81bc461d7
[ "WTFPL" ]
14
2016-03-23T21:51:07.000Z
2022-01-26T18:33:43.000Z
config/config.exs
molbrown/currency_formatter
35bd8779700d1508a9f8cde8301f71a81bc461d7
[ "WTFPL" ]
12
2016-06-24T14:22:22.000Z
2021-08-09T22:58:52.000Z
config/config.exs
molbrown/currency_formatter
35bd8779700d1508a9f8cde8301f71a81bc461d7
[ "WTFPL" ]
13
2016-06-24T09:48:16.000Z
2021-07-15T21:55:08.000Z
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config

# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.

# You can configure for your application as:
#
#     config :currency_formatter, key: :value
#
# And access this configuration in your application as:
#
#     Application.get_env(:currency_formatter, :key)
#
# Or configure a 3rd-party app:
#
#     config :logger, level: :info
#

# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{Mix.env}.exs"
36.741935
73
0.755926
9ec9227fd8002e13f058b440ed4807ce0c2250b3
4,697
ex
Elixir
lib/github/parse_owner.ex
gebner/bors-ng
7a88ef7cceb1f112374a426a4bdfffc323b44a61
[ "Apache-2.0" ]
null
null
null
lib/github/parse_owner.ex
gebner/bors-ng
7a88ef7cceb1f112374a426a4bdfffc323b44a61
[ "Apache-2.0" ]
null
null
null
lib/github/parse_owner.ex
gebner/bors-ng
7a88ef7cceb1f112374a426a4bdfffc323b44a61
[ "Apache-2.0" ]
null
null
null
require Logger

defmodule BorsNG.CodeOwners do
  @type tjson :: map

  @type t :: %BorsNG.CodeOwners{
          patterns: [BorsNG.FilePattern]
        }
  defstruct(patterns: [])
end

defmodule BorsNG.FilePattern do
  @type tjson :: map

  @type t :: %BorsNG.FilePattern{
          file_pattern: String.t,
          approvers: [String.t]
        }
  defstruct(
    file_pattern: "",
    approvers: {}
  )
end

defmodule BorsNG.CodeOwnerParser do
  # Returns a list of lists
  # Items in the inner lists are joined by an OR statement
  # Items in the outer list are joined by an AND statement
  # [[A], [A, B], [A, C]] -> A and (A or B) and (A or C)
  @spec list_required_reviews(%BorsNG.CodeOwners{}, [%BorsNG.GitHub.File{}]) :: [[String.t]]
  def list_required_reviews(code_owners, files) do
    Logger.debug("Code Owners: #{inspect(code_owners)}")
    Logger.debug("Files modified: #{inspect(files)}")

    required_reviewers =
      Enum.map(files, fn x ->
        # Convert each file to an array of matching owners
        pats =
          Enum.map(code_owners.patterns, fn owner ->
            cond do
              String.equivalent?("*", owner.file_pattern) ->
                owner.approvers

              String.contains?(owner.file_pattern, "**") &&
                  process_double_asterisk(x.filename, owner.file_pattern) ->
                owner.approvers

              # If the pattern starts with a slash, only match the root dir
              String.starts_with?(owner.file_pattern, "/") &&
                  :glob.matches("/" <> x.filename, owner.file_pattern) &&
                  !:glob.matches(x.filename, owner.file_pattern <> "/*") ->
                owner.approvers

              # For patterns that don't start with a leading /, the pattern is
              # the equivalent of "**/{pattern}"
              !String.starts_with?(owner.file_pattern, "/") &&
                  :glob.matches(x.filename, "**" <> owner.file_pattern) &&
                  !:glob.matches(x.filename, owner.file_pattern <> "/*") ->
                owner.approvers

              # For non glob patterns, if the pattern starts with a slash, only match the root dir
              String.starts_with?(owner.file_pattern, "/") &&
                  String.starts_with?("/" <> x.filename, owner.file_pattern) ->
                owner.approvers

              !String.starts_with?(owner.file_pattern, "/") &&
                  String.contains?(x.filename, owner.file_pattern) ->
                owner.approvers

              true -> nil # if unknown fall through
            end
          end)

        # Remove any nil entries (indicating review not required)
        # Pick the last matching entry (entries further down in the file have higher priority)
        Enum.reduce(pats, nil, fn x, acc ->
          if x != nil do
            x
          else
            acc
          end
        end)
      end)

    required_reviewers = Enum.filter(required_reviewers, fn x -> x != nil end)
    Logger.debug("Required reviewers: #{inspect(required_reviewers)}")

    required_reviewers
  end

  @spec process_double_asterisk(String.t, String.t) :: boolean
  def process_double_asterisk(file_name, file_pattern) do
    double_asterisk = "**"

    cond do
      String.starts_with?(file_pattern, double_asterisk) ->
        pattern = String.trim_leading(file_pattern, double_asterisk)
        String.contains?(file_name, pattern)

      String.ends_with?(file_pattern, double_asterisk) ->
        pattern = String.trim_trailing(file_pattern, double_asterisk)
        String.starts_with?(file_name, pattern)

      String.contains?(file_pattern, double_asterisk) ->
        patterns = String.split(file_pattern, double_asterisk, parts: 2)
        String.starts_with?(file_name, List.first(patterns)) && String.contains?(file_name, List.last(patterns))
    end
  end

  @spec parse_file(String.t) :: {:ok, %BorsNG.CodeOwners{}}
  def parse_file(file_contents) do
    # Empty codeowners file
    if file_contents == nil do
      owners = %BorsNG.CodeOwners{
        patterns: []
      }

      {:ok, owners}
    else
      lines = String.split(file_contents, "\n")

      # Remove any comments from the file
      lines = Enum.map(lines, fn x -> String.replace(x, ~r/#.*/, "") end)

      # Remove empty lines
      lines = Enum.filter(lines, fn x -> String.length(String.trim(x)) > 0 end)

      patterns =
        Enum.map(lines, fn x ->
          segments = String.split(x)
          approvers = Enum.slice(segments, 1, Enum.count(segments) - 1)

          %BorsNG.FilePattern{
            file_pattern: Enum.at(segments, 0),
            approvers: approvers
          }
        end)

      owners = %BorsNG.CodeOwners{
        patterns: patterns
      }

      {:ok, owners}
    end
  end
end
34.036232
127
0.607196
9ec9343cf5da78a8d42c4eb10866959f7081528a
10,161
ex
Elixir
lib/bincode/structs.ex
LeonardBesson/bincode
0e2950e6d09eaf9e9cea439e33cad5397747e331
[ "MIT" ]
null
null
null
lib/bincode/structs.ex
LeonardBesson/bincode
0e2950e6d09eaf9e9cea439e33cad5397747e331
[ "MIT" ]
null
null
null
lib/bincode/structs.ex
LeonardBesson/bincode
0e2950e6d09eaf9e9cea439e33cad5397747e331
[ "MIT" ]
null
null
null
defmodule Bincode.Structs do @moduledoc """ Module defining macros related to structs and enums. """ @doc """ Declares a new struct. This macro generates a struct with serialization and deserialization methods according to the given fields. ## Options * `absolute` - When set to true, the given struct name is interpreted as the absolute module name. When set to false, the given struct name is appended to the caller's module. Defaults to false. ## Example defmodule MyStructs do import Bincode.Structs declare_struct(Person, first_name: :string, last_name: :string, age: :u8 ) end alias MyStructs.Person person = %Person{first_name: "John", last_name: "Doe", age: 44} {:ok, <<4, 0, 0, 0, 0, 0, 0, 0, 74, 111, 104, 110, 3, 0, 0, 0, 0, 0, 0, 0, 68, 111, 101, 44>>} = Bincode.serialize(person, Person) It's also possible to call `serialize` and `deserialize` from the struct module directly. {:ok, {%Person{age: 44, first_name: "John", last_name: "Doe"}, ""}} = Person.deserialize(<<4, 0, 0, 0, 0, 0, 0, 0, 74, 111, 104, 110, 3, 0, 0, 0, 0, 0, 0, 0, 68, 111, 101, 44>>) Structs and enums can be nested. In this case the type is the fully qualified module. For example: defmodule MyStructs do import Bincode.Structs declare_struct(Person, first_name: :string, last_name: :string, age: :u8 ) declare_struct(Employee, employee_number: :u64, person: MyStructs.Person, job_title: :string, ) end """ defmacro declare_struct(struct, fields, options \\ []) when is_list(fields) do %Macro.Env{module: caller_module} = __CALLER__ struct_module = if Keyword.get(options, :absolute, false) do Macro.expand(struct, __CALLER__) else Module.concat([caller_module, Macro.expand(struct, __CALLER__)]) end struct_data = for {field_name, _} <- fields, do: {field_name, nil} field_names = for {field_name, _} <- fields, do: field_name field_types = for {_, field_type} <- fields, do: field_type types = for type <- field_types do case type do # This field is a struct {:__aliases__, _, _} -> Macro.expand(type, __CALLER__) _ -> type end end value_variables = for {field_name, _} <- fields do quote do: var!(struct).unquote(Macro.var(field_name, nil)) end prefix = Keyword.get(options, :prefix, {nil, nil}) quoted_prefix_serialization = case prefix do {nil, nil} -> {:ok, <<>>} {prefix_value, prefix_type} -> quote do: Bincode.serialize(unquote(prefix_value), unquote(prefix_type), var!(opts)) end quoted_prefix_deserialization = case prefix do {nil, nil} -> quote do: {:ok, {<<>>, var!(rest)}} {prefix_value, prefix_type} -> quote do: Bincode.deserialize(var!(rest), unquote(prefix_type), var!(opts)) end quote do defmodule unquote(struct_module) do defstruct unquote(struct_data) def serialize(struct, opts \\ []) def serialize(%__MODULE__{} = var!(struct), var!(opts)) do with {:ok, serialized_prefix} = unquote(quoted_prefix_serialization) do serialized_fields = Enum.reduce_while( Enum.zip([unquote_splicing(value_variables)], [unquote_splicing(types)]), [serialized_prefix], fn {value_var, type}, result -> case Bincode.serialize(value_var, type, var!(opts)) do {:ok, serialized} -> {:cont, [result, serialized]} {:error, msg} -> {:halt, {:error, msg}} end end ) case serialized_fields do {:error, msg} -> {:error, msg} _ -> {:ok, IO.iodata_to_binary(serialized_fields)} end end end def serialize(value, _opts) do {:error, "Cannot serialize value #{inspect(value)} into struct #{unquote(struct_module)}"} end def serialize!(value, opts) do case serialize(value, opts) do {:ok, result} -> result {:error, message} -> raise ArgumentError, message: message end end def 
deserialize(data, opts \\ []) def deserialize(<<var!(rest)::binary>>, var!(opts)) do with {:ok, {deserialized_variant, rest}} <- unquote(quoted_prefix_deserialization) do deserialized_fields = Enum.reduce_while( Enum.zip([unquote_splicing(field_names)], [unquote_splicing(types)]), {[], rest}, fn {field_name, type}, {fields, rest} -> case Bincode.deserialize(rest, type, var!(opts)) do {:ok, {deserialized, rest}} -> {:cont, {[{field_name, deserialized} | fields], rest}} {:error, msg} -> {:halt, {:error, msg}} end end ) case deserialized_fields do {:error, msg} -> {:error, msg} {fields, rest} -> struct = struct!(unquote(struct_module), fields) {:ok, {struct, rest}} end end end def deserialize(data, _opts) do {:error, "Cannot deserialize value #{inspect(data)} into struct #{unquote(struct_module)}"} end def deserialize!(data, opts) do case deserialize(data, opts) do {:ok, result} -> result {:error, message} -> raise ArgumentError, message: message end end end defimpl Bincode.Serializer, for: unquote(struct_module) do def serialize(term, opts) do unquote(struct_module).serialize(term, opts) end end end end @doc """ Declares a new enum. This macro generates a module for the enum, plus a struct for each variant with serialization and deserialization methods according to the given fields. ## Options * `absolute` - When set to true, the given struct name is interpreted as the absolute module name. When set to false, the given struct name is appended to the caller's module. Defaults to false. ## Example defmodule MyEnums do import Bincode.Structs declare_enum(IpAddr, V4: [tuple: {:u8, :u8, :u8, :u8}], V6: [addr: :string] ) end alias MyEnums.IpAddr ip_v4 = %IpAddr.V4{tuple: {127, 0, 0, 1}} {:ok, <<0, 0, 0, 0, 127, 0, 0, 1>>} = Bincode.serialize(ip_v4, IpAddr) ip_v6 = %IpAddr.V6{addr: "::1"} {:ok, <<1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 58, 58, 49>>} = Bincode.serialize(ip_v6, IpAddr) It's also possible to call `serialize` and `deserialize` from the struct module directly. {:ok, {%IpAddr.V4{tuple: {127, 0, 0, 1}}, ""}} = IpAddr.deserialize(<<0, 0, 0, 0, 127, 0, 0, 1>>) {:ok, {%IpAddr.V6{addr: "::1"}, ""}} = IpAddr.deserialize(<<1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 58, 58, 49>>) Enums can be nested and contain structs. See `Bincode.Structs.declare_struct/3`. 
""" defmacro declare_enum(enum, variants, options \\ []) when is_list(variants) do %Macro.Env{module: caller_module} = __CALLER__ enum_module = if Keyword.get(options, :absolute, false) do Macro.expand(enum, __CALLER__) else Module.concat([caller_module, Macro.expand(enum, __CALLER__)]) end quote do defmodule unquote(enum_module) do unquote do variants_definition = for {{variant, fields}, i} <- Enum.with_index(variants) do variant_module = Module.concat([enum_module, Macro.expand(variant, __CALLER__)]) quote do Bincode.Structs.declare_struct( unquote(variant), unquote(fields), prefix: {unquote(i), :u32} ) def serialize(%unquote(variant_module){} = variant, opts) do unquote(variant_module).serialize(variant, opts) end defp deserialize(unquote(i), <<data::binary>>, opts) do unquote(variant_module).deserialize(data, opts) end end end quote do unquote(variants_definition) def serialize(value, _opts) do {:error, "Cannot serialize variant #{inspect(value)} into enum #{unquote(enum_module)}"} end def serialize!(value, opts) do case serialize(value, opts) do {:ok, result} -> result {:error, message} -> raise ArgumentError, message: message end end def deserialize(<<data::binary>>, opts) do case Bincode.deserialize(data, :u32, opts) do {:ok, {deserialized_variant, _}} -> deserialize(deserialized_variant, data, opts) {:error, _} -> {:error, "Cannot serialize variant #{inspect(data)} into enum #{unquote(enum_module)}"} end end def deserialize(data, _opts) do {:error, "Cannot deserialize #{inspect(data)} into enum #{unquote(enum_module)} variant"} end defp deserialize(_unknown_variant, data, _opts) do {:error, "Cannot deserialize #{inspect(data)} into enum #{unquote(enum_module)} variant"} end def deserialize!(data, opts) do case deserialize(data, opts) do {:ok, result} -> result {:error, message} -> raise ArgumentError, message: message end end end end end end end end
32.672026
183
0.549749
9ec93af32fd42c642826d2fe1f6a00f0ef1ec18a
1,798
exs
Elixir
mix.exs
zamith/hound
2fcddb59e1e71ac77bbebfcb97bc6d74ae11d116
[ "MIT" ]
null
null
null
mix.exs
zamith/hound
2fcddb59e1e71ac77bbebfcb97bc6d74ae11d116
[ "MIT" ]
null
null
null
mix.exs
zamith/hound
2fcddb59e1e71ac77bbebfcb97bc6d74ae11d116
[ "MIT" ]
null
null
null
defmodule Hound.Mixfile do
  use Mix.Project

  def project do
    [
      app: :hound,
      version: "0.5.10",
      elixir: ">= 1.0.0",
      description: description,
      deps: deps(Mix.env),
      package: package,
      docs: [readme: true, main: "README"]
    ]
  end

  # Configuration for the OTP application
  def application do
    [
      applications: [:jsex, :ibrowse],
      mod: { Hound, [] },
      description: 'Browser automation library',
    ]
  end

  # Returns the list of dependencies in the format:
  # { :foobar, git: "https://github.com/elixir-lang/foobar.git", tag: "0.1" }
  #
  # To specify particular versions, regardless of the tag, do:
  # { :barbat, "~> 0.1", github: "elixir-lang/barbat.git" }
  defp deps do
    [
      { :ibrowse, github: "cmullaparthi/ibrowse", tag: "v4.1.1"},
      { :jsex, "~> 2.0.0" }
    ]
  end

  defp deps(:docs) do
    deps ++ [
      { :ex_doc, github: "elixir-lang/ex_doc" },
      { :earmark, github: "pragdave/earmark" }
    ]
  end

  defp deps(_) do
    deps
  end

  defp package do
    [
      contributors: ["Akash Manohar J"],
      licenses: ["MIT"],
      links: %{ "GitHub" => "https://github.com/HashNuke/hound" }
    ]
  end

  defp description do
    """
    Elixir library for browser automation and writing integration tests in Elixir.

    ## Features

    * Can run __multiple browser sessions__ simultaneously. [See example](https://github.com/HashNuke/hound/blob/master/test/multiple_browser_session_test.exs).
    * Supports Selenium (Firefox, Chrome), ChromeDriver and PhantomJs.
    * Supports Javascript-heavy apps. Retries a few times before reporting error.
    * Implements the WebDriver Wire Protocol.

    **Internet Explorer may work under Selenium, but hasn't been tested.
    """
  end
end
22.475
160
0.619021
9ec96b75dfded0aa6375fb1302908e53e19971dd
1,583
exs
Elixir
test/movement/persisters/new_slave_test.exs
isshindev/accent
ae4c13139b0a0dfd64ff536b94c940a4e2862150
[ "BSD-3-Clause" ]
806
2018-04-07T20:40:33.000Z
2022-03-30T01:39:57.000Z
test/movement/persisters/new_slave_test.exs
isshindev/accent
ae4c13139b0a0dfd64ff536b94c940a4e2862150
[ "BSD-3-Clause" ]
194
2018-04-07T13:49:37.000Z
2022-03-30T19:58:45.000Z
test/movement/persisters/new_slave_test.exs
doc-ai/accent
e337e16f3658cc0728364f952c0d9c13710ebb06
[ "BSD-3-Clause" ]
89
2018-04-09T13:55:49.000Z
2022-03-24T07:09:31.000Z
defmodule AccentTest.Movement.Persisters.NewSlave do
  use Accent.RepoCase

  alias Accent.{
    Language,
    ProjectCreator,
    Repo,
    User
  }

  alias Movement.Persisters.NewSlave, as: NewSlavePersister

  @user %User{email: "[email protected]"}

  setup do
    user = Repo.insert!(@user)
    language = Repo.insert!(%Language{name: "English", slug: Ecto.UUID.generate()})

    {:ok, project} =
      ProjectCreator.create(params: %{main_color: "#f00", name: "My project", language_id: language.id}, user: user)

    revision = project |> Repo.preload(:revisions) |> Map.get(:revisions) |> hd()

    {:ok, [project: project, revision: revision]}
  end

  test "create revision success", %{project: project, revision: master_revision} do
    new_language = Repo.insert!(%Language{name: "French", slug: Ecto.UUID.generate()})

    {:ok, {context, _}} =
      %Movement.Context{assigns: %{project: project, language: new_language, master_revision: master_revision}}
      |> NewSlavePersister.persist()

    revision = context.assigns[:revision]

    assert revision.language_id == new_language.id
    assert revision.project_id == project.id
    assert revision.master_revision_id == master_revision.id
    assert revision.master == false
  end

  test "create revision error", %{project: project, revision: revision} do
    {:error, changeset} =
      %Movement.Context{assigns: %{project: project, language: %Language{}, master_revision: revision}}
      |> NewSlavePersister.persist()

    assert changeset.errors == [language_id: {"can't be blank", [validation: :required]}]
  end
end
33.680851
131
0.692356
9ec9704e83f167cf3e959ee8f4f98fe7b025d344
1,827
ex
Elixir
clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/dimension_dimension_value.ex
ericrwolfe/elixir-google-api
3dc0f17edd5e2d6843580c16ddae3bf84b664ffd
[ "Apache-2.0" ]
null
null
null
clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/dimension_dimension_value.ex
ericrwolfe/elixir-google-api
3dc0f17edd5e2d6843580c16ddae3bf84b664ffd
[ "Apache-2.0" ]
null
null
null
clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/dimension_dimension_value.ex
ericrwolfe/elixir-google-api
3dc0f17edd5e2d6843580c16ddae3bf84b664ffd
[ "Apache-2.0" ]
null
null
null
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an &quot;AS IS&quot; BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This class is auto generated by the swagger code generator program. # https://github.com/swagger-api/swagger-codegen.git # Do not edit the class manually. defmodule GoogleApi.AdExchangeBuyer.V14.Model.DimensionDimensionValue do @moduledoc """ Value of the dimension. ## Attributes - id (Integer): Id of the dimension. Defaults to: `null`. - name (String): Name of the dimension mainly for debugging purposes, except for the case of CREATIVE_SIZE. For CREATIVE_SIZE, strings are used instead of ids. Defaults to: `null`. - percentage (Integer): Percent of total impressions for a dimension type. e.g. {dimension_type: &#39;GENDER&#39;, [{dimension_value: {id: 1, name: &#39;MALE&#39;, percentage: 60}}]} Gender MALE is 60% of all impressions which have gender. Defaults to: `null`. """ defstruct [ :id, :name, :percentage ] end defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V14.Model.DimensionDimensionValue do def decode(value, _options) do value end end defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V14.Model.DimensionDimensionValue do def encode(value, options) do GoogleApi.AdExchangeBuyer.V14.Deserializer.serialize_non_nil(value, options) end end
38.0625
262
0.749863
9ec9760b3d00e0faea41c4709243038910709134
8,981
ex
Elixir
lib/circular/accounts.ex
cblock/circular
fb784d638bde2b1f125a03c403783f93533f5892
[ "Apache-2.0" ]
null
null
null
lib/circular/accounts.ex
cblock/circular
fb784d638bde2b1f125a03c403783f93533f5892
[ "Apache-2.0" ]
3
2020-09-04T09:32:55.000Z
2022-03-06T02:17:06.000Z
lib/circular/accounts.ex
cblock/circular
fb784d638bde2b1f125a03c403783f93533f5892
[ "Apache-2.0" ]
null
null
null
defmodule Circular.Accounts do @moduledoc """ The Accounts context. """ import Ecto.Query, warn: false alias Circular.Repo alias Circular.Accounts.{User, UserToken, UserNotifier} ## Database getters @doc """ Gets a user by email. ## Examples iex> get_user_by_email("[email protected]") %User{} iex> get_user_by_email("[email protected]") nil """ def get_user_by_email(email) when is_binary(email) do Repo.get_by(User, email: email) end @doc """ Gets a user by email and password. ## Examples iex> get_user_by_email_and_password("[email protected]", "correct_password") %User{} iex> get_user_by_email_and_password("[email protected]", "invalid_password") nil """ def get_user_by_email_and_password(email, password) when is_binary(email) and is_binary(password) do user = Repo.get_by(User, email: email) if User.valid_password?(user, password), do: user end @doc """ Gets a single user. Raises `Ecto.NoResultsError` if the User does not exist. ## Examples iex> get_user!(123) %User{} iex> get_user!(456) ** (Ecto.NoResultsError) """ def get_user!(id), do: Repo.get!(User, id) ## User registration @doc """ Registers a user. ## Examples iex> register_user(%{field: value}) {:ok, %User{}} iex> register_user(%{field: bad_value}) {:error, %Ecto.Changeset{}} """ def register_user(attrs) do %User{} |> User.registration_changeset(attrs) |> Repo.insert() end @doc """ Returns an `%Ecto.Changeset{}` for tracking user changes. ## Examples iex> change_user_registration(user) %Ecto.Changeset{data: %User{}} """ def change_user_registration(%User{} = user, attrs \\ %{}) do User.registration_changeset(user, attrs) end ## Settings @doc """ Returns an `%Ecto.Changeset{}` for changing the user e-mail. ## Examples iex> change_user_email(user) %Ecto.Changeset{data: %User{}} """ def change_user_email(user, attrs \\ %{}) do User.email_changeset(user, attrs) end @doc """ Emulates that the e-mail will change without actually changing it in the database. ## Examples iex> apply_user_email(user, "valid password", %{email: ...}) {:ok, %User{}} iex> apply_user_email(user, "invalid password", %{email: ...}) {:error, %Ecto.Changeset{}} """ def apply_user_email(user, password, attrs) do user |> User.email_changeset(attrs) |> User.validate_current_password(password) |> Ecto.Changeset.apply_action(:update) end @doc """ Updates the user e-mail in token. If the token matches, the user email is updated and the token is deleted. The confirmed_at date is also updated to the current time. """ def update_user_email(user, token) do context = "change:#{user.email}" with {:ok, query} <- UserToken.verify_change_email_token_query(token, context), %UserToken{sent_to: email} <- Repo.one(query), {:ok, _} <- Repo.transaction(user_email_multi(user, email, context)) do :ok else _ -> :error end end defp user_email_multi(user, email, context) do changeset = user |> User.email_changeset(%{email: email}) |> User.confirm_changeset() Ecto.Multi.new() |> Ecto.Multi.update(:user, changeset) |> Ecto.Multi.delete_all(:tokens, UserToken.user_and_contexts_query(user, [context])) end @doc """ Delivers the update e-mail instructions to the given user. 
## Examples iex> deliver_update_email_instructions(user, current_email, &Routes.user_update_email_url(conn, :edit, &1)) {:ok, %{to: ..., body: ...}} """ def deliver_update_email_instructions(%User{} = user, current_email, update_email_url_fun) when is_function(update_email_url_fun, 1) do {encoded_token, user_token} = UserToken.build_email_token(user, "change:#{current_email}") Repo.insert!(user_token) UserNotifier.deliver_update_email_instructions(user, update_email_url_fun.(encoded_token)) end @doc """ Returns an `%Ecto.Changeset{}` for changing the user password. ## Examples iex> change_user_password(user) %Ecto.Changeset{data: %User{}} """ def change_user_password(user, attrs \\ %{}) do User.password_changeset(user, attrs) end @doc """ Updates the user password. ## Examples iex> update_user_password(user, "valid password", %{password: ...}) {:ok, %User{}} iex> update_user_password(user, "invalid password", %{password: ...}) {:error, %Ecto.Changeset{}} """ def update_user_password(user, password, attrs) do changeset = user |> User.password_changeset(attrs) |> User.validate_current_password(password) Ecto.Multi.new() |> Ecto.Multi.update(:user, changeset) |> Ecto.Multi.delete_all(:tokens, UserToken.user_and_contexts_query(user, :all)) |> Repo.transaction() |> case do {:ok, %{user: user}} -> {:ok, user} {:error, :user, changeset, _} -> {:error, changeset} end end ## Session @doc """ Generates a session token. """ def generate_user_session_token(user) do {token, user_token} = UserToken.build_session_token(user) Repo.insert!(user_token) token end @doc """ Gets the user with the given signed token. """ def get_user_by_session_token(token) do {:ok, query} = UserToken.verify_session_token_query(token) Repo.one(query) end @doc """ Deletes the signed token with the given context. """ def delete_session_token(token) do Repo.delete_all(UserToken.token_and_context_query(token, "session")) :ok end ## Confirmation @doc """ Delivers the confirmation e-mail instructions to the given user. ## Examples iex> deliver_user_confirmation_instructions(user, &Routes.user_confirmation_url(conn, :confirm, &1)) {:ok, %{to: ..., body: ...}} iex> deliver_user_confirmation_instructions(confirmed_user, &Routes.user_confirmation_url(conn, :confirm, &1)) {:error, :already_confirmed} """ def deliver_user_confirmation_instructions(%User{} = user, confirmation_url_fun) when is_function(confirmation_url_fun, 1) do if user.confirmed_at do {:error, :already_confirmed} else {encoded_token, user_token} = UserToken.build_email_token(user, "confirm") Repo.insert!(user_token) UserNotifier.deliver_confirmation_instructions(user, confirmation_url_fun.(encoded_token)) end end @doc """ Confirms a user by the given token. If the token matches, the user account is marked as confirmed and the token is deleted. """ def confirm_user(token) do with {:ok, query} <- UserToken.verify_email_token_query(token, "confirm"), %User{} = user <- Repo.one(query), {:ok, %{user: user}} <- Repo.transaction(confirm_user_multi(user)) do {:ok, user} else _ -> :error end end defp confirm_user_multi(user) do Ecto.Multi.new() |> Ecto.Multi.update(:user, User.confirm_changeset(user)) |> Ecto.Multi.delete_all(:tokens, UserToken.user_and_contexts_query(user, ["confirm"])) end ## Reset password @doc """ Delivers the reset password e-mail to the given user. 
## Examples iex> deliver_user_reset_password_instructions(user, &Routes.user_reset_password_url(conn, :edit, &1)) {:ok, %{to: ..., body: ...}} """ def deliver_user_reset_password_instructions(%User{} = user, reset_password_url_fun) when is_function(reset_password_url_fun, 1) do {encoded_token, user_token} = UserToken.build_email_token(user, "reset_password") Repo.insert!(user_token) UserNotifier.deliver_reset_password_instructions(user, reset_password_url_fun.(encoded_token)) end @doc """ Gets the user by reset password token. ## Examples iex> get_user_by_reset_password_token("validtoken") %User{} iex> get_user_by_reset_password_token("invalidtoken") nil """ def get_user_by_reset_password_token(token) do with {:ok, query} <- UserToken.verify_email_token_query(token, "reset_password"), %User{} = user <- Repo.one(query) do user else _ -> nil end end @doc """ Resets the user password. ## Examples iex> reset_user_password(user, %{password: "new long password", password_confirmation: "new long password"}) {:ok, %User{}} iex> reset_user_password(user, %{password: "valid", password_confirmation: "not the same"}) {:error, %Ecto.Changeset{}} """ def reset_user_password(user, attrs) do Ecto.Multi.new() |> Ecto.Multi.update(:user, User.password_changeset(user, attrs)) |> Ecto.Multi.delete_all(:tokens, UserToken.user_and_contexts_query(user, :all)) |> Repo.transaction() |> case do {:ok, %{user: user}} -> {:ok, user} {:error, :user, changeset, _} -> {:error, changeset} end end end
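# Editor's note (hedged example): a minimal IEx sketch of how the Accounts context above
# might be exercised. It assumes the standard phx.gen.auth User schema with :email and
# :password fields; the concrete attribute names are not shown in this file.
{:ok, user} =
  Circular.Accounts.register_user(%{
    email: "person@example.com",
    password: "a very long password"
  })

token = Circular.Accounts.generate_user_session_token(user)
user_from_token = Circular.Accounts.get_user_by_session_token(token)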
25.66
116
0.66251
9ec9833239e9a0fb8906cbe6042da447b1285aa3
8,777
exs
Elixir
test/quantum/execution_broadcaster_test.exs
happysalada/quantum-core
a4758de2edac7758f879a7e3f3fde5c3aca7bf71
[ "Apache-2.0" ]
null
null
null
test/quantum/execution_broadcaster_test.exs
happysalada/quantum-core
a4758de2edac7758f879a7e3f3fde5c3aca7bf71
[ "Apache-2.0" ]
null
null
null
test/quantum/execution_broadcaster_test.exs
happysalada/quantum-core
a4758de2edac7758f879a7e3f3fde5c3aca7bf71
[ "Apache-2.0" ]
1
2022-03-06T10:24:39.000Z
2022-03-06T10:24:39.000Z
defmodule Quantum.ExecutionBroadcasterTest do @moduledoc false use ExUnit.Case, async: true import Crontab.CronExpression import ExUnit.CaptureLog import Quantum.CaptureLogExtend alias Quantum.ClockBroadcaster.Event, as: ClockEvent alias Quantum.ExecutionBroadcaster alias Quantum.ExecutionBroadcaster.Event, as: ExecuteEvent alias Quantum.ExecutionBroadcaster.StartOpts alias Quantum.Job alias Quantum.Storage.Test, as: TestStorage alias Quantum.{TestConsumer, TestProducer} # Allow max 10% Latency @max_timeout 1_100 doctest ExecutionBroadcaster defmodule TestScheduler do @moduledoc false use Quantum.Scheduler, otp_app: :execution_broadcaster_test end setup tags do if tags[:listen_storage] do Process.put(:test_pid, self()) end if tags[:manual_dispatch] do :ok else producer = start_supervised!({TestProducer, []}) {broadcaster, _} = capture_log_with_return(fn -> start_supervised!( {ExecutionBroadcaster, %StartOpts{ name: __MODULE__, job_broadcaster_reference: producer, clock_broadcaster_reference: producer, storage: TestStorage, scheduler: TestScheduler, debug_logging: true }} ) end) start_supervised!({TestConsumer, [broadcaster, self()]}) {:ok, %{producer: producer, broadcaster: broadcaster, debug_logging: true}} end end describe "add" do test "reboot triggers", %{producer: producer} do reboot_job = TestScheduler.new_job() |> Job.set_schedule(~e[@reboot]) # Some schedule that is valid but will not trigger the next 10 years non_reboot_job = TestScheduler.new_job() |> Job.set_schedule(~e[* * * * * #{NaiveDateTime.utc_now().year + 1}]) capture_log(fn -> TestProducer.send(producer, {:add, reboot_job}) TestProducer.send(producer, {:add, non_reboot_job}) assert_receive {:received, %ExecuteEvent{job: ^reboot_job}}, @max_timeout refute_receive {:received, %ExecuteEvent{job: ^non_reboot_job}}, @max_timeout end) end test "normal schedule triggers once per second", %{producer: producer} do job = TestScheduler.new_job() |> Job.set_schedule(~e[*]e) capture_log(fn -> TestProducer.send(producer, {:add, job}) spawn(fn -> now = %{NaiveDateTime.utc_now() | microsecond: {0, 0}} TestProducer.send(producer, %ClockEvent{time: now, catch_up: false}) Process.sleep(1_000) TestProducer.send(producer, %ClockEvent{ time: NaiveDateTime.add(now, 1, :second), catch_up: false }) end) assert_receive {:received, %ExecuteEvent{job: ^job}}, @max_timeout assert_receive {:received, %ExecuteEvent{job: ^job}}, @max_timeout end) end @tag listen_storage: true test "saves new last execution time in storage", %{producer: producer} do job = TestScheduler.new_job() |> Job.set_schedule(~e[*]e) capture_log(fn -> TestProducer.send(producer, {:add, job}) now = %{NaiveDateTime.utc_now() | microsecond: {0, 0}} TestProducer.send(producer, %ClockEvent{time: now, catch_up: false}) assert_receive {:update_last_execution_date, {TestScheduler, %NaiveDateTime{}}, _}, @max_timeout assert_receive {:received, %ExecuteEvent{job: ^job}}, @max_timeout end) end test "normal schedule in other timezone triggers once per second", %{producer: producer} do job = TestScheduler.new_job() |> Job.set_schedule(~e[*]e) |> Job.set_timezone("Europe/Zurich") capture_log(fn -> TestProducer.send(producer, {:add, job}) spawn(fn -> now = %{NaiveDateTime.utc_now() | microsecond: {0, 0}} add1 = NaiveDateTime.add(now, 1, :second) TestProducer.send(producer, %ClockEvent{time: now, catch_up: false}) Process.sleep(1_000) TestProducer.send(producer, %ClockEvent{time: add1, catch_up: false}) end) assert_receive {:received, %ExecuteEvent{job: ^job}}, @max_timeout assert_receive 
{:received, %ExecuteEvent{job: ^job}}, @max_timeout end) end test "impossible schedule will not create a crash", %{producer: producer} do # Some schedule that will never trigger job = TestScheduler.new_job() |> Job.set_schedule(~e[1 1 1 1 1 2000]) assert capture_log(fn -> TestProducer.send(producer, {:add, job}) now = %{NaiveDateTime.utc_now() | microsecond: {0, 0}} TestProducer.send(producer, %ClockEvent{time: now, catch_up: false}) refute_receive {:received, %ExecuteEvent{job: ^job}}, @max_timeout end) =~ """ Invalid Schedule #{inspect(job.schedule)} provided for job #{inspect(job.name)}. No matching dates found. The job was removed. """ end test "invalid timezone will not create a crash", %{producer: producer} do job = TestScheduler.new_job() |> Job.set_schedule(~e[*]e) |> Job.set_timezone("Foobar") assert capture_log(fn -> TestProducer.send(producer, {:add, job}) now = %{NaiveDateTime.utc_now() | microsecond: {0, 0}} TestProducer.send(producer, %ClockEvent{time: now, catch_up: false}) refute_receive {:received, %ExecuteEvent{job: ^job}}, @max_timeout end) =~ "Invalid Timezone #{inspect(job.timezone)} provided for job #{inspect(job.name)}." end test "will continue to send after new job is added", %{producer: producer} do job = TestScheduler.new_job() |> Job.set_schedule(~e[*]e) job_new = TestScheduler.new_job() |> Job.set_schedule(~e[*]) capture_log(fn -> TestProducer.send(producer, {:add, job}) now = %{NaiveDateTime.utc_now() | microsecond: {0, 0}} TestProducer.send(producer, %ClockEvent{time: now, catch_up: false}) assert_receive {:received, %ExecuteEvent{job: ^job}}, @max_timeout TestProducer.send(producer, {:add, job_new}) TestProducer.send(producer, %ClockEvent{ time: NaiveDateTime.add(now, 1, :second), catch_up: false }) assert_receive {:received, %ExecuteEvent{job: ^job}}, @max_timeout end) end test "will recalculate execution timer when a new job is added", %{producer: producer} do job = TestScheduler.new_job() |> Job.set_schedule(~e[1 1 1 1 1]) job_new = TestScheduler.new_job() |> Job.set_schedule(~e[*]e) capture_log(fn -> TestProducer.send(producer, {:add, job}) TestProducer.send(producer, {:add, job_new}) now = %{NaiveDateTime.utc_now() | microsecond: {0, 0}} TestProducer.send(producer, %ClockEvent{time: now, catch_up: false}) assert_receive {:received, %ExecuteEvent{job: ^job_new}}, @max_timeout end) end end describe "remove" do test "stops triggering after remove", %{producer: producer} do job = TestScheduler.new_job() |> Job.set_schedule(~e[*]e) capture_log(fn -> TestProducer.send(producer, {:add, job}) now = %{NaiveDateTime.utc_now() | microsecond: {0, 0}} TestProducer.send(producer, %ClockEvent{time: now, catch_up: false}) assert_receive {:received, %ExecuteEvent{job: ^job}}, @max_timeout TestProducer.send(producer, {:remove, job.name}) TestProducer.send(producer, %ClockEvent{ time: NaiveDateTime.add(now, 1, :second), catch_up: false }) refute_receive {:received, %ExecuteEvent{job: ^job}}, @max_timeout end) end test "remove inexistent will not crash", %{producer: producer} do job = TestScheduler.new_job() |> Job.set_schedule(~e[*]e) capture_log(fn -> TestProducer.send(producer, {:add, job}) now = %{NaiveDateTime.utc_now() | microsecond: {0, 0}} TestProducer.send(producer, %ClockEvent{time: now, catch_up: false}) assert_receive {:received, %ExecuteEvent{job: ^job}}, @max_timeout TestProducer.send(producer, {:remove, make_ref()}) TestProducer.send(producer, %ClockEvent{ time: NaiveDateTime.add(now, 1, :second), catch_up: false }) assert_receive {:received, 
%ExecuteEvent{job: ^job}}, @max_timeout end) end end end
31.458781
97
0.613194
9ec98de25980faca2afdcabea5d62212ddafad64
198
ex
Elixir
lib/phone/pk.ex
net/phone
18e1356d2f8d32fe3f95638c3c44bceab0164fb2
[ "Apache-2.0" ]
null
null
null
lib/phone/pk.ex
net/phone
18e1356d2f8d32fe3f95638c3c44bceab0164fb2
[ "Apache-2.0" ]
null
null
null
lib/phone/pk.ex
net/phone
18e1356d2f8d32fe3f95638c3c44bceab0164fb2
[ "Apache-2.0" ]
null
null
null
defmodule Phone.PK do
  @moduledoc false
  use Helper.Country

  def regex, do: ~r/^(92)()(.+'')/
  def country, do: "Pakistan"
  def a2, do: "PK"
  def a3, do: "PAK"

  matcher :regex, ["92"]
end
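# Editor's note (hedged example): the functions defined above are plain accessors, so
# they can be called directly; the `matcher` macro comes from `Helper.Country` and its
# generated API is not shown in this file.
Phone.PK.country()  #=> "Pakistan"
Phone.PK.a2()       #=> "PK"
Phone.PK.a3()       #=> "PAK"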
15.230769
34
0.590909
9ec9a27c1f61fa3f3edaacf090717679070cce5c
463
ex
Elixir
lib/ueberauth/failure/error.ex
gseddon/ueberauth
a914cbb1894224bdc1ff5653011af5966838cab2
[ "MIT" ]
1,456
2015-11-18T05:27:24.000Z
2022-03-24T15:07:59.000Z
lib/ueberauth/failure/error.ex
gseddon/ueberauth
a914cbb1894224bdc1ff5653011af5966838cab2
[ "MIT" ]
139
2015-11-18T18:34:03.000Z
2022-03-30T18:22:01.000Z
lib/ueberauth/failure/error.ex
gseddon/ueberauth
a914cbb1894224bdc1ff5653011af5966838cab2
[ "MIT" ]
120
2015-11-19T16:25:02.000Z
2022-02-06T01:31:45.000Z
defmodule Ueberauth.Failure.Error do
  @moduledoc """
  A specific error for a failed authentication attempt.

  The message_key may be used to identify fields or other machine interpreted
  methods like translation. The message field is for a human readable message
  indicating the cause of the error.
  """

  @type t :: %__MODULE__{
          message_key: binary,
          message: binary
        }

  defstruct message_key: nil, message: nil
end
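# Editor's note (hedged example): building the error struct directly, e.g. inside a
# strategy's failure handling; the message_key and message values are illustrative.
%Ueberauth.Failure.Error{
  message_key: "missing_code",
  message: "No authorization code was received from the provider"
}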
27.235294
77
0.695464
9ec9c2c2662f9213c88c100e585d82f967f5bea2
2,090
ex
Elixir
clients/admin/lib/google_api/admin/directory_v1/api/users.ex
dereksweet/elixir-google-api
9ddd171f77a2efdcc6a968a0c001e503cb34dbb3
[ "Apache-2.0" ]
null
null
null
clients/admin/lib/google_api/admin/directory_v1/api/users.ex
dereksweet/elixir-google-api
9ddd171f77a2efdcc6a968a0c001e503cb34dbb3
[ "Apache-2.0" ]
null
null
null
clients/admin/lib/google_api/admin/directory_v1/api/users.ex
dereksweet/elixir-google-api
9ddd171f77a2efdcc6a968a0c001e503cb34dbb3
[ "Apache-2.0" ]
null
null
null
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

defmodule GoogleApi.Admin.Directory_v1.Api.Users do
  @moduledoc """
  API calls for all endpoints tagged `Users`.
  """

  alias GoogleApi.Admin.Directory_v1.Connection
  alias GoogleApi.Gax.{Request, Response}

  @doc """
  Retrieves a users profile and any custom fields

  ## Parameters

  - connection (GoogleApi.Admin.Directory_v1.Connection): Connection to server
  - userKey (String.t): The users email to get
  - optional_params (KeywordList): [optional] Optional parameters
    - :projection (String.t): The projection to cast on the query.
    - :customFieldMask (String.t): The mask of custom fields to request on the query.

  ## Returns

  {:ok, %GoogleApi.Admin.Directory_v1.Model.User{}} on success
  {:error, info} on failure
  """
  @spec get_user(Tesla.Env.client(), String.t()) ::
          {:ok, GoogleApi.Admin.Directory_v1.Model.User.t()} | {:error, Tesla.Env.t()}
  def get_user(
        connection,
        userKey,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :projection => :query,
      :customFieldMask => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/users/{userKey}", %{
        "userKey" => URI.encode_www_form(userKey)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Admin.Directory_v1.Model.User{}])
  end
end
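# Editor's note (hedged example): a sketch of calling get_user/4. It assumes a Tesla
# connection built from an OAuth2 access token via Connection.new/1 (the Connection
# module itself is not shown here); the token and user key are illustrative.
conn = GoogleApi.Admin.Directory_v1.Connection.new("ya29.example-access-token")

{:ok, user} =
  GoogleApi.Admin.Directory_v1.Api.Users.get_user(conn, "jane.doe@example.com",
    projection: "full"
  )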
32.153846
86
0.686603
9ec9d72906733a10d44cb4ebca0f1969d38bb4af
8,294
ex
Elixir
lib/chat_api_web/router.ex
Tiamat-Tech/papercups
f17d2b0ce080c0edab92a4b2e6d4afcef04aa291
[ "MIT" ]
null
null
null
lib/chat_api_web/router.ex
Tiamat-Tech/papercups
f17d2b0ce080c0edab92a4b2e6d4afcef04aa291
[ "MIT" ]
null
null
null
lib/chat_api_web/router.ex
Tiamat-Tech/papercups
f17d2b0ce080c0edab92a4b2e6d4afcef04aa291
[ "MIT" ]
null
null
null
defmodule ChatApiWeb.Router do use ChatApiWeb, :router pipeline :browser do plug(:accepts, ["html"]) plug(:fetch_session) plug(:fetch_flash) plug(:protect_from_forgery) plug(:put_secure_browser_headers) end pipeline :api do plug(ChatApiWeb.IPAddressPlug) plug(:accepts, ["json"]) plug(ChatApiWeb.APIAuthPlug, otp_app: :chat_api) end pipeline :api_protected do plug(Pow.Plug.RequireAuthenticated, error_handler: ChatApiWeb.APIAuthErrorHandler) plug(ChatApiWeb.EnsureUserEnabledPlug) end pipeline :public_api do plug(ChatApiWeb.IPAddressPlug) plug(:accepts, ["json"]) plug(ChatApiWeb.PublicAPIAuthPlug, otp_app: :chat_api) end # Swagger scope "/api/swagger" do forward("/", PhoenixSwagger.Plug.SwaggerUI, otp_app: :chat_api, swagger_file: "swagger.json") end # Public routes scope "/api", ChatApiWeb do pipe_through(:api) get("/ping", PingController, :ping) post("/ping", PingController, :ping) resources("/registration", RegistrationController, singleton: true, only: [:create]) resources("/session", SessionController, singleton: true, only: [:create, :delete]) resources("/upload", UploadController, only: [:create, :show, :delete]) post("/session/renew", SessionController, :renew) # TODO: figure out a way to secure these methods so they aren't abused post("/accounts", AccountController, :create) post("/conversations", ConversationController, :create) post("/customers", CustomerController, :create) get("/customers/identify", CustomerController, :identify) get("/customers/:id/exists", CustomerController, :exists) put("/customers/:id/metadata", CustomerController, :update_metadata) get("/widget_settings", WidgetSettingsController, :show) put("/widget_settings/metadata", WidgetSettingsController, :update_metadata) post("/verify_email", UserController, :verify_email) post("/reset_password", UserController, :create_password_reset) put("/reset_password", UserController, :reset_password) post("/browser_sessions", BrowserSessionController, :create) # TODO: figure out how to design these APIs post("/browser_sessions/:id/finish", BrowserSessionController, :finish) post("/browser_sessions/:id/restart", BrowserSessionController, :restart) post("/browser_sessions/:id/identify", BrowserSessionController, :identify) get("/browser_sessions/:id/exists", BrowserSessionController, :exists) # TODO: figure out a better name? get("/conversations/customer", ConversationController, :find_by_customer) get("/conversations/shared", ConversationController, :shared) post("/slack/webhook", SlackController, :webhook) post("/slack/actions", SlackController, :actions) post("/mattermost/webhook", MattermostController, :webhook) post("/twilio/webhook", TwilioController, :webhook) # TODO: move to protected route after testing? 
get("/hubspot/oauth", HubspotController, :oauth) post("/newsletters/:newsletter/subscribe", NewsletterController, :subscribe) end # Protected routes scope "/api", ChatApiWeb do pipe_through([:api, :api_protected]) get("/me", SessionController, :me) get("/accounts/me", AccountController, :me) get("/messages/count", MessageController, :count) get("/billing", BillingController, :show) post("/billing", BillingController, :create) put("/billing", BillingController, :update) get("/reporting", ReportingController, :index) get("/slack/oauth", SlackController, :oauth) get("/slack/authorization", SlackController, :authorization) delete("/slack/authorizations/:id", SlackController, :delete) get("/slack/channels", SlackController, :channels) post("/mattermost/auth", MattermostController, :auth) get("/mattermost/channels", MattermostController, :channels) get("/mattermost/authorization", MattermostController, :authorization) delete("/mattermost/authorizations/:id", MattermostController, :delete) post("/twilio/auth", TwilioController, :auth) get("/twilio/authorization", TwilioController, :authorization) delete("/twilio/authorizations/:id", TwilioController, :delete) get("/google/auth", GoogleController, :auth) get("/google/oauth", GoogleController, :callback) get("/google/authorization", GoogleController, :authorization) delete("/google/authorization/:id", GoogleController, :delete) post("/gmail/send", GmailController, :send) put("/widget_settings", WidgetSettingsController, :update) get("/profile", UserProfileController, :show) put("/profile", UserProfileController, :update) get("/user_settings", UserSettingsController, :show) put("/user_settings", UserSettingsController, :update) delete("/users/:id", UserController, :delete) post("/users/:id/disable", UserController, :disable) post("/users/:id/enable", UserController, :enable) post("/payment_methods", PaymentMethodController, :create) get("/payment_methods", PaymentMethodController, :show) get("/browser_sessions/count", BrowserSessionController, :count) resources("/user_invitations", UserInvitationController, except: [:new, :edit]) resources("/user_invitation_emails", UserInvitationEmailController, only: [:create]) resources("/accounts", AccountController, only: [:update, :delete]) resources("/messages", MessageController, except: [:new, :edit]) resources("/conversations", ConversationController, except: [:new, :edit, :create]) resources("/companies", CompanyController, except: [:new, :edit]) resources("/customers", CustomerController, except: [:new, :edit, :create]) resources("/notes", NoteController, except: [:new, :edit]) resources("/event_subscriptions", EventSubscriptionController, except: [:new, :edit]) resources("/tags", TagController, except: [:new, :edit]) resources("/browser_sessions", BrowserSessionController, except: [:create, :new, :edit]) resources("/personal_api_keys", PersonalApiKeyController, except: [:new, :edit, :update]) resources("/canned_responses", CannedResponseController, except: [:new, :edit]) get("/slack_conversation_threads", SlackConversationThreadController, :index) get("/conversations/:conversation_id/previous", ConversationController, :previous) get("/conversations/:conversation_id/related", ConversationController, :related) post("/conversations/:conversation_id/share", ConversationController, :share) post("/conversations/:conversation_id/tags", ConversationController, :add_tag) delete("/conversations/:conversation_id/tags/:tag_id", ConversationController, :remove_tag) post("/customers/:customer_id/tags", 
CustomerController, :add_tag) delete("/customers/:customer_id/tags/:tag_id", CustomerController, :remove_tag) post("/event_subscriptions/verify", EventSubscriptionController, :verify) end scope "/api/v1", ChatApiWeb do pipe_through([:public_api, :api_protected]) get("/me", SessionController, :me) resources("/messages", MessageController, except: [:new, :edit]) resources("/conversations", ConversationController, except: [:new, :edit]) resources("/customers", CustomerController, except: [:new, :edit]) end # Enables LiveDashboard only for development # # If you want to use the LiveDashboard in production, you should put # it behind authentication and allow only admins to access it. # If your application does not have an admins-only section yet, # you can use Plug.BasicAuth to set up some basic authentication # as long as you are also using SSL (which you should anyway). if Mix.env() in [:dev, :test] do import Phoenix.LiveDashboard.Router scope "/" do pipe_through([:fetch_session, :protect_from_forgery]) live_dashboard("/dashboard", metrics: ChatApiWeb.Telemetry) end end scope "/", ChatApiWeb do pipe_through(:browser) get("/", PageController, :index) # TODO: move somewhere else? get("/google/auth", GoogleController, :index) # Fallback to index, which renders React app get("/*path", PageController, :index) end def swagger_info do %{ info: %{ version: "1.0", title: "Papercups API" } } end end
43.652632
97
0.719074
9ec9e3299c663f9fb1229dbc28e9a505f3879cdc
51
exs
Elixir
bench/bench_helper.exs
elixir-bench/demo
fffdb4ba4728649ffb74e8cd8588151ddc3a1188
[ "Apache-2.0" ]
null
null
null
bench/bench_helper.exs
elixir-bench/demo
fffdb4ba4728649ffb74e8cd8588151ddc3a1188
[ "Apache-2.0" ]
null
null
null
bench/bench_helper.exs
elixir-bench/demo
fffdb4ba4728649ffb74e8cd8588151ddc3a1188
[ "Apache-2.0" ]
null
null
null
Code.load_file("benchmarks/flat_map.exs", __DIR__)
25.5
50
0.803922
9ec9ff5c61d43835f04742c4b9bda6f64710c00a
2,687
ex
Elixir
clients/health_care/lib/google_api/health_care/v1beta1/model/info_type_config.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
null
null
null
clients/health_care/lib/google_api/health_care/v1beta1/model/info_type_config.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
1
2020-12-18T09:25:12.000Z
2020-12-18T09:25:12.000Z
clients/health_care/lib/google_api/health_care/v1beta1/model/info_type_config.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
1
2020-10-04T10:12:44.000Z
2020-10-04T10:12:44.000Z
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.HealthCare.V1beta1.Model.InfoTypeConfig do @moduledoc """ Specifies how to use infoTypes for evaluation. For example, a user might only want to evaluate `PERSON`, `LOCATION`, and `AGE`. ## Attributes * `evaluateList` (*type:* `GoogleApi.HealthCare.V1beta1.Model.FilterList.t`, *default:* `nil`) - * `ignoreList` (*type:* `GoogleApi.HealthCare.V1beta1.Model.FilterList.t`, *default:* `nil`) - * `strictMatching` (*type:* `boolean()`, *default:* `nil`) - If `TRUE`, infoTypes described by `filter` are used for evaluation. Otherwise, infoTypes are not considered for evaluation. For example: * Annotated text: "Toronto is a location" * Finding 1: `{"infoType": "PERSON", "quote": "Toronto", "start": 0, "end": 7}` * Finding 2: `{"infoType": "CITY", "quote": "Toronto", "start": 0, "end": 7}` * Finding 3: `{}` * Ground truth: `{"infoType": "LOCATION", "quote": "Toronto", "start": 0, "end": 7}` When `strict_matching` is `TRUE`: * Finding 1: 1 false positive * Finding 2: 1 false positive * Finding 3: 1 false negative When `strict_matching` is `FALSE`: * Finding 1: 1 true positive * Finding 2: 1 true positive * Finding 3: 1 false negative """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :evaluateList => GoogleApi.HealthCare.V1beta1.Model.FilterList.t(), :ignoreList => GoogleApi.HealthCare.V1beta1.Model.FilterList.t(), :strictMatching => boolean() } field(:evaluateList, as: GoogleApi.HealthCare.V1beta1.Model.FilterList) field(:ignoreList, as: GoogleApi.HealthCare.V1beta1.Model.FilterList) field(:strictMatching) end defimpl Poison.Decoder, for: GoogleApi.HealthCare.V1beta1.Model.InfoTypeConfig do def decode(value, options) do GoogleApi.HealthCare.V1beta1.Model.InfoTypeConfig.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.HealthCare.V1beta1.Model.InfoTypeConfig do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
50.698113
751
0.719017
9eca3ca4ea8423af1984f4605e94c36ca77e7780
331
ex
Elixir
lib/application.ex
geonnave/felix
f770af9db656978450ae3cf75573559957f203c8
[ "MIT" ]
5
2019-02-10T03:33:23.000Z
2019-02-11T12:25:50.000Z
lib/application.ex
geonnave/felix
f770af9db656978450ae3cf75573559957f203c8
[ "MIT" ]
null
null
null
lib/application.ex
geonnave/felix
f770af9db656978450ae3cf75573559957f203c8
[ "MIT" ]
null
null
null
defmodule Felix.Application do
  use Application

  def start(_type, _args) do
    import Supervisor.Spec, warn: false

    children = [
      {Felix.Server, 2222},
      {Task.Supervisor, name: Felix.Handler.TaskSupervisor},
      {ForceApp.People, []}
    ]

    Supervisor.start_link(children, strategy: :one_for_one)
  end
end
20.6875
60
0.676737
9eca477fcdfc56d338fd734f04608b158deacf3e
1,056
ex
Elixir
lib/reactive/raw_db_access.ex
ReactiveWeb/reactive_db
764eda0b59156fd92fd424141b050937dcc8983a
[ "MIT" ]
1
2016-02-16T11:48:37.000Z
2016-02-16T11:48:37.000Z
lib/reactive/raw_db_access.ex
ReactiveWeb/reactive_db
764eda0b59156fd92fd424141b050937dcc8983a
[ "MIT" ]
null
null
null
lib/reactive/raw_db_access.ex
ReactiveWeb/reactive_db
764eda0b59156fd92fd424141b050937dcc8983a
[ "MIT" ]
null
null
null
defmodule Reactive.RawDbAccess do
  defp get_db(name) do
    case name do
      [] -> Reactive.Entities.get_db()
    end
  end

  def binary_to_json(binary) do
    try do
      term = :erlang.binary_to_term(binary)

      %{
        type: "term",
        data: to_string(:lists.flatten(:io_lib.format("~80tp", [term])))
      }
    rescue
      e ->
        %{
          type: "string",
          data: binary
        }
    end
  end

  def api_request(:scan, [_ | db_name], _contexts, args) do
    rargs =
      Map.put(args, :fetch,
        case Map.get(args, :fetch, "key_value") do
          "key_value" -> :key_value
          "key" -> :key
          "value" -> :value
        end)

    db = get_db(db_name)
    res = Reactive.Db.scan(db, rargs)

    case Map.get(rargs, :fetch, "key_value") do
      :key_value -> Enum.map(res, fn {x, y} -> %{key: x, value: binary_to_json(y)} end)
      :key -> Enum.map(res, fn x -> %{key: x} end)
      :value -> Enum.map(res, fn x -> %{value: binary_to_json(x)} end)
    end
  end

  def api_request(:delete_scan, [_ | db_name], _contexts, args) do
    db = get_db(db_name)
    Reactive.Db.delete_scan(db, args)
  end
end
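# Editor's note (hedged example): binary_to_json/1 distinguishes Erlang term binaries
# from plain strings; the inputs below are illustrative.
Reactive.RawDbAccess.binary_to_json(:erlang.term_to_binary(%{a: 1}))
#=> %{type: "term", data: ...}  (the term rendered via :io_lib.format/2)

Reactive.RawDbAccess.binary_to_json("just a string")
#=> %{type: "string", data: "just a string"}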
26.4
89
0.609848
9eca4ab3be4ff63b853357a474c6336807b9bd06
5,704
ex
Elixir
lib/neo4j_sips_models/model.ex
szTheory/neo4j_sips_models
70c7ad45da4aec97dc56dcf951df878b9e801e47
[ "MIT" ]
8
2016-04-22T03:13:02.000Z
2021-01-01T02:49:28.000Z
lib/neo4j_sips_models/model.ex
szTheory/neo4j_sips_models
70c7ad45da4aec97dc56dcf951df878b9e801e47
[ "MIT" ]
2
2016-10-06T07:20:19.000Z
2020-03-05T02:04:52.000Z
lib/neo4j_sips_models/model.ex
szTheory/neo4j_sips_models
70c7ad45da4aec97dc56dcf951df878b9e801e47
[ "MIT" ]
5
2016-07-21T04:31:24.000Z
2020-03-03T20:23:01.000Z
defmodule Neo4j.Sips.Model do @doc false @moduledoc """ Base class for the Neo4j.Sips models. """ defmacro __using__(_opts) do quote do import Kernel, except: [def: 1, def: 2, defp: 1, defp: 2] import Neo4j.Sips.Model Module.register_attribute(__MODULE__ , :fields , accumulate: true) Module.register_attribute(__MODULE__ , :relationships , accumulate: true) Module.register_attribute(__MODULE__ , :functions , accumulate: true) Module.register_attribute(__MODULE__ , :callbacks , accumulate: true) Module.register_attribute(__MODULE__ , :validation_functions , accumulate: true) @label "#{Mix.env |> Atom.to_string |> String.capitalize}:#{String.replace(Macro.to_string(__MODULE__), ".", ":")}" @before_compile Neo4j.Sips.Model field :id, type: :integer field :errors, transient: true field :created_at, type: :date field :updated_at, type: :date field :validated, type: :boolean, default: false, transient: true field :enable_validations, type: :boolean, default: true, transient: true end end @doc false defmacro __before_compile__(env) do metadata = Neo4j.Sips.Models.Metadata.new(env.module) quote do unquote Neo4j.Sips.Models.Struct.generate(metadata) unquote Neo4j.Sips.Models.ParseNodeMethod.generate(metadata) unquote Neo4j.Sips.Models.BuildMethod.generate(metadata) unquote Neo4j.Sips.Models.SaveMethod.generate(metadata) unquote Neo4j.Sips.Models.CreateMethod.generate(metadata) unquote Neo4j.Sips.Models.UpdateMethod.generate(metadata) unquote Neo4j.Sips.Models.FindMethod.generate(metadata) unquote Neo4j.Sips.Models.DeleteMethod.generate(metadata) unquote Neo4j.Sips.Models.Serialization.generate(metadata) unquote Neo4j.Sips.Models.Validations.generate(metadata) @doc """ returns the label of the model """ def label do @label end @doc """ returns the metadata for the model """ def metadata do unquote Macro.escape(metadata) end unquote generate_functions(metadata.functions) end end defp generate_functions(functions) do Enum.map functions, fn {:public, call, expr} -> quote do Kernel.def unquote(call), unquote(expr) end {:private, call, expr} -> quote do Kernel.defp unquote(call), unquote(expr) end end end defmacro def(call, expr \\ nil) do call = Macro.escape(call) expr = Macro.escape(expr) quote do @functions {:public, unquote(call), unquote(expr)} end end defmacro defp(call, expr \\ nil) do call = Macro.escape(call) expr = Macro.escape(expr) quote do @functions {:private, unquote(call), unquote(expr)} end end @doc """ Defines a field for the model ## Example defmodule User do use Neo4j.Sips.Model field :name field :email end """ defmacro field(name, attributes \\ []) do quote do @fields {unquote(name), unquote(attributes)} end end @doc """ Defines a relationship for the model ## Example defmodule User do use Neo4j.Sips.Model field :name field :email relationship :FRIEND_OF, User end """ defmacro relationship(name, related_model) do field_name = name |> Atom.to_string |> String.downcase |> String.to_atom field_attributes = [relationship: true, type: :integer] quote do @fields {unquote(field_name), unquote(field_attributes)} @relationships {unquote(name), unquote(related_model)} end end defmacro validate_with(method_name) when is_atom(method_name) do quote do @validation_functions unquote(method_name) end end @doc """ declare a before_save callback """ defmacro before_save(method_name) when is_atom(method_name) do quote do @callbacks {:before_save, unquote(method_name)} end end @doc """ declare a before_create callback """ defmacro before_create(method_name) when is_atom(method_name) do quote do 
@callbacks {:before_create, unquote(method_name)} end end @doc """ declare a before_update callback """ defmacro before_update(method_name) when is_atom(method_name) do quote do @callbacks {:before_update, unquote(method_name)} end end @doc """ declare a before_validation callback """ defmacro before_validation(method_name) when is_atom(method_name) do quote do @callbacks {:before_validation, unquote(method_name)} end end @doc """ declare an after_save callback """ defmacro after_save(method_name) when is_atom(method_name) do quote do @callbacks {:after_save, unquote(method_name)} end end @doc """ declare an after_create callback """ defmacro after_create(method_name) when is_atom(method_name) do quote do @callbacks {:after_create, unquote(method_name)} end end @doc """ declare an after_update callback """ defmacro after_update(method_name) when is_atom(method_name) do quote do @callbacks {:after_update, unquote(method_name)} end end @doc """ declare an after_validation callback """ defmacro after_validation(method_name) when is_atom(method_name) do quote do @callbacks {:after_validation, unquote(method_name)} end end @doc """ declare an after_find callback """ defmacro after_find(method_name) when is_atom(method_name) do quote do @callbacks {:after_find, unquote(method_name)} end end end
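# Editor's note (hedged example): defining a model with the DSL above, mirroring the
# example given in the module's own @doc strings; the User module is illustrative.
defmodule User do
  use Neo4j.Sips.Model

  field :name
  field :email
  relationship :FRIEND_OF, User
end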
26.045662
121
0.667076
9eca4cb9ac3ebc93bdae05f08f935c9be50113b3
67
ex
Elixir
jobsPortalService/lib/models/company.ex
andraspatka/jobportal-ms
006c8ca212f88566113c4b5c00dfe1d4e421c034
[ "MIT" ]
1
2021-05-25T18:24:27.000Z
2021-05-25T18:24:27.000Z
jobsPortalService/lib/models/company.ex
andraspatka/jobportal-ms
006c8ca212f88566113c4b5c00dfe1d4e421c034
[ "MIT" ]
1
2021-05-23T09:50:10.000Z
2021-05-23T09:50:10.000Z
jobsPortalService/lib/models/company.ex
andraspatka/jobportal-ms
006c8ca212f88566113c4b5c00dfe1d4e421c034
[ "MIT" ]
null
null
null
defmodule Models.Company do
  defstruct name: nil, admin: nil
end
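# Editor's note (hedged example): the struct defaults both fields to nil; the values
# below are illustrative.
%Models.Company{name: "Acme Corp", admin: "alice@example.com"}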
22.333333
35
0.761194
9eca8561c0429cd5259a22b83f620c62c130f783
2,218
ex
Elixir
lib/checker_mal_web/controllers/unapproved_controller.ex
Hiyori-API/checker_mal
c59f38c4ce72776b401f823dfb2000934acfc509
[ "MIT" ]
10
2020-06-12T18:36:58.000Z
2022-02-20T11:07:49.000Z
lib/checker_mal_web/controllers/unapproved_controller.ex
Hiyori-API/checker_mal
c59f38c4ce72776b401f823dfb2000934acfc509
[ "MIT" ]
7
2020-05-08T06:03:08.000Z
2022-01-24T02:57:16.000Z
lib/checker_mal_web/controllers/unapproved_controller.ex
Hiyori-API/checker_mal
c59f38c4ce72776b401f823dfb2000934acfc509
[ "MIT" ]
1
2020-12-03T03:49:27.000Z
2020-12-03T03:49:27.000Z
defmodule CheckerMalWeb.UnapprovedController do use CheckerMalWeb, :controller require Logger @html_basepath Application.get_env(:checker_mal, :unapproved_html_basepath, "/mal_unapproved") @error_msg "Page is currently being updated, this page will automatically refresh when its done..." defp get_data(type, conn) do stype = Atom.to_string(type) # last_updated_at returns :error when server is still booting data = case GenServer.call(CheckerMal.Core.Unapproved, :last_updated_at) do {:ok, last_updated_naive} -> ids = try do GenServer.call(CheckerMal.UnapprovedHtml.Cache, type) catch :exit, {:timeout, _err} -> [] end %{ :since_update_mins => div(NaiveDateTime.diff(NaiveDateTime.utc_now(), last_updated_naive), 60), :ids => ids } {:error, :uninitialized} -> %{ids: [], refresh_equiv: true} end # flash error if page is initializing/updating conn = cond do Enum.empty?(data[:ids]) -> conn |> put_flash(:error, @error_msg) true -> conn end # get entry info (name/type/nsfw) entryinfo = GenServer.call( CheckerMal.UnapprovedHtml.EntryCache, {:get_info, stype, data[:ids]}, :timer.seconds(10) ) |> Map.to_list() |> Enum.map(fn {id, {name, etype, nsfw}} -> {id, %{ :name => name, :type => etype, :nsfw => nsfw }} end) |> Enum.into(Map.new()) # map so that its easier to use in eex data = Map.put(data, :info, entryinfo) |> Map.put( :title, "Unapproved MAL Entries - #{stype |> String.capitalize()}" ) |> Map.put(:basepath, @html_basepath) |> Map.put(:type, stype) {conn, data} end def controller(conn, type) when is_atom(type) do {conn, data} = get_data(type, conn) render(conn, "unapproved.html", data: data) end def anime(conn, _params), do: controller(conn, :anime) def manga(conn, _params), do: controller(conn, :manga) end
26.094118
101
0.565374
9eca9ade483f33992530d2588dd0b52cae6d4d77
958
ex
Elixir
lib/serum.ex
igalic/Serum
352f80ebcc8ed160230e7775aac3590787c912c6
[ "MIT" ]
1
2021-08-03T11:16:36.000Z
2021-08-03T11:16:36.000Z
lib/serum.ex
igalic/Serum
352f80ebcc8ed160230e7775aac3590787c912c6
[ "MIT" ]
null
null
null
lib/serum.ex
igalic/Serum
352f80ebcc8ed160230e7775aac3590787c912c6
[ "MIT" ]
null
null
null
defmodule Serum do
  @moduledoc """
  Defines Serum OTP application.

  Serum is a simple static website generator written in Elixir programming
  language. The goal of this project is to provide the way to create awesome
  static websites with little effort.

  This documentation is for developers and advanced users. For the getting
  started guide and the user manual, please visit
  [the official Serum website](https://dalgona.github.io/Serum).
  """

  use Application

  alias Serum.GlobalBindings
  alias Serum.Plugin
  alias Serum.Template

  @doc """
  Starts the `:serum` application.

  This starts a supervisor process which manages some children maintaining
  states or data required for execution of Serum.
  """
  def start(_type, _args) do
    children = [
      Template,
      GlobalBindings,
      Plugin
    ]

    opts = [strategy: :one_for_one, name: Serum.Supervisor]
    {:ok, _pid} = Supervisor.start_link(children, opts)
  end
end
26.611111
76
0.72547
9ecab0a0228673e32f900079777e4883e81e2e5c
938
ex
Elixir
lib/mail_slurp_api/model/create_webhook_options.ex
sumup-bank/mailslurp-client-elixir
87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8
[ "MIT" ]
null
null
null
lib/mail_slurp_api/model/create_webhook_options.ex
sumup-bank/mailslurp-client-elixir
87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8
[ "MIT" ]
null
null
null
lib/mail_slurp_api/model/create_webhook_options.ex
sumup-bank/mailslurp-client-elixir
87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8
[ "MIT" ]
null
null
null
# NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.

defmodule MailSlurpAPI.Model.CreateWebhookOptions do
  @moduledoc """
  Options for creating a webhook. Webhooks can be attached to inboxes and MailSlurp will POST a webhook payload to the URL specified whenever the inbox receives an email. Webhooks are great for processing many inbound emails.
  """

  @derive [Poison.Encoder]
  defstruct [
    :"basicAuth",
    :"name",
    :"url"
  ]

  @type t :: %__MODULE__{
    :"basicAuth" => BasicAuthOptions | nil,
    :"name" => String.t | nil,
    :"url" => String.t | nil
  }
end

defimpl Poison.Decoder, for: MailSlurpAPI.Model.CreateWebhookOptions do
  import MailSlurpAPI.Deserializer

  def decode(value, options) do
    value
    |> deserialize(:"basicAuth", :struct, MailSlurpAPI.Model.BasicAuthOptions, options)
  end
end
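# Editor's note (hedged example): since the struct derives Poison.Encoder, it can be
# serialized as the webhook-creation request body; the name and url are illustrative.
%MailSlurpAPI.Model.CreateWebhookOptions{
  name: "inbound-email-hook",
  url: "https://example.com/webhooks/mailslurp"
}
|> Poison.encode!()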
29.3125
225
0.716418
9ecadfa0c09bc7ec9381eb09d2e2f971b6c5f681
245
ex
Elixir
lib/hello_web/controllers/hello_controller.ex
loretoparisi/phoenix-elixir-boilerplate
1fd0afa7c1cefbb82d3080f151fef269332d8e14
[ "MIT" ]
4
2019-04-07T17:43:10.000Z
2020-07-07T21:32:44.000Z
lib/hello_web/controllers/hello_controller.ex
loretoparisi/phoenix-elixir-boilerplate
1fd0afa7c1cefbb82d3080f151fef269332d8e14
[ "MIT" ]
null
null
null
lib/hello_web/controllers/hello_controller.ex
loretoparisi/phoenix-elixir-boilerplate
1fd0afa7c1cefbb82d3080f151fef269332d8e14
[ "MIT" ]
3
2019-09-09T15:21:41.000Z
2020-07-07T21:35:07.000Z
defmodule HelloWeb.HelloController do
  use HelloWeb, :controller

  def index(conn, _params) do
    render(conn, "index.html")
  end

  def show(conn, %{"messenger" => messenger}) do
    render(conn, "show.html", messenger: messenger)
  end
end
22.272727
51
0.697959
9ecaedcbae86eb25a5be0bfdab61c56447032aac
347
ex
Elixir
lib/tradehub/extended_key/binary.ex
anhmv/tradehub-api-elixir
6ec87c2b07188d4140506011e2b28db4d372ac6d
[ "MIT" ]
5
2021-05-04T16:54:25.000Z
2021-12-15T06:53:24.000Z
lib/tradehub/extended_key/binary.ex
anhmv/tradehub-api-elixir
6ec87c2b07188d4140506011e2b28db4d372ac6d
[ "MIT" ]
5
2021-05-19T04:49:00.000Z
2021-06-01T13:36:50.000Z
lib/tradehub/extended_key/binary.ex
anhmv/tradehub-elixir
6ec87c2b07188d4140506011e2b28db4d372ac6d
[ "MIT" ]
null
null
null
defmodule Tradehub.ExtendedKey.Binary do
  @moduledoc false

  def take(binary, count)
      when is_binary(binary) and is_integer(count) and count in 0..byte_size(binary) do
    <<bin::bytes-size(count), _rest::binary>> = binary
    bin
  end

  def unsigned_sum(bin1, bin2), do: :binary.decode_unsigned(bin1) + :binary.decode_unsigned(bin2)
end
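# Editor's note (hedged example): take/2 returns the first `count` bytes and
# unsigned_sum/2 adds the big-endian unsigned values of two binaries.
Tradehub.ExtendedKey.Binary.take(<<1, 2, 3, 4>>, 2)
#=> <<1, 2>>

Tradehub.ExtendedKey.Binary.unsigned_sum(<<1, 0>>, <<5>>)
#=> 261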
28.916667
97
0.723343
9ecb02d1ed5a1fcb732743925ac7e56a13d1edd7
2,365
exs
Elixir
test/ex_venture/rooms_test.exs
oestrich/exventure
8e8062f9411656f4ec49cc2c35a1e4a9dd5d4190
[ "MIT" ]
610
2017-08-09T15:20:25.000Z
2022-03-27T15:49:07.000Z
test/ex_venture/rooms_test.exs
oestrich/ex_mud
8e8062f9411656f4ec49cc2c35a1e4a9dd5d4190
[ "MIT" ]
69
2017-09-23T04:02:30.000Z
2022-03-19T21:08:21.000Z
test/ex_venture/rooms_test.exs
oestrich/ex_mud
8e8062f9411656f4ec49cc2c35a1e4a9dd5d4190
[ "MIT" ]
85
2017-09-23T04:07:11.000Z
2021-11-20T06:44:56.000Z
defmodule ExVenture.RoomsTest do use ExVenture.DataCase alias ExVenture.Rooms describe "creating rooms" do test "successfully" do {:ok, zone} = TestHelpers.create_zone() {:ok, room} = Rooms.create(zone, %{ key: "room", name: "Room", description: "Description", listen: "Listen text", x: 0, y: 0, z: 0 }) assert room.name == "Room" assert room.description == "Description" end test "unsuccessful" do {:ok, zone} = TestHelpers.create_zone() {:error, changeset} = Rooms.create(zone, %{ key: "room", name: nil, description: "Description", listen: "Listen text", x: 0, y: 0, z: 0 }) assert changeset.errors[:name] end end describe "updating rooms - not live" do test "successfully" do {:ok, zone} = TestHelpers.create_zone() {:ok, room} = TestHelpers.create_room(zone, %{name: "Room"}) {:ok, room} = Rooms.update(room, %{ name: "New Room" }) assert room.name == "New Room" end test "unsuccessful" do {:ok, zone} = TestHelpers.create_zone() {:ok, room} = TestHelpers.create_room(zone, %{name: "Room"}) {:error, changeset} = Rooms.update(room, %{ name: nil }) assert changeset.errors[:name] end end describe "updating rooms - live" do test "successfully" do {:ok, zone} = TestHelpers.create_zone() {:ok, room} = TestHelpers.create_room(zone, %{name: "Room"}) {:ok, room} = TestHelpers.publish_room(room) {:ok, room} = Rooms.update(room, %{ name: "New Room" }) assert room.name == "Room" assert Enum.count(room.staged_changes) end test "unsuccessful" do {:ok, zone} = TestHelpers.create_zone() {:ok, room} = TestHelpers.create_room(zone, %{name: "Room"}) {:ok, room} = TestHelpers.publish_room(room) {:error, changeset} = Rooms.update(room, %{ name: nil }) assert changeset.errors[:name] end end describe "available rooms" do test "pulls in the available SVGs names" do assert Enum.member?(Rooms.available_map_icons(), "shop") end end end
22.740385
66
0.544609
9ecb381ae2c0929e3ee05d1d1801b5f9765b32ee
8,109
ex
Elixir
clients/logging/lib/google_api/logging/v2/model/log_entry.ex
mocknen/elixir-google-api
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
[ "Apache-2.0" ]
null
null
null
clients/logging/lib/google_api/logging/v2/model/log_entry.ex
mocknen/elixir-google-api
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
[ "Apache-2.0" ]
null
null
null
clients/logging/lib/google_api/logging/v2/model/log_entry.ex
mocknen/elixir-google-api
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
[ "Apache-2.0" ]
null
null
null
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an &quot;AS IS&quot; BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This class is auto generated by the swagger code generator program. # https://github.com/swagger-api/swagger-codegen.git # Do not edit the class manually. defmodule GoogleApi.Logging.V2.Model.LogEntry do @moduledoc """ An individual entry in a log. ## Attributes - httpRequest (HttpRequest): Optional. Information about the HTTP request associated with this log entry, if applicable. Defaults to: `null`. - insertId (String.t): Optional. A unique identifier for the log entry. If you provide a value, then Logging considers other log entries in the same project, with the same timestamp, and with the same insert_id to be duplicates which can be removed. If omitted in new log entries, then Logging assigns its own unique identifier. The insert_id is also used to order log entries that have the same timestamp value. Defaults to: `null`. - jsonPayload (%{optional(String.t) &#x3D;&gt; String.t}): The log entry payload, represented as a structure that is expressed as a JSON object. Defaults to: `null`. - labels (%{optional(String.t) &#x3D;&gt; String.t}): Optional. A set of user-defined (key, value) data that provides additional information about the log entry. Defaults to: `null`. - logName (String.t): Required. The resource name of the log to which this log entry belongs: \&quot;projects/[PROJECT_ID]/logs/[LOG_ID]\&quot; \&quot;organizations/[ORGANIZATION_ID]/logs/[LOG_ID]\&quot; \&quot;billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]\&quot; \&quot;folders/[FOLDER_ID]/logs/[LOG_ID]\&quot; A project number may optionally be used in place of PROJECT_ID. The project number is translated to its corresponding PROJECT_ID internally and the log_name field will contain PROJECT_ID in queries and exports.[LOG_ID] must be URL-encoded within log_name. Example: \&quot;organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity\&quot;. [LOG_ID] must be less than 512 characters long and can only include the following characters: upper and lower case alphanumeric characters, forward-slash, underscore, hyphen, and period.For backward compatibility, if log_name begins with a forward-slash, such as /projects/..., then the log entry is ingested as usual but the forward-slash is removed. Listing the log entry will not show the leading slash and filtering for a log name with a leading slash will never return any results. Defaults to: `null`. - metadata (MonitoredResourceMetadata): Output only. Additional metadata about the monitored resource. Only k8s_container, k8s_pod, and k8s_node MonitoredResources have this field populated. Defaults to: `null`. - operation (LogEntryOperation): Optional. Information about an operation associated with the log entry, if applicable. Defaults to: `null`. - protoPayload (%{optional(String.t) &#x3D;&gt; String.t}): The log entry payload, represented as a protocol buffer. Some Google Cloud Platform services use this field for their log entry payloads. Defaults to: `null`. 
- receiveTimestamp (DateTime.t): Output only. The time the log entry was received by Logging. Defaults to: `null`. - resource (MonitoredResource): Required. The primary monitored resource associated with this log entry. Example: a log entry that reports a database error would be associated with the monitored resource designating the particular database that reported the error. Defaults to: `null`. - severity (String.t): Optional. The severity of the log entry. The default value is LogSeverity.DEFAULT. Defaults to: `null`. - Enum - one of [DEFAULT, DEBUG, INFO, NOTICE, WARNING, ERROR, CRITICAL, ALERT, EMERGENCY] - sourceLocation (LogEntrySourceLocation): Optional. Source code location information associated with the log entry, if any. Defaults to: `null`. - spanId (String.t): Optional. The span ID within the trace associated with the log entry. For Trace spans, this is the same format that the Trace API v2 uses: a 16-character hexadecimal encoding of an 8-byte array, such as &lt;code&gt;\&quot;000000000000004a\&quot;&lt;/code&gt;. Defaults to: `null`. - textPayload (String.t): The log entry payload, represented as a Unicode string (UTF-8). Defaults to: `null`. - timestamp (DateTime.t): Optional. The time the event described by the log entry occurred. This time is used to compute the log entry&#39;s age and to enforce the logs retention period. If this field is omitted in a new log entry, then Logging assigns it the current time. Timestamps have nanosecond accuracy, but trailing zeros in the fractional seconds might be omitted when the timestamp is displayed.Incoming log entries should have timestamps that are no more than the logs retention period in the past, and no more than 24 hours in the future. Log entries outside those time boundaries will not be available when calling entries.list, but those log entries can still be exported with LogSinks. Defaults to: `null`. - trace (String.t): Optional. Resource name of the trace associated with the log entry, if any. If it contains a relative resource name, the name is assumed to be relative to //tracing.googleapis.com. Example: projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824 Defaults to: `null`. - traceSampled (boolean()): Optional. The sampling decision of the trace associated with the log entry. True means that the trace resource name in the trace field was sampled for storage in a trace backend. False means that the trace was not sampled for storage when this log entry was written, or the sampling decision was unknown at the time. A non-sampled trace value is still useful as a request correlation identifier. The default is False. Defaults to: `null`. 
""" use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :httpRequest => GoogleApi.Logging.V2.Model.HttpRequest.t(), :insertId => any(), :jsonPayload => map(), :labels => map(), :logName => any(), :metadata => GoogleApi.Logging.V2.Model.MonitoredResourceMetadata.t(), :operation => GoogleApi.Logging.V2.Model.LogEntryOperation.t(), :protoPayload => map(), :receiveTimestamp => DateTime.t(), :resource => GoogleApi.Logging.V2.Model.MonitoredResource.t(), :severity => any(), :sourceLocation => GoogleApi.Logging.V2.Model.LogEntrySourceLocation.t(), :spanId => any(), :textPayload => any(), :timestamp => DateTime.t(), :trace => any(), :traceSampled => any() } field(:httpRequest, as: GoogleApi.Logging.V2.Model.HttpRequest) field(:insertId) field(:jsonPayload, type: :map) field(:labels, type: :map) field(:logName) field(:metadata, as: GoogleApi.Logging.V2.Model.MonitoredResourceMetadata) field(:operation, as: GoogleApi.Logging.V2.Model.LogEntryOperation) field(:protoPayload, type: :map) field(:receiveTimestamp, as: DateTime) field(:resource, as: GoogleApi.Logging.V2.Model.MonitoredResource) field(:severity) field(:sourceLocation, as: GoogleApi.Logging.V2.Model.LogEntrySourceLocation) field(:spanId) field(:textPayload) field(:timestamp, as: DateTime) field(:trace) field(:traceSampled) end defimpl Poison.Decoder, for: GoogleApi.Logging.V2.Model.LogEntry do def decode(value, options) do GoogleApi.Logging.V2.Model.LogEntry.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.Logging.V2.Model.LogEntry do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
83.597938
1,185
0.750031
9ecb4ebe0dda1ab1c755846bd5ee53182e2aac7e
687
exs
Elixir
test/vutuv_web/controllers/api/vcard_controller_test.exs
vutuv/vutuv
174706cdaf28cef24e1cc06bec0884c25f2412be
[ "MIT" ]
309
2016-05-03T17:16:23.000Z
2022-03-01T09:30:22.000Z
test/vutuv_web/controllers/api/vcard_controller_test.exs
vutuv/vutuv
174706cdaf28cef24e1cc06bec0884c25f2412be
[ "MIT" ]
662
2016-04-27T07:45:18.000Z
2022-01-05T07:29:19.000Z
test/vutuv_web/controllers/api/vcard_controller_test.exs
vutuv/vutuv
174706cdaf28cef24e1cc06bec0884c25f2412be
[ "MIT" ]
40
2016-04-27T07:46:22.000Z
2021-12-31T05:54:34.000Z
defmodule VutuvWeb.Api.VcardControllerTest do
  use VutuvWeb.ConnCase

  import Vutuv.Factory

  test "shows vcard for valid user", %{conn: conn} do
    user = insert(:user)
    [email_address] = user.email_addresses
    conn = get(conn, Routes.api_user_vcard_path(conn, :vcard, user))
    assert conn.status == 200
    vcard = conn.resp_body
    assert vcard =~ "BEGIN:VCARD\nVERSION:3.0"
    assert vcard =~ "#{user.full_name}"
    assert vcard =~ "#{email_address.value}"
    assert vcard =~ "\nEND:VCARD"
  end

  test "returns 404 when no user found", %{conn: conn} do
    assert_error_sent 404, fn ->
      get(conn, Routes.api_user_vcard_path(conn, :vcard, -1))
    end
  end
end
28.625
68
0.676856
9ecb6f5bfb8a7e8bc37ea506c3b68057eee9eb78
61
ex
Elixir
elixir/auction_umbrella/apps/auction_web/lib/auction_web/views/user_view.ex
kendru/darwin
67096eb900bc36d30bf5ce36d38aaa6db86a29a2
[ "MIT" ]
5
2021-11-17T04:37:39.000Z
2022-01-02T06:43:23.000Z
elixir/auction_umbrella/apps/auction_web/lib/auction_web/views/user_view.ex
kendru/darwin
67096eb900bc36d30bf5ce36d38aaa6db86a29a2
[ "MIT" ]
3
2021-05-21T21:50:11.000Z
2021-11-21T14:34:53.000Z
elixir/auction_umbrella/apps/auction_web/lib/auction_web/views/user_view.ex
kendru/darwin
67096eb900bc36d30bf5ce36d38aaa6db86a29a2
[ "MIT" ]
2
2021-11-16T14:14:05.000Z
2021-12-31T02:01:06.000Z
defmodule AuctionWeb.UserView do
  use AuctionWeb, :view
end
15.25
32
0.803279
9ecb8e9fab3026e2896587388f6264e40c0c3255
894
ex
Elixir
bowling_game/lib/iteration_02/bowling_game/day09.ex
alex-dukhno/elixir-tdd-katas
57e25fc275c4274c889f2b3760276cc8a393de9e
[ "MIT" ]
null
null
null
bowling_game/lib/iteration_02/bowling_game/day09.ex
alex-dukhno/elixir-tdd-katas
57e25fc275c4274c889f2b3760276cc8a393de9e
[ "MIT" ]
null
null
null
bowling_game/lib/iteration_02/bowling_game/day09.ex
alex-dukhno/elixir-tdd-katas
57e25fc275c4274c889f2b3760276cc8a393de9e
[ "MIT" ]
null
null
null
defmodule BowlingGame.Day09 do
  def new_game() do
    {:ok, pid} = Task.start_link(fn -> loop(%{rolls: []}) end)
    pid
  end

  defp loop(%{rolls: rolls}) do
    receive do
      {:score, caller} ->
        send caller, {:score, score(Enum.reverse(rolls), 1)}
        loop(%{rolls: rolls})

      {:roll, pins} ->
        loop(%{rolls: [pins | rolls]})
    end
  end

  defp score(_rolls, 11), do: 0

  defp score([10 | rest], index) do
    score(rest, index + 1) + 10 + (rest |> Enum.take(2) |> Enum.sum())
  end

  defp score([first, second | rest], index) do
    if first + second == 10,
      do: score(rest, index + 1) + 10 + List.first(rest),
      else: score(rest, index + 1) + first + second
  end

  def roll(game, pins) do
    send game, {:roll, pins}
    game
  end

  def score(game) do
    send game, {:score, self()}

    receive do
      {:score, score} -> score
    end
  end
end
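# Editor's note (hedged example): rolling a simple open-frame game; twenty rolls of
# three pins each should score 60.
game = BowlingGame.Day09.new_game()

game =
  Enum.reduce(1..20, game, fn _i, g ->
    BowlingGame.Day09.roll(g, 3)
  end)

BowlingGame.Day09.score(game)
#=> 60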
22.923077
70
0.558166
9ecb9a3ec16c55083088c353bdd45fcded237084
631
ex
Elixir
lib/jsonapi/plugs/format_required.ex
jnylen/jsonapi
eda4774c99689afadf5a6e8228b88bdb3a98dd86
[ "MIT" ]
null
null
null
lib/jsonapi/plugs/format_required.ex
jnylen/jsonapi
eda4774c99689afadf5a6e8228b88bdb3a98dd86
[ "MIT" ]
null
null
null
lib/jsonapi/plugs/format_required.ex
jnylen/jsonapi
eda4774c99689afadf5a6e8228b88bdb3a98dd86
[ "MIT" ]
null
null
null
defmodule JSONAPI.FormatRequired do
  @moduledoc """
  Enforces the JSONAPI format of {"data" => {"attributes" => ...}} for request bodies
  """

  import JSONAPI.ErrorView

  def init(opts), do: opts

  def call(%{method: method} = conn, _opts) when method in ["DELETE", "GET", "HEAD"], do: conn

  def call(%{params: %{"data" => %{"relationships" => _}}} = conn, _), do: conn

  def call(%{params: %{"data" => %{"attributes" => _}}} = conn, _), do: conn

  def call(%{params: %{"data" => _}} = conn, _),
    do: send_error(conn, missing_data_attributes_param())

  def call(conn, _), do: send_error(conn, missing_data_param())
end
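# Editor's note (hedged example): a minimal test sketch using Plug.Test, exercising
# only the first clause above (safe HTTP methods pass through unchanged); the test
# module name is illustrative.
defmodule JSONAPI.FormatRequiredExampleTest do
  use ExUnit.Case, async: true
  use Plug.Test

  test "GET requests are passed through untouched" do
    conn = conn(:get, "/posts")
    assert JSONAPI.FormatRequired.call(conn, []) == conn
  end
end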
33.210526
94
0.622821
9ecb9e21de03a6be77558ed9fac7ccb4c6794c0a
1,131
exs
Elixir
config/config.exs
undr/entitiex
c6666909290b4077b47659ce11891659226e3b88
[ "MIT" ]
null
null
null
config/config.exs
undr/entitiex
c6666909290b4077b47659ce11891659226e3b88
[ "MIT" ]
null
null
null
config/config.exs
undr/entitiex
c6666909290b4077b47659ce11891659226e3b88
[ "MIT" ]
null
null
null
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config

# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# third-party users, it should be done in your "mix.exs" file.

# You can configure your application as:
#
#     config :entitiex, key: :value
#
# and access this configuration in your application as:
#
#     Application.get_env(:entitiex, :key)
#
# You can also configure a third-party app:
#
#     config :logger, level: :info
#

# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{Mix.env()}.exs"
36.483871
73
0.751547
9ecba0ca482846c3ab7e5abf3c1728abd85dd74c
429
exs
Elixir
test/models/role_test.exs
melbystyle/jw_ministry_api
2065d628a84e829b805c71b7e73fb0bee3d0dd44
[ "Apache-2.0" ]
1
2017-06-19T18:18:11.000Z
2017-06-19T18:18:11.000Z
test/models/role_test.exs
melbystyle/jw_ministry_api
2065d628a84e829b805c71b7e73fb0bee3d0dd44
[ "Apache-2.0" ]
null
null
null
test/models/role_test.exs
melbystyle/jw_ministry_api
2065d628a84e829b805c71b7e73fb0bee3d0dd44
[ "Apache-2.0" ]
null
null
null
defmodule JwMinistryApi.RoleTest do
  use JwMinistryApi.ModelCase

  alias JwMinistryApi.Role

  @valid_attrs %{name: "some content"}
  @invalid_attrs %{}

  test "changeset with valid attributes" do
    changeset = Role.changeset(%Role{}, @valid_attrs)
    assert changeset.valid?
  end

  test "changeset with invalid attributes" do
    changeset = Role.changeset(%Role{}, @invalid_attrs)
    refute changeset.valid?
  end
end
22.578947
55
0.729604
9ecbb553e00d7b4987765cbd61b944011a5cb0a2
3,162
exs
Elixir
mix.exs
gmcintire/papercups
c518a787dfb44547d9a057a8782c40c42614062d
[ "MIT" ]
null
null
null
mix.exs
gmcintire/papercups
c518a787dfb44547d9a057a8782c40c42614062d
[ "MIT" ]
null
null
null
mix.exs
gmcintire/papercups
c518a787dfb44547d9a057a8782c40c42614062d
[ "MIT" ]
null
null
null
defmodule ChatApi.MixProject do
  use Mix.Project

  def project do
    [
      app: :chat_api,
      version: "0.1.0",
      elixir: "~> 1.10",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [:phoenix, :gettext] ++ Mix.compilers() ++ [:phoenix_swagger],
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps(),
      releases: [
        papercups: [
          include_executables_for: [:unix],
          applications: [chat_api: :permanent]
        ]
      ]
    ]
  end

  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      mod: {ChatApi.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end

  # Specifies which paths to compile per environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:credo, "~> 1.4", only: [:dev, :test], runtime: false},
      {:dialyxir, "~> 1.0", only: [:dev], runtime: false},
      {:ex_machina, "~> 2.4", only: [:test]},
      {:mock, "~> 0.3.0", only: :test},
      {:customerio, "~> 0.2"},
      {:ex_aws, "~> 2.1"},
      {:ex_aws_s3, "~> 2.0"},
      {:ex_aws_lambda, "~> 2.0"},
      {:ex_aws_ses, "~> 2.0"},
      {:swoosh, "~> 1.0"},
      {:gen_smtp, "~> 0.13"},
      {:phoenix, "~> 1.5.5"},
      {:phoenix_ecto, "~> 4.1"},
      {:ecto_sql, "~> 3.4"},
      {:postgrex, ">= 0.0.0"},
      {:phoenix_live_dashboard, "~> 0.2.0"},
      {:telemetry_metrics, "~> 0.4"},
      {:telemetry_poller, "~> 0.4"},
      {:gettext, "~> 0.11"},
      {:tesla, "~> 1.3"},
      {:hackney, "~> 1.17"},
      {:jason, "~> 1.0"},
      {:joken, "~> 2.0"},
      {:plug_cowboy, "~> 2.0"},
      {:corsica, "~> 1.0"},
      {:pow, "~> 1.0.18"},
      {:stripity_stripe, "~> 2.0"},
      {:oban, "~> 2.1.0"},
      {:sentry, "8.0.0"},
      {:google_api_gmail, "~> 0.13"},
      {:oauth2, "~> 0.9"},
      {:mail, "~> 0.2"},
      {:phoenix_swagger, "~> 0.8"},
      {:uuid, "~> 1.1"},
      {:ex_json_schema, "~> 0.5"},
      {:pow_postgres_store, "~> 1.0.0-rc2"},
      {:tzdata, "~> 1.0.5"},
      {:scrivener_ecto, "~> 2.0"},
      {:floki, "~> 0.30.0"},
      {:paginator, "~> 1.0.3"},
      {:phoenix_pubsub_redis, "~> 3.0.0"},
      {:appsignal_phoenix, "~> 2.0.0"},
      {:earmark, "~> 1.4.15"},
      {:sweet_xml, "~> 0.7.1"},
      {:httpoison, "~> 1.8"},
      {:exponent_server_sdk, "~> 0.2.0"},
      {:mix_test_watch, "~> 1.0", only: :dev, runtime: false}
    ]
  end

  # Aliases are shortcuts or tasks specific to the current project.
  # For example, to install project dependencies and perform other setup tasks, run:
  #
  #     $ mix setup
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      setup: ["deps.get", "ecto.setup"],
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"]
    ]
  end
end
29.551402
84
0.506958
9ecbc2075e9b798d93c8462629afe68356a40074
38,693
ex
Elixir
lib/aws/generated/sts.ex
andrewhr/aws-elixir
861dc2fafca50a2b2f83badba4cdcb44b5b0c171
[ "Apache-2.0" ]
null
null
null
lib/aws/generated/sts.ex
andrewhr/aws-elixir
861dc2fafca50a2b2f83badba4cdcb44b5b0c171
[ "Apache-2.0" ]
null
null
null
lib/aws/generated/sts.ex
andrewhr/aws-elixir
861dc2fafca50a2b2f83badba4cdcb44b5b0c171
[ "Apache-2.0" ]
null
null
null
# WARNING: DO NOT EDIT, AUTO-GENERATED CODE! # See https://github.com/aws-beam/aws-codegen for more details. defmodule AWS.STS do @moduledoc """ Security Token Service Security Token Service (STS) enables you to request temporary, limited-privilege credentials for Identity and Access Management (IAM) users or for users that you authenticate (federated users). This guide provides descriptions of the STS API. For more information about using this service, see [Temporary Security Credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp.html). """ alias AWS.Client alias AWS.Request def metadata do %AWS.ServiceMetadata{ abbreviation: "AWS STS", api_version: "2011-06-15", content_type: "application/x-www-form-urlencoded", credential_scope: nil, endpoint_prefix: "sts", global?: false, protocol: "query", service_id: "STS", signature_version: "v4", signing_name: "sts", target_prefix: nil } end @doc """ Returns a set of temporary security credentials that you can use to access Amazon Web Services resources that you might not normally have access to. These temporary credentials consist of an access key ID, a secret access key, and a security token. Typically, you use `AssumeRole` within your account or for cross-account access. For a comparison of `AssumeRole` with other API operations that produce temporary credentials, see [Requesting Temporary Security Credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html) and [Comparing the Amazon Web Services STS API operations](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#stsapi_comparison) in the *IAM User Guide*. ## Permissions The temporary security credentials created by `AssumeRole` can be used to make API calls to any Amazon Web Services service with the following exception: You cannot call the Amazon Web Services STS `GetFederationToken` or `GetSessionToken` API operations. (Optional) You can pass inline or managed [session policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_session) to this operation. You can pass a single JSON policy document to use as an inline session policy. You can also specify up to 10 managed policies to use as managed session policies. The plaintext that you use for both inline and managed session policies can't exceed 2,048 characters. Passing policies to this operation returns new temporary credentials. The resulting session's permissions are the intersection of the role's identity-based policy and the session policies. You can use the role's temporary credentials in subsequent Amazon Web Services API calls to access resources in the account that owns the role. You cannot use session policies to grant more permissions than those allowed by the identity-based policy of the role that is being assumed. For more information, see [Session Policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_session) in the *IAM User Guide*. When you create a role, you create two policies: A role trust policy that specifies *who* can assume the role and a permissions policy that specifies *what* can be done with the role. You specify the trusted principal who is allowed to assume the role in the role trust policy. To assume a role from a different account, your Amazon Web Services account must be trusted by the role. The trust relationship is defined in the role's trust policy when the role is created. 
That trust policy states which accounts are allowed to delegate that access to users in the account. A user who wants to access a role in a different account must also have permissions that are delegated from the user account administrator. The administrator must attach a policy that allows the user to call `AssumeRole` for the ARN of the role in the other account. To allow a user to assume a role in the same account, you can do either of the following: * Attach a policy to the user that allows the user to call `AssumeRole` (as long as the role's trust policy trusts the account). * Add the user as a principal directly in the role's trust policy. You can do either because the role’s trust policy acts as an IAM resource-based policy. When a resource-based policy grants access to a principal in the same account, no additional identity-based policy is required. For more information about trust policies and resource-based policies, see [IAM Policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html) in the *IAM User Guide*. ## Tags (Optional) You can pass tag key-value pairs to your session. These tags are called session tags. For more information about session tags, see [Passing Session Tags in STS](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_session-tags.html) in the *IAM User Guide*. An administrator must grant you the permissions necessary to pass session tags. The administrator can also create granular permissions to allow you to pass only specific session tags. For more information, see [Tutorial: Using Tags for Attribute-Based Access Control](https://docs.aws.amazon.com/IAM/latest/UserGuide/tutorial_attribute-based-access-control.html) in the *IAM User Guide*. You can set the session tags as transitive. Transitive tags persist during role chaining. For more information, see [Chaining Roles with Session Tags](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_session-tags.html#id_session-tags_role-chaining) in the *IAM User Guide*. ## Using MFA with AssumeRole (Optional) You can include multi-factor authentication (MFA) information when you call `AssumeRole`. This is useful for cross-account scenarios to ensure that the user that assumes the role has been authenticated with an Amazon Web Services MFA device. In that scenario, the trust policy of the role being assumed includes a condition that tests for MFA authentication. If the caller does not include valid MFA information, the request to assume the role is denied. The condition in a trust policy that tests for MFA authentication might look like the following example. `"Condition": {"Bool": {"aws:MultiFactorAuthPresent": true}}` For more information, see [Configuring MFA-Protected API Access](https://docs.aws.amazon.com/IAM/latest/UserGuide/MFAProtectedAPI.html) in the *IAM User Guide* guide. To use MFA with `AssumeRole`, you pass values for the `SerialNumber` and `TokenCode` parameters. The `SerialNumber` value identifies the user's hardware or virtual MFA device. The `TokenCode` is the time-based one-time password (TOTP) that the MFA device produces. """ def assume_role(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "AssumeRole", input, options) end @doc """ Returns a set of temporary security credentials for users who have been authenticated via a SAML authentication response. This operation provides a mechanism for tying an enterprise identity store or directory to role-based Amazon Web Services access without user-specific credentials or configuration. 
For a comparison of `AssumeRoleWithSAML` with the other API operations that produce temporary credentials, see [Requesting Temporary Security Credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html) and [Comparing the Amazon Web Services STS API operations](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#stsapi_comparison) in the *IAM User Guide*. The temporary security credentials returned by this operation consist of an access key ID, a secret access key, and a security token. Applications can use these temporary security credentials to sign calls to Amazon Web Services services. ## Session Duration By default, the temporary security credentials created by `AssumeRoleWithSAML` last for one hour. However, you can use the optional `DurationSeconds` parameter to specify the duration of your session. Your role session lasts for the duration that you specify, or until the time specified in the SAML authentication response's `SessionNotOnOrAfter` value, whichever is shorter. You can provide a `DurationSeconds` value from 900 seconds (15 minutes) up to the maximum session duration setting for the role. This setting can have a value from 1 hour to 12 hours. To learn how to view the maximum value for your role, see [View the Maximum Session Duration Setting for a Role](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html#id_roles_use_view-role-max-session) in the *IAM User Guide*. The maximum session duration limit applies when you use the `AssumeRole*` API operations or the `assume-role*` CLI commands. However the limit does not apply when you use those operations to create a console URL. For more information, see [Using IAM Roles](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html) in the *IAM User Guide*. [Role chaining](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_terms-and-concepts.html#iam-term-role-chaining) limits your CLI or Amazon Web Services API role session to a maximum of one hour. When you use the `AssumeRole` API operation to assume a role, you can specify the duration of your role session with the `DurationSeconds` parameter. You can specify a parameter value of up to 43200 seconds (12 hours), depending on the maximum session duration setting for your role. However, if you assume a role using role chaining and provide a `DurationSeconds` parameter value greater than one hour, the operation fails. ## Permissions The temporary security credentials created by `AssumeRoleWithSAML` can be used to make API calls to any Amazon Web Services service with the following exception: you cannot call the STS `GetFederationToken` or `GetSessionToken` API operations. (Optional) You can pass inline or managed [session policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_session) to this operation. You can pass a single JSON policy document to use as an inline session policy. You can also specify up to 10 managed policies to use as managed session policies. The plaintext that you use for both inline and managed session policies can't exceed 2,048 characters. Passing policies to this operation returns new temporary credentials. The resulting session's permissions are the intersection of the role's identity-based policy and the session policies. You can use the role's temporary credentials in subsequent Amazon Web Services API calls to access resources in the account that owns the role. 
You cannot use session policies to grant more permissions than those allowed by the identity-based policy of the role that is being assumed. For more information, see [Session Policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_session) in the *IAM User Guide*. Calling `AssumeRoleWithSAML` does not require the use of Amazon Web Services security credentials. The identity of the caller is validated by using keys in the metadata document that is uploaded for the SAML provider entity for your identity provider. Calling `AssumeRoleWithSAML` can result in an entry in your CloudTrail logs. The entry includes the value in the `NameID` element of the SAML assertion. We recommend that you use a `NameIDType` that is not associated with any personally identifiable information (PII). For example, you could instead use the persistent identifier (`urn:oasis:names:tc:SAML:2.0:nameid-format:persistent`). ## Tags (Optional) You can configure your IdP to pass attributes into your SAML assertion as session tags. Each session tag consists of a key name and an associated value. For more information about session tags, see [Passing Session Tags in STS](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_session-tags.html) in the *IAM User Guide*. You can pass up to 50 session tags. The plaintext session tag keys can’t exceed 128 characters and the values can’t exceed 256 characters. For these and additional limits, see [IAM and STS Character Limits](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_iam-limits.html#reference_iam-limits-entity-length) in the *IAM User Guide*. An Amazon Web Services conversion compresses the passed session policies and session tags into a packed binary format that has a separate limit. Your request can fail for this limit even if your plaintext meets the other requirements. The `PackedPolicySize` response element indicates by percentage how close the policies and tags for your request are to the upper size limit. You can pass a session tag with the same key as a tag that is attached to the role. When you do, session tags override the role's tags with the same key. An administrator must grant you the permissions necessary to pass session tags. The administrator can also create granular permissions to allow you to pass only specific session tags. For more information, see [Tutorial: Using Tags for Attribute-Based Access Control](https://docs.aws.amazon.com/IAM/latest/UserGuide/tutorial_attribute-based-access-control.html) in the *IAM User Guide*. You can set the session tags as transitive. Transitive tags persist during role chaining. For more information, see [Chaining Roles with Session Tags](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_session-tags.html#id_session-tags_role-chaining) in the *IAM User Guide*. ## SAML Configuration Before your application can call `AssumeRoleWithSAML`, you must configure your SAML identity provider (IdP) to issue the claims required by Amazon Web Services. Additionally, you must use Identity and Access Management (IAM) to create a SAML provider entity in your Amazon Web Services account that represents your identity provider. You must also create an IAM role that specifies this SAML provider in its trust policy. For more information, see the following resources: * [About SAML 2.0-based Federation](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_saml.html) in the *IAM User Guide*. 
* [Creating SAML Identity Providers](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_saml.html) in the *IAM User Guide*. * [Configuring a Relying Party and Claims](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_saml_relying-party.html) in the *IAM User Guide*. * [Creating a Role for SAML 2.0 Federation](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-idp_saml.html) in the *IAM User Guide*. """ def assume_role_with_saml(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "AssumeRoleWithSAML", input, options) end @doc """ Returns a set of temporary security credentials for users who have been authenticated in a mobile or web application with a web identity provider. Example providers include Amazon Cognito, Login with Amazon, Facebook, Google, or any OpenID Connect-compatible identity provider. For mobile applications, we recommend that you use Amazon Cognito. You can use Amazon Cognito with the [Amazon Web Services SDK for iOS Developer Guide](http://aws.amazon.com/sdkforios/) and the [Amazon Web Services SDK for Android Developer Guide](http://aws.amazon.com/sdkforandroid/) to uniquely identify a user. You can also supply the user with a consistent identity throughout the lifetime of an application. To learn more about Amazon Cognito, see [Amazon Cognito Overview](https://docs.aws.amazon.com/mobile/sdkforandroid/developerguide/cognito-auth.html#d0e840) in *Amazon Web Services SDK for Android Developer Guide* and [Amazon Cognito Overview](https://docs.aws.amazon.com/mobile/sdkforios/developerguide/cognito-auth.html#d0e664) in the *Amazon Web Services SDK for iOS Developer Guide*. Calling `AssumeRoleWithWebIdentity` does not require the use of Amazon Web Services security credentials. Therefore, you can distribute an application (for example, on mobile devices) that requests temporary security credentials without including long-term Amazon Web Services credentials in the application. You also don't need to deploy server-based proxy services that use long-term Amazon Web Services credentials. Instead, the identity of the caller is validated by using a token from the web identity provider. For a comparison of `AssumeRoleWithWebIdentity` with the other API operations that produce temporary credentials, see [Requesting Temporary Security Credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html) and [Comparing the Amazon Web Services STS API operations](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#stsapi_comparison) in the *IAM User Guide*. The temporary security credentials returned by this API consist of an access key ID, a secret access key, and a security token. Applications can use these temporary security credentials to sign calls to Amazon Web Services service API operations. ## Session Duration By default, the temporary security credentials created by `AssumeRoleWithWebIdentity` last for one hour. However, you can use the optional `DurationSeconds` parameter to specify the duration of your session. You can provide a value from 900 seconds (15 minutes) up to the maximum session duration setting for the role. This setting can have a value from 1 hour to 12 hours. To learn how to view the maximum value for your role, see [View the Maximum Session Duration Setting for a Role](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html#id_roles_use_view-role-max-session) in the *IAM User Guide*. 
The maximum session duration limit applies when you use the `AssumeRole*` API operations or the `assume-role*` CLI commands. However the limit does not apply when you use those operations to create a console URL. For more information, see [Using IAM Roles](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html) in the *IAM User Guide*. ## Permissions The temporary security credentials created by `AssumeRoleWithWebIdentity` can be used to make API calls to any Amazon Web Services service with the following exception: you cannot call the STS `GetFederationToken` or `GetSessionToken` API operations. (Optional) You can pass inline or managed [session policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_session) to this operation. You can pass a single JSON policy document to use as an inline session policy. You can also specify up to 10 managed policies to use as managed session policies. The plaintext that you use for both inline and managed session policies can't exceed 2,048 characters. Passing policies to this operation returns new temporary credentials. The resulting session's permissions are the intersection of the role's identity-based policy and the session policies. You can use the role's temporary credentials in subsequent Amazon Web Services API calls to access resources in the account that owns the role. You cannot use session policies to grant more permissions than those allowed by the identity-based policy of the role that is being assumed. For more information, see [Session Policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_session) in the *IAM User Guide*. ## Tags (Optional) You can configure your IdP to pass attributes into your web identity token as session tags. Each session tag consists of a key name and an associated value. For more information about session tags, see [Passing Session Tags in STS](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_session-tags.html) in the *IAM User Guide*. You can pass up to 50 session tags. The plaintext session tag keys can’t exceed 128 characters and the values can’t exceed 256 characters. For these and additional limits, see [IAM and STS Character Limits](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_iam-limits.html#reference_iam-limits-entity-length) in the *IAM User Guide*. An Amazon Web Services conversion compresses the passed session policies and session tags into a packed binary format that has a separate limit. Your request can fail for this limit even if your plaintext meets the other requirements. The `PackedPolicySize` response element indicates by percentage how close the policies and tags for your request are to the upper size limit. You can pass a session tag with the same key as a tag that is attached to the role. When you do, the session tag overrides the role tag with the same key. An administrator must grant you the permissions necessary to pass session tags. The administrator can also create granular permissions to allow you to pass only specific session tags. For more information, see [Tutorial: Using Tags for Attribute-Based Access Control](https://docs.aws.amazon.com/IAM/latest/UserGuide/tutorial_attribute-based-access-control.html) in the *IAM User Guide*. You can set the session tags as transitive. Transitive tags persist during role chaining. 
For more information, see [Chaining Roles with Session Tags](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_session-tags.html#id_session-tags_role-chaining) in the *IAM User Guide*. ## Identities Before your application can call `AssumeRoleWithWebIdentity`, you must have an identity token from a supported identity provider and create a role that the application can assume. The role that your application assumes must trust the identity provider that is associated with the identity token. In other words, the identity provider must be specified in the role's trust policy. Calling `AssumeRoleWithWebIdentity` can result in an entry in your CloudTrail logs. The entry includes the [Subject](http://openid.net/specs/openid-connect-core-1_0.html#Claims) of the provided web identity token. We recommend that you avoid using any personally identifiable information (PII) in this field. For example, you could instead use a GUID or a pairwise identifier, as [suggested in the OIDC specification](http://openid.net/specs/openid-connect-core-1_0.html#SubjectIDTypes). For more information about how to use web identity federation and the `AssumeRoleWithWebIdentity` API, see the following resources: * [Using Web Identity Federation API Operations for Mobile Apps](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_oidc_manual.html) and [Federation Through a Web-based Identity Provider](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#api_assumerolewithwebidentity). * [ Web Identity Federation Playground](https://aws.amazon.com/blogs/aws/the-aws-web-identity-federation-playground/). Walk through the process of authenticating through Login with Amazon, Facebook, or Google, getting temporary security credentials, and then using those credentials to make a request to Amazon Web Services. * [Amazon Web Services SDK for iOS Developer Guide](http://aws.amazon.com/sdkforios/) and [Amazon Web Services SDK for Android Developer Guide](http://aws.amazon.com/sdkforandroid/). These toolkits contain sample apps that show how to invoke the identity providers. The toolkits then show how to use the information from these providers to get and use temporary security credentials. * [Web Identity Federation with Mobile Applications](http://aws.amazon.com/articles/web-identity-federation-with-mobile-applications). This article discusses web identity federation and shows an example of how to use web identity federation to get access to content in Amazon S3. """ def assume_role_with_web_identity(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "AssumeRoleWithWebIdentity", input, options) end @doc """ Decodes additional information about the authorization status of a request from an encoded message returned in response to an Amazon Web Services request. For example, if a user is not authorized to perform an operation that he or she has requested, the request returns a `Client.UnauthorizedOperation` response (an HTTP 403 response). Some Amazon Web Services operations additionally return an encoded message that can provide details about this authorization failure. Only certain Amazon Web Services operations return an encoded authorization message. The documentation for an individual operation indicates whether that operation returns an encoded message in addition to returning an HTTP code. 
The message is encoded because the details of the authorization status can contain privileged information that the user who requested the operation should not see. To decode an authorization status message, a user must be granted permissions through an IAM [policy](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html) to request the `DecodeAuthorizationMessage` (`sts:DecodeAuthorizationMessage`) action. The decoded message includes the following type of information: * Whether the request was denied due to an explicit deny or due to the absence of an explicit allow. For more information, see [Determining Whether a Request is Allowed or Denied](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_evaluation-logic.html#policy-eval-denyallow) in the *IAM User Guide*. * The principal who made the request. * The requested action. * The requested resource. * The values of condition keys in the context of the user's request. """ def decode_authorization_message(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DecodeAuthorizationMessage", input, options) end @doc """ Returns the account identifier for the specified access key ID. Access keys consist of two parts: an access key ID (for example, `AKIAIOSFODNN7EXAMPLE`) and a secret access key (for example, `wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY`). For more information about access keys, see [Managing Access Keys for IAM Users](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_access-keys.html) in the *IAM User Guide*. When you pass an access key ID to this operation, it returns the ID of the Amazon Web Services account to which the keys belong. Access key IDs beginning with `AKIA` are long-term credentials for an IAM user or the Amazon Web Services account root user. Access key IDs beginning with `ASIA` are temporary credentials that are created using STS operations. If the account in the response belongs to you, you can sign in as the root user and review your root user access keys. Then, you can pull a [credentials report](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_getting-report.html) to learn which IAM user owns the keys. To learn who requested the temporary credentials for an `ASIA` access key, view the STS events in your [CloudTrail logs](https://docs.aws.amazon.com/IAM/latest/UserGuide/cloudtrail-integration.html) in the *IAM User Guide*. This operation does not indicate the state of the access key. The key might be active, inactive, or deleted. Active keys might not have permissions to perform an operation. Providing a deleted access key might return an error that the key doesn't exist. """ def get_access_key_info(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetAccessKeyInfo", input, options) end @doc """ Returns details about the IAM user or role whose credentials are used to call the operation. No permissions are required to perform this operation. If an administrator adds a policy to your IAM user or role that explicitly denies access to the `sts:GetCallerIdentity` action, you can still perform this operation. Permissions are not required because the same information is returned when an IAM user or role is denied access. To view an example response, see [I Am Not Authorized to Perform: iam:DeleteVirtualMFADevice](https://docs.aws.amazon.com/IAM/latest/UserGuide/troubleshoot_general.html#troubleshoot_general_access-denied-delete-mfa) in the *IAM User Guide*. 
""" def get_caller_identity(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetCallerIdentity", input, options) end @doc """ Returns a set of temporary security credentials (consisting of an access key ID, a secret access key, and a security token) for a federated user. A typical use is in a proxy application that gets temporary security credentials on behalf of distributed applications inside a corporate network. You must call the `GetFederationToken` operation using the long-term security credentials of an IAM user. As a result, this call is appropriate in contexts where those credentials can be safely stored, usually in a server-based application. For a comparison of `GetFederationToken` with the other API operations that produce temporary credentials, see [Requesting Temporary Security Credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html) and [Comparing the Amazon Web Services STS API operations](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#stsapi_comparison) in the *IAM User Guide*. You can create a mobile-based or browser-based app that can authenticate users using a web identity provider like Login with Amazon, Facebook, Google, or an OpenID Connect-compatible identity provider. In this case, we recommend that you use [Amazon Cognito](http://aws.amazon.com/cognito/) or `AssumeRoleWithWebIdentity`. For more information, see [Federation Through a Web-based Identity Provider](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#api_assumerolewithwebidentity) in the *IAM User Guide*. You can also call `GetFederationToken` using the security credentials of an Amazon Web Services account root user, but we do not recommend it. Instead, we recommend that you create an IAM user for the purpose of the proxy application. Then attach a policy to the IAM user that limits federated users to only the actions and resources that they need to access. For more information, see [IAM Best Practices](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html) in the *IAM User Guide*. ## Session duration The temporary credentials are valid for the specified duration, from 900 seconds (15 minutes) up to a maximum of 129,600 seconds (36 hours). The default session duration is 43,200 seconds (12 hours). Temporary credentials obtained by using the Amazon Web Services account root user credentials have a maximum duration of 3,600 seconds (1 hour). ## Permissions You can use the temporary credentials created by `GetFederationToken` in any Amazon Web Services service except the following: * You cannot call any IAM operations using the CLI or the Amazon Web Services API. * You cannot call any STS operations except `GetCallerIdentity`. You must pass an inline or managed [session policy](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_session) to this operation. You can pass a single JSON policy document to use as an inline session policy. You can also specify up to 10 managed policies to use as managed session policies. The plaintext that you use for both inline and managed session policies can't exceed 2,048 characters. Though the session policy parameters are optional, if you do not pass a policy, then the resulting federated user session has no permissions. When you pass session policies, the session permissions are the intersection of the IAM user policies and the session policies that you pass. 
This gives you a way to further restrict the permissions for a federated user. You cannot use session policies to grant more permissions than those that are defined in the permissions policy of the IAM user. For more information, see [Session Policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_session) in the *IAM User Guide*. For information about using `GetFederationToken` to create temporary security credentials, see [GetFederationToken—Federation Through a Custom Identity Broker](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#api_getfederationtoken). You can use the credentials to access a resource that has a resource-based policy. If that policy specifically references the federated user session in the `Principal` element of the policy, the session has the permissions allowed by the policy. These permissions are granted in addition to the permissions granted by the session policies. ## Tags (Optional) You can pass tag key-value pairs to your session. These are called session tags. For more information about session tags, see [Passing Session Tags in STS](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_session-tags.html) in the *IAM User Guide*. You can create a mobile-based or browser-based app that can authenticate users using a web identity provider like Login with Amazon, Facebook, Google, or an OpenID Connect-compatible identity provider. In this case, we recommend that you use [Amazon Cognito](http://aws.amazon.com/cognito/) or `AssumeRoleWithWebIdentity`. For more information, see [Federation Through a Web-based Identity Provider](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#api_assumerolewithwebidentity) in the *IAM User Guide*. An administrator must grant you the permissions necessary to pass session tags. The administrator can also create granular permissions to allow you to pass only specific session tags. For more information, see [Tutorial: Using Tags for Attribute-Based Access Control](https://docs.aws.amazon.com/IAM/latest/UserGuide/tutorial_attribute-based-access-control.html) in the *IAM User Guide*. Tag key–value pairs are not case sensitive, but case is preserved. This means that you cannot have separate `Department` and `department` tag keys. Assume that the user that you are federating has the `Department`=`Marketing` tag and you pass the `department`=`engineering` session tag. `Department` and `department` are not saved as separate tags, and the session tag passed in the request takes precedence over the user tag. """ def get_federation_token(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetFederationToken", input, options) end @doc """ Returns a set of temporary credentials for an Amazon Web Services account or IAM user. The credentials consist of an access key ID, a secret access key, and a security token. Typically, you use `GetSessionToken` if you want to use MFA to protect programmatic calls to specific Amazon Web Services API operations like Amazon EC2 `StopInstances`. MFA-enabled IAM users would need to call `GetSessionToken` and submit an MFA code that is associated with their MFA device. Using the temporary security credentials that are returned from the call, IAM users can then make programmatic calls to API operations that require MFA authentication. If you do not supply a correct MFA code, then the API returns an access denied error. 
For a comparison of `GetSessionToken` with the other API operations that produce temporary credentials, see [Requesting Temporary Security Credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html) and [Comparing the Amazon Web Services STS API operations](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#stsapi_comparison) in the *IAM User Guide*. ## Session Duration The `GetSessionToken` operation must be called by using the long-term Amazon Web Services security credentials of the Amazon Web Services account root user or an IAM user. Credentials that are created by IAM users are valid for the duration that you specify. This duration can range from 900 seconds (15 minutes) up to a maximum of 129,600 seconds (36 hours), with a default of 43,200 seconds (12 hours). Credentials based on account credentials can range from 900 seconds (15 minutes) up to 3,600 seconds (1 hour), with a default of 1 hour. ## Permissions The temporary security credentials created by `GetSessionToken` can be used to make API calls to any Amazon Web Services service with the following exceptions: * You cannot call any IAM API operations unless MFA authentication information is included in the request. * You cannot call any STS API *except* `AssumeRole` or `GetCallerIdentity`. We recommend that you do not call `GetSessionToken` with Amazon Web Services account root user credentials. Instead, follow our [best practices](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#create-iam-users) by creating one or more IAM users, giving them the necessary permissions, and using IAM users for everyday interaction with Amazon Web Services. The credentials that are returned by `GetSessionToken` are based on permissions associated with the user whose credentials were used to call the operation. If `GetSessionToken` is called using Amazon Web Services account root user credentials, the temporary credentials have root user permissions. Similarly, if `GetSessionToken` is called using the credentials of an IAM user, the temporary credentials have the same permissions as the IAM user. For more information about using `GetSessionToken` to create temporary credentials, go to [Temporary Credentials for Users in Untrusted Environments](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#api_getsessiontoken) in the *IAM User Guide*. """ def get_session_token(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetSessionToken", input, options) end end
58.983232
221
0.780968
9ecbdaecee2f7474e4c04f606cec28ced76fddec
940
ex
Elixir
lib/my_app/application.ex
bigbassroller/ueberauth_example
5e889abaf060b6a37add2eb8a3cf1938f394c4af
[ "MIT" ]
null
null
null
lib/my_app/application.ex
bigbassroller/ueberauth_example
5e889abaf060b6a37add2eb8a3cf1938f394c4af
[ "MIT" ]
null
null
null
lib/my_app/application.ex
bigbassroller/ueberauth_example
5e889abaf060b6a37add2eb8a3cf1938f394c4af
[ "MIT" ]
null
null
null
defmodule MyApp.Application do
  # See https://hexdocs.pm/elixir/Application.html
  # for more information on OTP Applications
  @moduledoc false

  use Application

  def start(_type, _args) do
    children = [
      # Start the Telemetry supervisor
      MyAppWeb.Telemetry,
      # Start the PubSub system
      {Phoenix.PubSub, name: MyApp.PubSub},
      # Start the Endpoint (http/https)
      MyAppWeb.Endpoint
      # Start a worker by calling: MyApp.Worker.start_link(arg)
      # {MyApp.Worker, arg}
    ]

    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: MyApp.Supervisor]
    Supervisor.start_link(children, opts)
  end

  # Tell Phoenix to update the endpoint configuration
  # whenever the application is updated.
  def config_change(changed, _new, removed) do
    MyAppWeb.Endpoint.config_change(changed, removed)
    :ok
  end
end
28.484848
63
0.702128
9ecbed7456fb6a0756b2b9bd6d109a04ecd55994
1,343
ex
Elixir
lib/exavier/mutators/negate_conditionals.ex
Cantido/exavier
4fb6796597f487e901cda1ac0b726d501a28e591
[ "MIT" ]
93
2019-07-22T09:21:31.000Z
2022-02-02T05:49:29.000Z
lib/exavier/mutators/negate_conditionals.ex
Cantido/exavier
4fb6796597f487e901cda1ac0b726d501a28e591
[ "MIT" ]
21
2019-07-26T12:54:37.000Z
2022-03-10T10:56:47.000Z
lib/exavier/mutators/negate_conditionals.ex
Cantido/exavier
4fb6796597f487e901cda1ac0b726d501a28e591
[ "MIT" ]
7
2019-11-05T08:32:51.000Z
2020-11-25T19:37:35.000Z
defmodule Exavier.Mutators.NegateConditionals do
  @moduledoc """
  Mutates conditional operators into their opposite.

  Conditionals are replaced according to the table below.

  | Original | Mutation |
  |----------|----------|
  | == | != |
  | != | == |
  | <= | > |
  | >= | < |
  | < | >= |
  | > | <= |

  For example:

      if a == b {
        // do something
      }

  will be mutated into

      if a != b {
        // do something
      }
  """

  @behaviour Exavier.Mutators.Mutator

  @mutations %{
    :== => :!=,
    :!= => :==,
    :<= => :>,
    :>= => :<,
    :< => :>=,
    :> => :<=
  }

  @impl Exavier.Mutators.Mutator
  def operators, do: Map.keys(@mutations)

  @impl Exavier.Mutators.Mutator
  def mutate({operator, meta, args}, lines_to_mutate) do
    mutated_operator = mutate_operator(operator, args)
    do_mutate({mutated_operator, meta, args}, lines_to_mutate)
  end

  defp mutate_operator(:-, args) when length(args) == 1, do: :-
  defp mutate_operator(operator, _args), do: @mutations[operator]

  defp do_mutate({nil, _, _}, _), do: :skip

  defp do_mutate({mutated_op, meta, args}, lines_to_mutate) do
    {_, mutated_args} = Exavier.mutate_all(args, __MODULE__, lines_to_mutate)
    {mutated_op, meta, mutated_args}
  end
end
22.383333
77
0.543559
9ecbf3fb9fbe022f179168eab4967998b2210263
3,019
exs
Elixir
test/bimultimap_properties_test.exs
mat-hek/elixir-bimap
c3dcdcf64d6022052cce28a0f5c39e2c171714e5
[ "MIT" ]
15
2017-08-10T20:26:36.000Z
2022-03-12T16:10:22.000Z
test/bimultimap_properties_test.exs
mat-hek/elixir-bimap
c3dcdcf64d6022052cce28a0f5c39e2c171714e5
[ "MIT" ]
10
2018-09-03T07:17:06.000Z
2021-06-14T17:25:58.000Z
test/bimultimap_properties_test.exs
mat-hek/elixir-bimap
c3dcdcf64d6022052cce28a0f5c39e2c171714e5
[ "MIT" ]
4
2019-05-22T02:51:33.000Z
2020-03-24T18:35:18.000Z
defmodule BiMultiMapPropertiesTest do
  use ExUnit.Case, async: true
  use ExUnitProperties

  require BiMultiMap

  property "finds present items in bimultimap" do
    check all key_set <- nonempty(list_of(term())),
              value_set <- list_of(term(), length: Enum.count(key_set)) do
      kv_list = Enum.zip(key_set, value_set) |> MapSet.new()
      bimultimap = BiMultiMap.new(kv_list)

      {random_key, random_value} = Enum.random(bimultimap)

      kv_list_values =
        kv_list
        |> Enum.filter(fn {k, _v} -> k === random_key end)
        |> Enum.map(fn {_k, v} -> v end)

      kv_list_keys =
        kv_list
        |> Enum.filter(fn {_k, v} -> v === random_value end)
        |> Enum.map(fn {k, _v} -> k end)

      {:ok, bimultimap_values} = BiMultiMap.fetch(bimultimap, random_key)
      {:ok, bimultimap_keys} = BiMultiMap.fetch_keys(bimultimap, random_value)

      assert bimultimap_values |> Enum.sort() == kv_list_values |> Enum.sort()
      assert bimultimap_keys |> Enum.sort() == kv_list_keys |> Enum.sort()
    end
  end

  property "deletes items from bimultimap" do
    check all key_set <- nonempty(list_of(term())),
              value_set <- list_of(term(), length: Enum.count(key_set)) do
      kv_list = Enum.zip(key_set, value_set) |> MapSet.new()
      bimultimap = BiMultiMap.new(kv_list)

      {random_key, random_value} = Enum.random(bimultimap)

      bimultimap_comparison_delete_keys =
        kv_list
        |> Enum.reject(fn {k, _v} -> k === random_key end)
        |> BiMultiMap.new()

      bimultimap_comparison_delete_values =
        kv_list
        |> Enum.reject(fn {_k, v} -> v === random_value end)
        |> BiMultiMap.new()

      deleted_key_bimultimap = BiMultiMap.delete_key(bimultimap, random_key)
      deleted_value_bimultimap = BiMultiMap.delete_value(bimultimap, random_value)

      assert BiMultiMap.equal?(deleted_value_bimultimap, bimultimap_comparison_delete_values)
      assert BiMultiMap.equal?(deleted_key_bimultimap, bimultimap_comparison_delete_keys)
    end
  end

  property "it turns bimultimaps into lists" do
    check all key_set <- nonempty(uniq_list_of(term())),
              value_set <- list_of(term(), length: Enum.count(key_set)) do
      kv_list = Enum.zip(key_set, value_set) |> MapSet.new()
      bimultimap = BiMultiMap.new(kv_list)

      assert BiMultiMap.to_list(bimultimap) |> MapSet.new() == kv_list
    end
  end

  property "it puts items into bimultimaps" do
    check all key_set <- nonempty(uniq_list_of(term())),
              value_set <- list_of(term(), length: Enum.count(key_set)),
              random_key <- term(),
              random_value <- term() do
      kv_list = Enum.zip(key_set, value_set) |> MapSet.new()
      bimultimap = BiMultiMap.new(kv_list)

      put_kv_list = MapSet.put(kv_list, {random_key, random_value})
      put_bimultimap = BiMultiMap.put(bimultimap, random_key, random_value)

      assert BiMultiMap.equal?(put_bimultimap, BiMultiMap.new(put_kv_list))
    end
  end
end
37.271605
93
0.661477
9ecbf469727f466fc9e4a560147db9d83464c372
1,533
ex
Elixir
lib/exkml/stage.ex
EdsonGermano/exkml
21feff5fa0cbc2322f6a60e90352705fe9e47dbe
[ "Apache-2.0" ]
null
null
null
lib/exkml/stage.ex
EdsonGermano/exkml
21feff5fa0cbc2322f6a60e90352705fe9e47dbe
[ "Apache-2.0" ]
null
null
null
lib/exkml/stage.ex
EdsonGermano/exkml
21feff5fa0cbc2322f6a60e90352705fe9e47dbe
[ "Apache-2.0" ]
null
null
null
defmodule Exkml.Stage do
  alias Experimental.GenStage
  use GenStage

  def start_link(binstream, chunk_size) do
    GenStage.start_link(__MODULE__, [binstream, chunk_size])
  end

  def init([binstream, chunk_size]) do
    ref = make_ref()
    Exkml.setup(binstream, chunk_size, ref)
    buf = []
    demand = 0
    {:producer, {:started, ref, buf, demand, nil}}
  end

  def handle_demand(more_demand, {status, ref, buf, demand, from}) do
    new_demand = demand + more_demand
    {emit, keep} = Enum.split(buf, new_demand)
    less_demand = new_demand - length(emit)
    new_state = {status, ref, keep, less_demand, from}

    ack(ref, from)
    maybe_end(new_state)

    {:noreply, emit, new_state}
  end

  def handle_info({:placemarks, ref, from, pms}, {status, ref, buf, 0, _}) do
    {:noreply, [], {status, ref, buf ++ pms, 0, from}}
  end

  def handle_info({:placemarks, ref, from, pms}, {status, ref, buf, demand, _}) do
    {emit, keep} = Enum.split(buf ++ pms, demand)
    new_demand = demand - length(emit)
    ack(ref, from)
    {:noreply, emit, {status, ref, keep, new_demand, from}}
  end

  def handle_info({:done, ref}, {_, ref, buf, demand, from}) do
    new_state = {:done, ref, buf, demand, from}
    maybe_end(new_state)
    {:noreply, [], new_state}
  end

  def handle_info(:stop, state) do
    {:stop, :normal, state}
  end

  defp ack(_, nil), do: :ok
  defp ack(ref, from), do: send from, {:ack, ref}

  defp maybe_end({:done, _, [], _, _}), do: send self(), :stop
  defp maybe_end(_), do: :nope
end
25.983051
82
0.635356
9ecbfaa3083ea60ad8e4b52e24f8c8a03a1d2e2a
5,550
exs
Elixir
test/guardian/token/jwe_test.exs
jlauemoeller/guardian_jwe
8d2708daf570eb2bd8d7e8f3c3166fccfef606aa
[ "MIT" ]
2
2018-03-29T15:25:26.000Z
2018-05-26T05:47:25.000Z
test/guardian/token/jwe_test.exs
jlauemoeller/guardian_jwe
8d2708daf570eb2bd8d7e8f3c3166fccfef606aa
[ "MIT" ]
null
null
null
test/guardian/token/jwe_test.exs
jlauemoeller/guardian_jwe
8d2708daf570eb2bd8d7e8f3c3166fccfef606aa
[ "MIT" ]
null
null
null
defmodule Guardian.Token.JweTest do
  use ExUnit.Case

  defmodule Impl do
    use Guardian, otp_app: :guardian_test, token_module: Guardian.Token.Jwe

    def subject_for_token(resource, _claims) do
      sub = to_string(resource.id)
      {:ok, sub}
    end

    def resource_from_claims(claims) do
      {:ok, claims}
    end
  end

  alias Guardian.Token.Jwe

  describe "A128GCMKW" do
    test "decrypts token successfully" do
      secret = JOSE.JWK.from_oct(<<0::128>>)
      {:ok, token} = create_token(secret, "A128GCMKW")
      {:ok, claims} = decode_token(token, secret, "A128GCMKW")

      assert claims["id"] == 1
    end

    test "returns invalid if secret is wrong" do
      secret = JOSE.JWK.from_oct(<<0::128>>)
      {:ok, token} = create_token(secret, "A128GCMKW")

      bad_secret = JOSE.JWK.from_oct("aaaaaaaaaaaaaaaa")
      {:error, :invalid_token} = decode_token(token, bad_secret, "A128GCMKW")
    end
  end

  describe "A192GCMKW" do
    test "successfully decrypts a token" do
      secret = JOSE.JWK.from_oct(<<0::192>>)
      {:ok, token} = create_token(secret, "A192GCMKW")
      {:ok, claims} = decode_token(token, secret, "A192GCMKW")

      assert claims["id"] == 1
    end

    test "returns error when token is invalid" do
      secret = JOSE.JWK.from_oct(<<0::192>>)
      {:ok, token} = create_token(secret, "A192GCMKW")

      bad_secret = JOSE.JWK.from_oct("aaaaaaaaaaaaaaaaaaaaaaaa")
      {:error, :invalid_token} = decode_token(token, bad_secret, "A192GCMKW")
    end
  end

  describe "A256GCMKW" do
    test "successfully decrypts a token" do
      secret = JOSE.JWK.from_oct(<<0::256>>)
      {:ok, token} = create_token(secret, "A256GCMKW")
      {:ok, claims} = decode_token(token, secret, "A256GCMKW")

      assert claims["id"] == 1
    end

    test "returns error when token is invalid" do
      secret = JOSE.JWK.from_oct(<<0::256>>)
      {:ok, token} = create_token(secret, "A256GCMKW")

      bad_secret = JOSE.JWK.from_oct("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")
      {:error, :invalid_token} = decode_token(token, bad_secret, "A256GCMKW")
    end
  end

  @pbes_secret "gBMuMSI8o+gOoGP04iy/cXiGGwUyyNJNJLe7OA+uEkKZz6b8kDJ9y4to4Nf9umlG"

  describe "PBES2-HS256+A128KW" do
    test "successfully decrypts a token" do
      secret = JOSE.JWK.from_oct(@pbes_secret)
      {:ok, token} = create_token(secret, "PBES2-HS256+A128KW")
      {:ok, claims} = decode_token(token, secret, "PBES2-HS256+A128KW")

      assert claims["id"] == 1
    end

    test "returns error when decryption fails" do
      secret = JOSE.JWK.from_oct(@pbes_secret)
      {:ok, token} = create_token(secret, "PBES2-HS256+A128KW")

      bad_secret = JOSE.JWK.from_oct("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")
      {:error, :invalid_token} = decode_token(token, bad_secret, "PBES2-HS256+A128KW")
    end
  end

  describe "PBES2-HS384+A192KW" do
    test "success" do
      secret = JOSE.JWK.from_oct(@pbes_secret)
      {:ok, token} = create_token(secret, "PBES2-HS256+A128KW")
      {:ok, claims} = decode_token(token, secret, "PBES2-HS256+A128KW")

      assert claims["id"] == 1
    end

    test "decryption failure" do
      secret = JOSE.JWK.from_oct(@pbes_secret)
      {:ok, token} = create_token(secret, "PBES2-HS256+A128KW")

      bad_secret = JOSE.JWK.from_oct("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")
      {:error, :invalid_token} = decode_token(token, bad_secret, "PBES2-HS256+A128KW")
    end
  end

  describe "PBES2-HS512+A256KW" do
    test "success" do
      secret = JOSE.JWK.from_oct(@pbes_secret)
      {:ok, token} = create_token(secret, "PBES2-HS512+A256KW")
      {:ok, claims} = decode_token(token, secret, "PBES2-HS512+A256KW")

      assert claims["id"] == 1
    end

    test "decryption failure" do
      secret = JOSE.JWK.from_oct(@pbes_secret)
      {:ok, token} = create_token(secret, "PBES2-HS512+A256KW")

      bad_secret = JOSE.JWK.from_oct("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")
      {:error, :invalid_token} = decode_token(token, bad_secret, "PBES2-HS512+A256KW")
    end
  end

  describe "exchange" do
    test "it refreshes the JWE exp" do
      secret = JOSE.JWK.from_oct(@pbes_secret)

      old_claims = %{
        "jti" => UUID.uuid4(),
        "aud" => "MyApp",
        "typ" => "access",
        "exp" => Guardian.timestamp + 10_000,
        "iat" => Guardian.timestamp,
        "iss" => "MyApp",
        "sub" => "User:1",
        "something_else" => "foo"
      }

      {:ok, token} =
        Jwe.create_token(
          __MODULE__.Impl,
          old_claims,
          secret: secret,
          allowed_algos: ["PBES2-HS512+A256KW"])

      {:ok, {^token = old_t, ^old_claims = old_c}, {new_t, new_c}} =
        Jwe.exchange(
          __MODULE__.Impl,
          token,
          "access",
          "refresh",
          [secret: secret, allowed_algos: ["PBES2-HS512+A256KW"]])

      refute old_t == new_t

      assert new_c["sub"] == old_c["sub"]
      assert new_c["aud"] == old_c["aud"]

      refute new_c["jti"] == old_c["jti"]
      refute new_c["nbf"] == old_c["nbf"]
      refute new_c["exp"] == old_c["exp"]
    end
  end

  defp create_token(secret, algo) do
    Jwe.create_token(
      __MODULE__.Impl,
      %{id: 1},
      secret: secret,
      allowed_algos: [algo])
  end

  defp decode_token(token, secret, algo) do
    Jwe.decode_token(
      __MODULE__.Impl,
      token,
      secret: secret,
      allowed_algos: [algo])
  end
end
25.227273
81
0.610631
9ecc03801274f358c03d48d90c32692777a4bee2
2,694
ex
Elixir
lib/rop.ex
kalys/rop
2189768517158cab3aaf6f6086c834862f634160
[ "MIT" ]
null
null
null
lib/rop.ex
kalys/rop
2189768517158cab3aaf6f6086c834862f634160
[ "MIT" ]
null
null
null
lib/rop.ex
kalys/rop
2189768517158cab3aaf6f6086c834862f634160
[ "MIT" ]
null
null
null
# https://gist.github.com/zabirauf/17ced02bdf9829b6956e
# https://github.com/remiq/railway-oriented-programming-elixir
defmodule Rop do
  defmacro __using__(_) do
    quote do
      import Rop
    end
  end

  @doc ~s"""
  Extracts the value from a tagged tuple like {:ok, value}
  Raises the value from a tagged tuple like {:error, value}
  Raise the arguments else

  For example:

      iex> ok({:ok, 1})
      1

      iex> ok({:error, "some"})
      ** (RuntimeError) some

      iex> ok({:anything, "some"})
      ** (ArgumentError) raise/1 expects an alias, string or exception as the first argument, got: {:anything, "some"}
  """
  def ok({:ok, x}), do: x
  def ok({:error, x}), do: raise x
  def ok(x), do: raise x

  @doc ~s"""
  No need to stop pipelining in case of an error somewhere in the middle

  Example:

      iex> inc = fn(x)-> {:ok, x+1} end
      iex> 1 |> (inc).() >>> (inc).()
      {:ok, 3}
  """
  defmacro left >>> right do
    quote do
      (fn ->
        case unquote(left) do
          {:ok, x} -> x |> unquote(right)
          {:error, _} = expr -> expr
        end
      end).()
    end
  end

  @doc ~s"""
  Wraps a simple function to return a tagged tuple with `:ok` to comply to the protocol `{:ok, result}`

  Example:

      iex> 1 |> Integer.to_string
      "1"

      iex> 1 |> bind(Integer.to_string)
      {:ok, "1"}

      iex> inc = fn(x)-> x+1 end
      iex> 1 |> bind((inc).()) >>> (inc).()
      3
      iex> 1 |> bind((inc).()) >>> bind((inc).())
      {:ok, 3}
  """
  defmacro bind(args, func) do
    quote do
      (fn ->
        result = unquote(args) |> unquote(func)
        {:ok, result}
      end).()
    end
  end

  @doc ~s"""
  Wraps raising functions to return a tagged tuple `{:error, ErrorMessage}` to comply with the protocol

  Example:

      iex> r = fn(_)-> raise "some" end
      iex> inc = fn(x)-> x + 1 end
      iex> 1 |> bind((inc).()) >>> try_catch((r).()) >>> bind((inc).())
      {:error, %RuntimeError{message: "some"}}
  """
  defmacro try_catch(args, func) do
    quote do
      (fn ->
        try do
          unquote(args) |> unquote(func)
        rescue
          e -> {:error, e}
        end
      end).()
    end
  end

  @doc ~s"""
  Like a similar Unix utility it does some work and returns the input.
  See [tee (command), Unix](https://en.wikipedia.org/wiki/Tee_(command)).

  Example:

      iex> inc = fn(x)-> IO.inspect(x); {:ok, x + 1} end
      iex> 1 |> tee((inc).()) >>> tee((inc).()) >>> tee((inc).())
      {:ok, 1}
  """
  defmacro tee(args, func) do
    quote do
      (fn ->
        unquote(args) |> unquote(func)
        {:ok, unquote(args)}
      end).()
    end
  end
end
23.025641
118
0.525612
9ecc092abe7b55dceee12f2e45e9ad6819e58c5c
2,662
ex
Elixir
clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/search_organizations_request.ex
linjunpop/elixir-google-api
444cb2b2fb02726894535461a474beddd8b86db4
[ "Apache-2.0" ]
null
null
null
clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/search_organizations_request.ex
linjunpop/elixir-google-api
444cb2b2fb02726894535461a474beddd8b86db4
[ "Apache-2.0" ]
null
null
null
clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/search_organizations_request.ex
linjunpop/elixir-google-api
444cb2b2fb02726894535461a474beddd8b86db4
[ "Apache-2.0" ]
null
null
null
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an &quot;AS IS&quot; BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This class is auto generated by the swagger code generator program. # https://github.com/swagger-api/swagger-codegen.git # Do not edit the class manually. defmodule GoogleApi.CloudResourceManager.V1.Model.SearchOrganizationsRequest do @moduledoc """ The request sent to the &#x60;SearchOrganizations&#x60; method. ## Attributes - filter (String.t): An optional query string used to filter the Organizations to return in the response. Filter rules are case-insensitive. Organizations may be filtered by &#x60;owner.directoryCustomerId&#x60; or by &#x60;domain&#x60;, where the domain is a G Suite domain, for example: | Filter | Description | |-------------------------------------|----------------------------------| | owner.directorycustomerid:123456789 | Organizations with &#x60;owner.directory_customer_id&#x60; equal to &#x60;123456789&#x60;.| | domain:google.com | Organizations corresponding to the domain &#x60;google.com&#x60;.| This field is optional. Defaults to: `null`. - pageSize (integer()): The maximum number of Organizations to return in the response. This field is optional. Defaults to: `null`. - pageToken (String.t): A pagination token returned from a previous call to &#x60;SearchOrganizations&#x60; that indicates from where listing should continue. This field is optional. Defaults to: `null`. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :filter => any(), :pageSize => any(), :pageToken => any() } field(:filter) field(:pageSize) field(:pageToken) end defimpl Poison.Decoder, for: GoogleApi.CloudResourceManager.V1.Model.SearchOrganizationsRequest do def decode(value, options) do GoogleApi.CloudResourceManager.V1.Model.SearchOrganizationsRequest.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.CloudResourceManager.V1.Model.SearchOrganizationsRequest do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
49.296296
728
0.710368
9ecc28f14b01b7461c8934c44cc0ee1c9ce6f40e
1,076
ex
Elixir
web_finngen_r8/test/support/conn_case.ex
vincent-octo/risteys
5bb1e70b78988770048b91b42fad025faf98d84a
[ "MIT" ]
null
null
null
web_finngen_r8/test/support/conn_case.ex
vincent-octo/risteys
5bb1e70b78988770048b91b42fad025faf98d84a
[ "MIT" ]
null
null
null
web_finngen_r8/test/support/conn_case.ex
vincent-octo/risteys
5bb1e70b78988770048b91b42fad025faf98d84a
[ "MIT" ]
null
null
null
defmodule RisteysWeb.ConnCase do @moduledoc """ This module defines the test case to be used by tests that require setting up a connection. Such tests rely on `Phoenix.ConnTest` and also import other functionality to make it easier to build common data structures and query the data layer. Finally, if the test case interacts with the database, it cannot be async. For this reason, every test runs inside a transaction which is reset at the beginning of the test unless the test case is marked as async. """ use ExUnit.CaseTemplate using do quote do # Import conveniences for testing with connections import Plug.Conn import Phoenix.ConnTest alias RisteysWeb.Router.Helpers, as: Routes # The default endpoint for testing @endpoint RisteysWeb.Endpoint end end setup tags do :ok = Ecto.Adapters.SQL.Sandbox.checkout(Risteys.Repo) unless tags[:async] do Ecto.Adapters.SQL.Sandbox.mode(Risteys.Repo, {:shared, self()}) end {:ok, conn: Phoenix.ConnTest.build_conn()} end end
26.9
69
0.716543
9ecc29c480d7a168701712598d23e0a519c934bd
1,677
exs
Elixir
mix.exs
KosyanMedia/ueberauth_vk
0a73979802a83b60d59f632f0d8ac0d398fae23d
[ "MIT" ]
null
null
null
mix.exs
KosyanMedia/ueberauth_vk
0a73979802a83b60d59f632f0d8ac0d398fae23d
[ "MIT" ]
null
null
null
mix.exs
KosyanMedia/ueberauth_vk
0a73979802a83b60d59f632f0d8ac0d398fae23d
[ "MIT" ]
null
null
null
defmodule UeberauthVK.Mixfile do use Mix.Project @version "0.2.6" @url "https://github.com/sobolevn/ueberauth_vk" def project do [ app: :ueberauth_vk, version: @version, name: "Ueberauth VK Strategy", package: package(), elixir: "~> 1.2", build_embedded: Mix.env == :prod, start_permanent: Mix.env == :prod, source_url: @url, homepage_url: @url, description: description(), deps: deps(), # Docs name: "ueberauth_vk", docs: docs(), # Test coverage: test_coverage: [tool: ExCoveralls], preferred_cli_env: [ "coveralls": :test, "coveralls.detail": :test, "coveralls.post": :test, "coveralls.html": :test, ], ] end def application do [applications: [:logger, :oauth2, :ueberauth]] end defp deps do [ # Auth: {:ueberauth, "~> 0.2"}, {:oauth2, "~> 0.8.0"}, # Tests: {:exvcr, "~> 0.8.4", only: :test}, {:excoveralls, "~> 0.6", only: :test}, {:poison, "~> 3.0", only: :test}, # is needed for tests # Docs: {:ex_doc, "~> 0.14", only: :dev}, # Lint: {:credo, "~> 0.6", only: [:dev, :test]}, ] end defp docs do # Docs [source_ref: "v#{@version}", main: "README", canonical: "http://hexdocs.pm/ueberauth_vk", source_url: @url, extras: ["README.md"]] end defp description do "An Uberauth strategy for VK authentication." end defp package do [files: ["lib", "mix.exs", "README.md", "LICENSE.md"], maintainers: ["Sobolev Nikita"], licenses: ["MIT"], links: %{"GitHub": @url}] end end
20.703704
60
0.538462
9ecc932981fe2038ec834c54d9d69176ca8814a1
7,004
ex
Elixir
lib/broadway/topology/batcher_stage.ex
isaacsanders/broadway
14fce62cec6ea1f89ca14689adf67c67b69739db
[ "Apache-2.0" ]
null
null
null
lib/broadway/topology/batcher_stage.ex
isaacsanders/broadway
14fce62cec6ea1f89ca14689adf67c67b69739db
[ "Apache-2.0" ]
null
null
null
lib/broadway/topology/batcher_stage.ex
isaacsanders/broadway
14fce62cec6ea1f89ca14689adf67c67b69739db
[ "Apache-2.0" ]
null
null
null
defmodule Broadway.Topology.BatcherStage do @moduledoc false use GenStage alias Broadway.BatchInfo @all_batches __MODULE__.All @spec start_link(term, GenServer.options()) :: GenServer.on_start() def start_link(args, stage_options) do Broadway.Topology.Subscriber.start_link( __MODULE__, args[:processors], args, [max_demand: args[:batch_size]], stage_options ) end @impl true def init(args) do Process.put(@all_batches, %{}) {dispatcher, partition_by} = case args[:partition_by] do nil -> {GenStage.DemandDispatcher, nil} func -> concurrency = args[:concurrency] hash_fun = fn {_, %{partition: partition}} = payload -> {payload, partition} end dispatcher = {GenStage.PartitionDispatcher, partitions: 0..(concurrency - 1), hash: hash_fun} {dispatcher, fn msg -> rem(func.(msg), concurrency) end} end state = %{ topology_name: args[:topology_name], name: args[:name], batcher: args[:batcher], batch_size: args[:batch_size], batch_timeout: args[:batch_timeout], partition_by: partition_by, context: args[:context] } {:producer_consumer, state, dispatcher: dispatcher} end @impl true def handle_events(events, _from, state) do start_time = System.monotonic_time() emit_start_event(state, start_time, events) batches = handle_events_per_batch_key(events, [], state) emit_stop_event(state, start_time) {:noreply, batches, state} end defp emit_start_event(state, start_time, events) do metadata = %{ topology_name: state.topology_name, name: state.name, batcher_key: state.batcher, messages: events, context: state.context } measurements = %{time: start_time} :telemetry.execute([:broadway, :batcher, :start], measurements, metadata) end defp emit_stop_event(state, start_time) do stop_time = System.monotonic_time() measurements = %{time: stop_time, duration: stop_time - start_time} metadata = %{ topology_name: state.topology_name, name: state.name, batcher_key: state.batcher, context: state.context } :telemetry.execute([:broadway, :batcher, :stop], measurements, metadata) end @impl true def handle_info({:timeout, timer, batch_key}, state) do case get_timed_out_batch(batch_key, timer) do {current, pending_count, _} -> delete_batch(batch_key) {:noreply, [wrap_for_delivery(batch_key, current, pending_count, state)], state} :error -> {:noreply, [], state} end end def handle_info(:cancel_consumers, state) do events = for {batch_key, _} <- all_batches() do {current, pending_count, timer} = delete_batch(batch_key) cancel_batch_timeout(timer) wrap_for_delivery(batch_key, current, pending_count, state) end {:noreply, events, state} end def handle_info(_msg, state) do {:noreply, [], state} end ## Default batch handling defp handle_events_per_batch_key([], acc, _state) do Enum.reverse(acc) end defp handle_events_per_batch_key([event | _] = events, acc, state) do %{partition_by: partition_by} = state batch_key = batch_key(event, partition_by) {current, pending_count, timer} = init_or_get_batch(batch_key, state) {current, pending_count, events, flush?} = split_counting(batch_key, events, pending_count, false, current, partition_by) acc = deliver_or_update_batch(batch_key, current, pending_count, flush?, timer, acc, state) handle_events_per_batch_key(events, acc, state) end defp split_counting(batch_key, events, count, flush?, acc, partition_by) do with [event | events] when count > 0 <- events, ^batch_key <- batch_key(event, partition_by) do flush? = flush? 
or event.batch_mode == :flush split_counting(batch_key, events, count - 1, flush?, [event | acc], partition_by) else _ -> {acc, count, events, flush?} end end defp deliver_or_update_batch(batch_key, current, pending_count, true, timer, acc, state) do deliver_batch(batch_key, current, pending_count, timer, acc, state) end defp deliver_or_update_batch(batch_key, current, 0, _flush?, timer, acc, state) do deliver_batch(batch_key, current, 0, timer, acc, state) end defp deliver_or_update_batch(batch_key, current, pending_count, _flush?, timer, acc, _state) do put_batch(batch_key, {current, pending_count, timer}) acc end defp deliver_batch(batch_key, current, pending_count, timer, acc, state) do delete_batch(batch_key) cancel_batch_timeout(timer) [wrap_for_delivery(batch_key, current, pending_count, state) | acc] end ## General batch handling @compile {:inline, batch_key: 2} defp batch_key(%{batch_key: batch_key}, nil), do: batch_key defp batch_key(%{batch_key: batch_key} = event, partition_by), do: [batch_key | partition_by.(event)] defp init_or_get_batch(batch_key, state) do if batch = Process.get(batch_key) do batch else %{batch_size: batch_size, batch_timeout: batch_timeout} = state timer = schedule_batch_timeout(batch_key, batch_timeout) update_all_batches(&Map.put(&1, batch_key, true)) {[], batch_size, timer} end end defp get_timed_out_batch(batch_key, timer) do case Process.get(batch_key) do {_, _, ^timer} = batch -> batch _ -> :error end end defp put_batch(batch_key, {_, _, _} = batch) do Process.put(batch_key, batch) end defp delete_batch(batch_key) do update_all_batches(&Map.delete(&1, batch_key)) Process.delete(batch_key) end defp all_batches do Process.get(@all_batches) end defp update_all_batches(fun) do Process.put(@all_batches, fun.(Process.get(@all_batches))) end defp schedule_batch_timeout(batch_key, batch_timeout) do :erlang.start_timer(batch_timeout, self(), batch_key) end defp cancel_batch_timeout(timer) do case :erlang.cancel_timer(timer) do false -> receive do {:timeout, ^timer, _} -> :ok after 0 -> raise "unknown timer #{inspect(timer)}" end _ -> :ok end end defp wrap_for_delivery(batch_key, reversed_events, pending, %{partition_by: nil} = state) do wrap_for_delivery(batch_key, nil, reversed_events, pending, state) end defp wrap_for_delivery([batch_key | partition], reversed_events, pending, state) do wrap_for_delivery(batch_key, partition, reversed_events, pending, state) end defp wrap_for_delivery(batch_key, partition, reversed_events, pending, state) do %{batcher: batcher, batch_size: batch_size} = state batch_info = %BatchInfo{ batcher: batcher, batch_key: batch_key, partition: partition, size: batch_size - pending } {Enum.reverse(reversed_events), batch_info} end end
28.471545
97
0.677184
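`Broadway.Topology.BatcherStage` above is an internal stage driven by options (`batch_size`, `batch_timeout`, `concurrency`, `partition_by`) rather than used directly. As a hedged sketch of where those options typically come from, a minimal Broadway pipeline supplies them through the public `:batchers` configuration (the pipeline module, producer choice, and numbers below are illustrative):

```elixir
defmodule ExamplePipeline do
  use Broadway

  def start_link(_opts) do
    Broadway.start_link(__MODULE__,
      name: __MODULE__,
      # DummyProducer normally stands in for a real producer in tests.
      producer: [module: {Broadway.DummyProducer, []}, concurrency: 1],
      processors: [default: [concurrency: 2]],
      # These keys map onto the batch_size/batch_timeout/concurrency args
      # consumed by the batcher stage above.
      batchers: [default: [batch_size: 50, batch_timeout: 2_000, concurrency: 2]]
    )
  end

  @impl true
  def handle_message(_processor, message, _context), do: message

  @impl true
  def handle_batch(_batcher, messages, _batch_info, _context), do: messages
end
```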
9ecc962958d59f896f1b4231c048ebd4934eb5bc
570
ex
Elixir
lib/bitpal_phx/process_registry.ex
bitpal/bitpal_demo
989df08f60dadc5d4e340fef91890cf8bb3106ad
[ "BSD-3-Clause-Clear" ]
null
null
null
lib/bitpal_phx/process_registry.ex
bitpal/bitpal_demo
989df08f60dadc5d4e340fef91890cf8bb3106ad
[ "BSD-3-Clause-Clear" ]
1
2021-06-04T13:53:01.000Z
2021-06-04T13:53:01.000Z
lib/bitpal_phx/process_registry.ex
bitpal/bitpal_demo
989df08f60dadc5d4e340fef91890cf8bb3106ad
[ "BSD-3-Clause-Clear" ]
null
null
null
defmodule BitPalPhx.ProcessRegistry do def start_link do Registry.start_link(keys: :unique, name: __MODULE__) end def via_tuple(key) do {:via, Registry, {__MODULE__, key}} end @spec get_process(any) :: {:ok, pid} | {:error, :not_found} def get_process(key) do case Registry.lookup(__MODULE__, key) do [{pid, _}] -> {:ok, pid} [] -> {:error, :not_found} end end def child_spec(_) do Supervisor.child_spec( Registry, id: __MODULE__, start: {__MODULE__, :start_link, []} ) end end
19.655172
61
0.603509
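A short sketch of how the registry above is typically used: a hypothetical worker names itself with `via_tuple/1` so callers can later find it with `get_process/1`. The worker module and key are made up, and it assumes the registry has been started (for example from its `child_spec/1` under a supervisor).

```elixir
defmodule ExampleWorker do
  use GenServer

  # Register the worker under a key so it can be looked up later.
  def start_link(invoice_id) do
    GenServer.start_link(__MODULE__, invoice_id,
      name: BitPalPhx.ProcessRegistry.via_tuple({:invoice, invoice_id})
    )
  end

  @impl true
  def init(invoice_id), do: {:ok, invoice_id}
end

# children = [BitPalPhx.ProcessRegistry, {ExampleWorker, "inv_1"}]
# Supervisor.start_link(children, strategy: :one_for_one)
# BitPalPhx.ProcessRegistry.get_process({:invoice, "inv_1"}) #=> {:ok, pid}
```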
9ecca13a341133660e5ac1b63cf0dbde3e66efcd
15,899
ex
Elixir
lib/iex/lib/iex/pry.ex
kenichi/elixir
8c27da88c70623cbe516d5310c885943395a82a2
[ "Apache-2.0" ]
1
2019-08-13T23:22:33.000Z
2019-08-13T23:22:33.000Z
lib/iex/lib/iex/pry.ex
kenichi/elixir
8c27da88c70623cbe516d5310c885943395a82a2
[ "Apache-2.0" ]
1
2015-06-09T15:52:43.000Z
2015-06-09T15:52:43.000Z
lib/iex/lib/iex/pry.ex
kenichi/elixir
8c27da88c70623cbe516d5310c885943395a82a2
[ "Apache-2.0" ]
null
null
null
defmodule IEx.Pry do @moduledoc """ The low-level API for prying sessions and setting up breakpoints. """ @doc false use GenServer @table __MODULE__ @server __MODULE__ @timeout :infinity @initial_counter 1 @type id :: integer() @type break :: {id, module, {function, arity}, pending :: non_neg_integer} @type break_error :: :recompilation_failed | :no_beam_file | :unknown_function_arity | :missing_debug_info | :outdated_debug_info | :non_elixir_module @doc """ Callback for `IEx.pry/0`. You can invoke this function directly when you are not able to invoke `IEx.pry/0` as a macro. This function expects the binding (from `Kernel.binding/0`) and the environment (from `__ENV__/0`). """ def pry(binding, %Macro.Env{} = env) do self = self() %{file: file, line: line, module: module, function: function_arity} = env {:current_stacktrace, stacktrace} = Process.info(self, :current_stacktrace) opts = [ binding: binding, dot_iex_path: "", env: env, prefix: "pry", stacktrace: prune_stacktrace(stacktrace) ] location = case function_arity do {function, arity} -> "#{Exception.format_mfa(module, function, arity)} (#{Path.relative_to_cwd(file)}:#{line})" _ -> "#{Path.relative_to_cwd(file)}:#{line}" end whereami = case whereami(file, line, 2) do {:ok, lines} -> [?\n, ?\n, lines] :error -> [] end # If we are the current evaluator, it is because we just # reached a pry/breakpoint and the user hit continue(). # In both cases, we are safe to print and the request will # succeed. request = if Process.get(:iex_evaluator) do IO.puts(IEx.color(:eval_interrupt, "Break reached: #{location}#{whereami}")) "Prying #{inspect(self)} at #{location}" else "Request to pry #{inspect(self)} at #{location}#{whereami}" end # We cannot use colors because IEx may be off case IEx.Broker.take_over(request, [evaluator: self()] ++ opts) do {:ok, server, group_leader} -> IEx.Evaluator.init(:no_ack, server, group_leader, opts) {:error, :no_iex} -> extra = if match?({:win32, _}, :os.type()) do " If you are using Windows, you may need to start IEx with the --werl option." else "" end message = "Cannot pry #{inspect(self)} at #{location}. Is an IEx shell running?" <> extra IO.puts(:stdio, message) {:error, :no_iex} {:error, _} = error -> error end end def pry(binding, opts) when is_list(opts) do vars = for {k, _} when is_atom(k) <- binding, do: {k, nil} pry(binding, %{:elixir.env_for_eval(opts) | vars: vars}) end @elixir_internals [:elixir, :erl_eval, IEx.Evaluator, IEx.Pry] defp prune_stacktrace([{mod, _, _, _} | t]) when mod in @elixir_internals do prune_stacktrace(t) end defp prune_stacktrace([{Process, :info, 2, _} | t]) do prune_stacktrace(t) end defp prune_stacktrace([h | t]) do [h | prune_stacktrace(t)] end defp prune_stacktrace([]) do [] end @doc """ Formats the location for `whereami/3` prying. It receives the `file`, `line` and the snippet `radius` and returns `{:ok, lines}`, where lines is a list of chardata containing each formatted line, or `:error`. The actual line is especially formatted in bold. 
""" def whereami(file, line, radius) when is_binary(file) and is_integer(line) and is_integer(radius) and radius > 0 do with true <- File.regular?(file), [_ | _] = lines <- whereami_lines(file, line, radius) do {:ok, lines} else _ -> :error end end defp whereami_lines(file, line, radius) do min = max(line - radius - 1, 0) max = line + radius - 1 file |> File.stream!() |> Enum.slice(min..max) |> Enum.with_index(min + 1) |> Enum.map(&whereami_format_line(&1, line)) end defp whereami_format_line({line_text, line_number}, line) do gutter = String.pad_leading(Integer.to_string(line_number), 5, " ") if line_number == line do IO.ANSI.format_fragment([:bright, gutter, ": ", line_text, :normal]) else [gutter, ": ", line_text] end end @doc """ Sets up a breakpoint on the given module/function/arity. """ @spec break(module, function, arity, pos_integer) :: {:ok, id()} | {:error, break_error()} def break(module, function, arity, breaks \\ 1) when is_atom(module) and is_atom(function) and is_integer(arity) and arity >= 0 and is_integer(breaks) and breaks > 0 do break_call(module, function, arity, quote(do: _), breaks) end @doc """ Sets up a breakpoint on the given module/function/args with the given `guard`. It requires an `env` to be given to make the expansion of the guards. """ @spec break(module, function, [Macro.t()], Macro.t(), Macro.Env.t(), pos_integer) :: {:ok, id()} | {:error, break_error()} def break(module, function, args, guard, env, breaks \\ 1) when is_atom(module) and is_atom(function) and is_list(args) and is_integer(breaks) and breaks > 0 do condition = build_args_guard_condition(args, guard, env) break_call(module, function, length(args), condition, breaks) end defp break_call(module, function, arity, condition, breaks) do GenServer.call(@server, {:break, module, {function, arity}, condition, breaks}, @timeout) end @doc """ Raising variant of `break/4`. """ @spec break!(module, function, arity, pos_integer) :: id() def break!(module, function, arity, breaks \\ 1) do break_call!(module, function, arity, quote(do: _), breaks) end @doc """ Raising variant of `break/6`. 
""" @spec break!(module, function, [Macro.t()], Macro.t(), Macro.Env.t(), pos_integer) :: id() def break!(module, function, args, guard, env, breaks \\ 1) when is_atom(module) and is_atom(function) and is_list(args) and is_integer(breaks) and breaks > 0 do condition = build_args_guard_condition(args, guard, env) break_call!(module, function, length(args), condition, breaks) end defp break_call!(module, function, arity, condition, breaks) do case break_call(module, function, arity, condition, breaks) do {:ok, id} -> id {:error, kind} -> message = case kind do :missing_debug_info -> "module #{inspect(module)} was not compiled with debug_info" :no_beam_file -> "could not find .beam file for #{inspect(module)}" :non_elixir_module -> "module #{inspect(module)} was not written in Elixir" :outdated_debug_info -> "module #{inspect(module)} was not compiled with the latest debug_info" :recompilation_failed -> "the module could not be compiled with breakpoints (likely an internal error)" :unknown_function_arity -> "unknown function/macro #{Exception.format_mfa(module, function, arity)}" end raise "could not set breakpoint, " <> message end end defp build_args_guard_condition(args, guards, env) do pattern = {:when, [], [{:{}, [], args}, guards]} to_expand = quote do case Unknown.module() do unquote(pattern) -> :ok end end {{:case, _, [_, [do: [{:->, [], [[condition], _]}]]]}, _} = :elixir_expand.expand(to_expand, env) condition end @doc """ Resets the breaks on a given breakpoint ID. """ @spec reset_break(id) :: :ok | :not_found def reset_break(id) when is_integer(id) do GenServer.call(@server, {:reset_break, {id, :_, :_, :_, :_}}, @timeout) end @doc """ Resets the breaks for the given module, function and arity. If the module is not instrumented or if the given function does not have a breakpoint, it is a no-op and it returns `:not_found`. Otherwise it returns `:ok`. """ @spec reset_break(module, function, arity) :: :ok | :not_found def reset_break(module, function, arity) do GenServer.call(@server, {:reset_break, {:_, module, {function, arity}, :_, :_}}, @timeout) end @doc """ Removes all breakpoints on all modules. This effectively loads the non-instrumented version of currently instrumented modules into memory. """ @spec remove_breaks :: :ok def remove_breaks do GenServer.call(@server, :remove_breaks, @timeout) end @doc """ Removes breakpoints in the given module. This effectively loads the non-instrumented version of the module into memory. """ @spec remove_breaks(module) :: :ok | {:error, :no_beam_file} def remove_breaks(module) do GenServer.call(@server, {:remove_breaks, module}, @timeout) end @doc """ Returns all breakpoints. """ @spec breaks :: [break] def breaks do @server |> GenServer.call(:breaks, @timeout) |> Enum.sort() end ## Callbacks @doc false def start_link(_) do GenServer.start_link(__MODULE__, :ok, name: @server) end @impl true def init(:ok) do Process.flag(:trap_exit, true) :ets.new(@table, [:named_table, :public, write_concurrency: true]) {:ok, @initial_counter} end @impl true def handle_call({:break, module, fa, condition, breaks}, _from, counter) do # If there is a match for the given module and fa, we # use the ref, otherwise we create a new one. 
{ref, counter} = case :ets.match_object(@table, {:_, module, fa, :_, :_}) do [{ref, _, _, _, _}] -> {ref, counter} [] -> {counter, counter + 1} end case fetch_elixir_debug_info_with_fa_check(module, fa) do {:ok, beam, backend, elixir} -> true = :ets.insert(@table, {ref, module, fa, condition, breaks}) entries = :ets.match_object(@table, {:_, module, :_, :_, :_}) {:reply, instrument(beam, backend, elixir, ref, entries), counter} {:error, _} = error -> {:reply, error, counter} end end def handle_call({:reset_break, pattern}, _from, counter) do reset = for {ref, module, fa, condition, _} <- :ets.match_object(@table, pattern) do if instrumented?(module) do :ets.insert(@table, {ref, module, fa, condition, 0}) true else :ets.delete(@table, ref) false end end if Enum.any?(reset) do {:reply, :ok, counter} else {:reply, :not_found, counter} end end def handle_call(:breaks, _from, counter) do entries = for {id, module, function_arity, _condition, breaks} <- :ets.tab2list(@table), keep_instrumented(id, module) == :ok do {id, module, function_arity, max(breaks, 0)} end {:reply, entries, counter} end def handle_call(:remove_breaks, _from, _counter) do # Make sure to deinstrument before clearing # up the table to avoid race conditions. @table |> :ets.match({:_, :"$1", :_, :_, :_}) |> List.flatten() |> Enum.uniq() |> Enum.each(&deinstrument_if_instrumented/1) true = :ets.delete_all_objects(@table) {:reply, :ok, @initial_counter} end def handle_call({:remove_breaks, module}, _from, counter) do # Make sure to deinstrument before clearing # up the table to avoid race conditions. reply = deinstrument_if_instrumented(module) true = :ets.match_delete(@table, {:_, module, :_, :_, :_}) {:reply, reply, counter} end defp keep_instrumented(id, module) do if instrumented?(module) do :ok else :ets.delete(@table, id) :error end end defp deinstrument_if_instrumented(module) do if instrumented?(module) do deinstrument(module) else :ok end end defp deinstrument(module) do with [_ | _] = beam <- :code.which(module), {:ok, binary} <- File.read(beam) do :code.purge(module) {:module, _} = :code.load_binary(module, beam, binary) :ok else _ -> {:error, :no_beam_file} end end defp fetch_elixir_debug_info_with_fa_check(module, fa) do case :code.which(module) do [_ | _] = beam -> case :beam_lib.chunks(beam, [:debug_info]) do {:ok, {_, [debug_info: {:debug_info_v1, backend, {:elixir_v1, map, _} = elixir}]}} -> case List.keyfind(map.definitions, fa, 0) do {_, _, _, _} -> {:ok, beam, backend, elixir} nil -> {:error, :unknown_function_arity} end {:ok, {_, [debug_info: {:debug_info_v1, _, _}]}} -> {:error, :non_elixir_module} {:error, :beam_lib, {:missing_chunk, _, _}} -> {:error, :missing_debug_info} _ -> {:error, :outdated_debug_info} end _ -> {:error, :no_beam_file} end end defp instrument(beam, backend, {:elixir_v1, map, specs}, counter, entries) do %{attributes: attributes, definitions: definitions, module: module} = map attributes = [{:iex_pry, true} | attributes] definitions = Enum.map(definitions, &instrument_definition(&1, map, entries)) map = %{map | attributes: attributes, definitions: definitions} with {:ok, forms} <- backend.debug_info(:erlang_v1, module, {:elixir_v1, map, specs}, []), {:ok, _, binary, _} <- :compile.noenv_forms(forms, [:return | map.compile_opts]) do :code.purge(module) {:module, _} = :code.load_binary(module, beam, binary) {:ok, counter} else _error -> {:error, :recompilation_failed} end end defp instrument_definition({fa, kind, meta, clauses} = definition, map, entries) do case List.keyfind(entries, 
fa, 2) do {ref, _, ^fa, condition, _} -> %{module: module, file: file} = map file = case meta[:location] do {file, _} -> file _ -> file end opts = [module: module, file: file, function: fa] clauses = Enum.map(clauses, &instrument_clause(&1, ref, condition, opts)) {fa, kind, meta, clauses} nil -> definition end end defp instrument_clause({meta, args, guards, clause}, ref, case_pattern, opts) do opts = [line: Keyword.get(meta, :line, 1)] ++ opts # We store variables on a map ignoring the context. # In the rare case where variables in different contexts # have the same name, the last one wins. {_, binding} = Macro.prewalk(args, %{}, fn {name, _, ctx} = var, acc when name != :_ and is_atom(name) and is_atom(ctx) -> {var, Map.put(acc, name, var)} expr, acc -> {expr, acc} end) # Have an extra binding per argument for case matching. case_vars = Macro.generate_arguments(length(args), __MODULE__) case_head = {:{}, [], case_vars} update_op = Macro.escape({5, -1, -1, -1}) # Generate the take_over condition with the ETS lookup. # Remember this is expanded AST, so no aliases allowed, # no locals (such as the unary -) and so on. condition = quote do case unquote(case_head) do unquote(case_pattern) -> # :ets.update_counter(table, key, {pos, inc, threshold, reset}) case :ets.update_counter(unquote(@table), unquote(ref), unquote(update_op)) do unquote(-1) -> :ok _ -> :"Elixir.IEx.Pry".pry(unquote(Map.to_list(binding)), unquote(opts)) end _ -> :ok end end args = case_vars |> Enum.zip(args) |> Enum.map(fn {var, arg} -> {:=, [], [arg, var]} end) {meta, args, guards, {:__block__, [], [condition, clause]}} end defp instrumented?(module) do module.__info__(:attributes)[:iex_pry] == [true] end end
30.05482
100
0.608026
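The breakpoint API above is normally driven from a running IEx session. A brief sketch, where the target function is only an example (any module compiled with Elixir debug info works):

```elixir
# Set one breakpoint on URI.decode_query/2, inspect it, then clean up.
{:ok, id} = IEx.Pry.break(URI, :decode_query, 2)

IEx.Pry.breaks()
#=> [{id, URI, {:decode_query, 2}, 1}]   (see the `break` type above)

IEx.Pry.reset_break(id)   #=> :ok
IEx.Pry.remove_breaks()   #=> :ok
```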
9eccb28a348fbfd220fb7ba50aa30c8ce024bc5f
1,312
ex
Elixir
lib/client/recurring_client.ex
littlelines/frex
e3d9005b782fa0c0aaff0c0368f154c45fb7a302
[ "Apache-2.0" ]
2
2017-02-10T16:46:21.000Z
2020-05-04T11:50:50.000Z
lib/client/recurring_client.ex
littlelines/frex
e3d9005b782fa0c0aaff0c0368f154c45fb7a302
[ "Apache-2.0" ]
null
null
null
lib/client/recurring_client.ex
littlelines/frex
e3d9005b782fa0c0aaff0c0368f154c45fb7a302
[ "Apache-2.0" ]
1
2019-11-27T15:43:55.000Z
2019-11-27T15:43:55.000Z
defmodule Frex.Client.Recurring do @moduledoc """ Client module for interacting with the Freshbooks recurring endpoint. """ use Frex.Endpoint, :client @doc """ Return the details of an existing recurring profile, including auto-bill information if this recurring profile has auto-billing enabled. ## Parameters * `recurring_id` (**required**) -- recurring ID """ def get(credentials, recurring_id) do "recurring.get" |> Request.build({:recurring_id, %{}, recurring_id}) |> HTTP.request!(credentials) end @doc """ Returns a list of recurring profile summaries. Results are ordered by descending recurring_id. Note: A list request that returns no results (i.e. page 999), will return an empty result set, not an error. Note: The response will include an empty autobill tag if the recurring profile does not have auto-billing enabled, otherwise the response will include an autobill element with the gateway name and card element. ## Parameters * `filters` (**optional**) -- a map of filters for the list request * `client_id` * `autobill` -- boolean 1 or 0 """ def list(credentials, filters \\ %{}) do opts = opts_to_builder(filters) "recurring.list" |> Request.build(opts) |> HTTP.request!(credentials) end end
26.77551
72
0.696646
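A hedged usage sketch for the client above: the shape of `credentials` is whatever Frex's HTTP layer expects and is treated as opaque here (the helper building it is hypothetical), the IDs and filter values are made up, and the filter keys come from the `list/2` docs.

```elixir
# build_credentials/0 is a hypothetical helper standing in for however the
# application obtains its FreshBooks credentials.
credentials = build_credentials()

Frex.Client.Recurring.get(credentials, "12345")
Frex.Client.Recurring.list(credentials, %{client_id: "67890", autobill: 1})
```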
9eccb499ca02b08e07042fb64de1f150bb60165d
3,235
ex
Elixir
clients/machine_learning/lib/google_api/machine_learning/v1/model/google_cloud_ml_v1__replica_config.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/machine_learning/lib/google_api/machine_learning/v1/model/google_cloud_ml_v1__replica_config.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/machine_learning/lib/google_api/machine_learning/v1/model/google_cloud_ml_v1__replica_config.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1_ReplicaConfig do @moduledoc """ Represents the configuration for a replica in a cluster. ## Attributes * `acceleratorConfig` (*type:* `GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1_AcceleratorConfig.t`, *default:* `nil`) - Represents the type and number of accelerators used by the replica. [Learn about restrictions on accelerator configurations for training.](/ai-platform/training/docs/using-gpus#compute-engine-machine-types-with-gpu) * `imageUri` (*type:* `String.t`, *default:* `nil`) - The Docker image to run on the replica. This image must be in Container Registry. Learn more about [configuring custom containers](/ai-platform/training/docs/distributed-training-containers). * `tpuTfVersion` (*type:* `String.t`, *default:* `nil`) - The AI Platform runtime version that includes a TensorFlow version matching the one used in the custom container. This field is required if the replica is a TPU worker that uses a custom container. Otherwise, do not specify this field. This must be a [runtime version that currently supports training with TPUs](/ml-engine/docs/tensorflow/runtime-version-list#tpu-support). Note that the version of TensorFlow included in a runtime version may differ from the numbering of the runtime version itself, because it may have a different [patch version](https://www.tensorflow.org/guide/version_compat#semantic_versioning_20). In this field, you must specify the runtime version (TensorFlow minor version). For example, if your custom container runs TensorFlow `1.x.y`, specify `1.x`. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :acceleratorConfig => GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1_AcceleratorConfig.t(), :imageUri => String.t(), :tpuTfVersion => String.t() } field(:acceleratorConfig, as: GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1_AcceleratorConfig ) field(:imageUri) field(:tpuTfVersion) end defimpl Poison.Decoder, for: GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1_ReplicaConfig do def decode(value, options) do GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1_ReplicaConfig.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1_ReplicaConfig do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
43.716216
196
0.742813
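Because the model above is a plain `GoogleApi.Gax.ModelBase` struct, using it amounts to populating the documented fields; the image URI and TensorFlow version below are illustrative values only.

```elixir
replica = %GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1_ReplicaConfig{
  imageUri: "gcr.io/example-project/trainer:latest",
  tpuTfVersion: "1.15"
}

# JSON encoding goes through the Poison.Encoder implementation defined above.
Poison.encode!(replica)
```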
9eccc053e2096992f1642bc58ab9f942076202eb
8,533
exs
Elixir
test/scenic/cache/hash_test.exs
ruan-brandao/scenic
cbd8bf50c4239bb5e4ae7971bbc9850ae1f9f2b7
[ "Apache-2.0" ]
null
null
null
test/scenic/cache/hash_test.exs
ruan-brandao/scenic
cbd8bf50c4239bb5e4ae7971bbc9850ae1f9f2b7
[ "Apache-2.0" ]
null
null
null
test/scenic/cache/hash_test.exs
ruan-brandao/scenic
cbd8bf50c4239bb5e4ae7971bbc9850ae1f9f2b7
[ "Apache-2.0" ]
null
null
null
# # Created by Boyd Multerer on 2017-11-12. # Copyright © 2017 Kry10 Industries. All rights reserved. # defmodule Scenic.Cache.HashTest do use ExUnit.Case, async: true doctest Scenic.Cache.File alias Scenic.Cache.Hash # import IEx @valid_hash_path "test/test_data/valid_hash_file.txt.aqw2vpKePkeDvZzBz-1wFsC2Xac" @bad_hash_path "test/test_data/bad_hash_file.txt.not_a_valid_hash" @missing_hash_path "test/test_data/missing_hash_file.txt" @no_such_file_path "test/test_data/no_such_file.txt.whatever" @valid_hash "aqw2vpKePkeDvZzBz-1wFsC2Xac" @valid_hash_256 "XmLxE6HaLNGiAE3Xhhs-G4I3PCap-fsK90vJZnQMbFI" @missing_hash "TMRA5gAj7BwXxcRfPGq2avbh6nc" @missing_hash_256 "6XheyWIkgKP7baORQ3y2TRWVQNptzlOSfuXFiXoZ_Ao" # hash = Cache.File.compute_file_hash( @valid_hash_path ) # ============================================================================ # compute hash for binary test "binary computes a hash for some binary data" do data = "some data. af98hwu4lhrliw4uhtliuhet;giojres;ihg;usdhg" expected_hash = :crypto.hash(:sha, data) |> Base.url_encode64(padding: false) assert Hash.binary(data, :sha) == {:ok, expected_hash} end test "binary rejects invalid hash types" do data = "some data. af98hwu4lhrliw4uhtliuhet;giojres;ihg;usdhg" assert Hash.binary(data, :invalid) == {:error, :invalid_hash_type} end test "binary! computes a hash for some binary data" do data = "some data. af98hwu4lhrliw4uhtliuhet;giojres;ihg;usdhg" expected_hash = :crypto.hash(:sha, data) |> Base.url_encode64(padding: false) assert Hash.binary!(data, :sha) == expected_hash end test "binary! raises on an invalid hash type" do data = "some data. af98hwu4lhrliw4uhtliuhet;giojres;ihg;usdhg" assert_raise Scenic.Cache.Hash.Error, fn -> Hash.binary!(data, :invalid) end end # ============================================================================ # compute_file test "file loads a file and computes its hash" do assert Hash.file(@missing_hash_path, :sha) == {:ok, @missing_hash} end test "file loads a file and computes its hash with alternate algorithms" do assert Hash.file(@missing_hash_path, :sha256) == {:ok, @missing_hash_256} end test "file passes through file system errors" do assert Hash.file(@no_such_file_path, :sha) == {:error, :enoent} end # ============================================================================ # file! test "file! loads a file and computes its hash" do assert Hash.file!(@missing_hash_path, :sha) == @missing_hash end test "file! loads a file and computes its hash with alternate algorithms" do assert Hash.file!(@missing_hash_path, :sha256) == @missing_hash_256 end test "file! passes through file system errors" do assert_raise File.Error, fn -> Hash.file!(@no_such_file_path, :sha) end end # ============================================================================ # verify test "verify returns {:ok, data} when the hash checks out ok" do data = "This is some data to hash - awleiufhoq34htuwehtljwuh5toihu" expected = Hash.binary!(data, :sha) assert Hash.verify(data, expected, :sha) == {:ok, data} end test "verify returns {:error, :hash_failure} when the hash fails" do data = "This is some data to hash - awleiufhoq34htuwehtljwuh5toihu" assert Hash.verify(data, "not_a_hash", :sha) == {:error, :hash_failure} end # ============================================================================ # verify! test "verify! 
returns data when the hash checks out ok" do data = "This is some data to hash - awleiufhoq34htuwehtljwuh5toihu" expected = Hash.binary!(data, :sha) assert Hash.verify!(data, expected, :sha) == data end test "verify! raises on a hash failure" do data = "This is some data to hash - awleiufhoq34htuwehtljwuh5toihu" assert_raise Hash.Error, fn -> Hash.verify!(data, "not_a_hash", :sha) end end # ============================================================================ # verify_file test "verify_file returns {:ok, data} when the hash checks out ok" do assert Hash.verify_file(@valid_hash_path) == {:ok, @valid_hash} assert Hash.verify_file({@missing_hash_path, @missing_hash}) == {:ok, @missing_hash} assert Hash.verify_file({@missing_hash_path, @missing_hash_256, :sha256}) == {:ok, @missing_hash_256} end test "verify_file returns {:error, :hash_failure} when the hash fails" do assert Hash.verify_file(@bad_hash_path) == {:error, :hash_failure} assert Hash.verify_file({@missing_hash_path, "not_a_hash"}) == {:error, :hash_failure} assert Hash.verify_file({@missing_hash_path, "not_a_hash", :sha256}) == {:error, :hash_failure} end test "verify_file passes through file system errors" do assert Hash.verify_file(@no_such_file_path) == {:error, :enoent} assert Hash.verify_file({@no_such_file_path, @valid_hash}) == {:error, :enoent} assert Hash.verify_file({@no_such_file_path, @valid_hash_256, :sha256}) == {:error, :enoent} end # ============================================================================ # verify_file! test "verify_file! returns data when the hash checks out ok" do assert Hash.verify_file!(@valid_hash_path) == @valid_hash assert Hash.verify_file!({@missing_hash_path, @missing_hash}) == @missing_hash assert Hash.verify_file!({@missing_hash_path, @missing_hash_256, :sha256}) == @missing_hash_256 end test "verify_file! raises on a hash failure" do assert_raise Hash.Error, fn -> Hash.verify_file!(@bad_hash_path) end assert_raise Hash.Error, fn -> Hash.verify_file!({@valid_hash_path, "not_a_hash"}) end assert_raise Hash.Error, fn -> Hash.verify_file!({@missing_hash_path, "not_a_hash", :sha}) end end test "verify_file! 
passes through file system errors" do assert_raise File.Error, fn -> Hash.verify_file!(@no_such_file_path) end end # ============================================================================ # path functions test "from_path returns just the hash appended to the end of a path" do assert Hash.from_path(@valid_hash_path) == @valid_hash end test "from_path returns the extension - which is obviously not a valid hash" do assert Hash.from_path(@missing_hash_path) == "txt" end # ============================================================================ # path param checking test "path_params(path) works" do assert Hash.path_params(@valid_hash_path) == {@valid_hash_path, @valid_hash, :sha} end test "path_params(not_a_path) fails" do assert_raise FunctionClauseError, fn -> Hash.path_params(:not_a_path) end end test "path_params(path, hash_type) works" do assert Hash.path_params(@valid_hash_path, :sha256) == {@valid_hash_path, @valid_hash, :sha256} assert Hash.path_params({@valid_hash_path, :sha256}) == {@valid_hash_path, @valid_hash, :sha256} end test "path_params(path, hash) works" do assert Hash.path_params(@missing_hash_path, @missing_hash) == {@missing_hash_path, @missing_hash, :sha} assert Hash.path_params({@missing_hash_path, @missing_hash}) == {@missing_hash_path, @missing_hash, :sha} end test "path_params(path, something_else) fails" do assert_raise FunctionClauseError, fn -> Hash.path_params(@missing_hash_path, 123) end end test "path_params(path, hash, type) works" do assert Hash.path_params(@missing_hash_path, @missing_hash_256, :sha256) == {@missing_hash_path, @missing_hash_256, :sha256} assert Hash.path_params({@missing_hash_path, @missing_hash_256, :sha256}) == {@missing_hash_path, @missing_hash_256, :sha256} end test "path_params(path, hash, type) with bogus params fails" do assert_raise FunctionClauseError, fn -> Hash.path_params(123, @missing_hash, :sha256) end assert_raise FunctionClauseError, fn -> Hash.path_params({123, @missing_hash, :sha256}) end assert_raise FunctionClauseError, fn -> Hash.path_params(@missing_hash_path, 123, :sha256) end assert_raise FunctionClauseError, fn -> Hash.path_params({@missing_hash_path, 1232, :sha256}) end assert_raise FunctionClauseError, fn -> Hash.path_params(@missing_hash_path, @missing_hash, 123) end assert_raise FunctionClauseError, fn -> Hash.path_params({@missing_hash_path, @missing_hash, 123}) end end end
37.262009
98
0.653815
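A compact sketch of the hashing and verification calls the tests above exercise (the input data is arbitrary):

```elixir
data = "any binary payload"

{:ok, hash} = Scenic.Cache.Hash.binary(data, :sha)
{:ok, ^data} = Scenic.Cache.Hash.verify(data, hash, :sha)

# The bang variants return bare values and raise Scenic.Cache.Hash.Error on failure.
^hash = Scenic.Cache.Hash.binary!(data, :sha)
```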
9eccca6731de5a6dceeeb98141e6528f741e3b81
916
ex
Elixir
lib/airbrakex/plug.ex
talum/airbrakex
ffe9c15d56f42928841e903e0baf572b5d1a4400
[ "MIT" ]
null
null
null
lib/airbrakex/plug.ex
talum/airbrakex
ffe9c15d56f42928841e903e0baf572b5d1a4400
[ "MIT" ]
null
null
null
lib/airbrakex/plug.ex
talum/airbrakex
ffe9c15d56f42928841e903e0baf572b5d1a4400
[ "MIT" ]
2
2020-02-05T12:02:31.000Z
2021-04-22T09:06:41.000Z
defmodule Airbrakex.Plug do @moduledoc """ You can plug `Airbrakex.Plug` in your web application Plug stack to send all exception to `airbrake` ```elixir defmodule YourApp.Router do use Phoenix.Router use Airbrakex.Plug # ... end ``` """ alias Airbrakex.{ExceptionParser, Notifier} defmacro __using__(_env) do quote location: :keep do @before_compile Airbrakex.Plug end end defmacro __before_compile__(_env) do quote location: :keep do defoverridable call: 2 def call(conn, opts) do try do super(conn, opts) rescue exception -> session = Map.get(conn.private, :plug_session) error = ExceptionParser.parse(exception) Notifier.notify(error, params: conn.params, session: session) reraise exception, System.stacktrace() end end end end end
20.355556
73
0.628821
9ecd01006ba349dbf6c68feaa3d607774749e638
1,428
ex
Elixir
clients/you_tube/lib/google_api/you_tube/v3/model/localized_string.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/you_tube/lib/google_api/you_tube/v3/model/localized_string.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
1
2020-12-18T09:25:12.000Z
2020-12-18T09:25:12.000Z
clients/you_tube/lib/google_api/you_tube/v3/model/localized_string.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
1
2020-10-04T10:12:44.000Z
2020-10-04T10:12:44.000Z
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.YouTube.V3.Model.LocalizedString do @moduledoc """ ## Attributes * `language` (*type:* `String.t`, *default:* `nil`) - * `value` (*type:* `String.t`, *default:* `nil`) - """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :language => String.t(), :value => String.t() } field(:language) field(:value) end defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.LocalizedString do def decode(value, options) do GoogleApi.YouTube.V3.Model.LocalizedString.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.LocalizedString do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
28.56
74
0.710784
9ecd2345b8434bfe58e984326803b41bbd0e5cdf
1,509
exs
Elixir
apps/prometheus/mix.exs
thluiz/prometheus
0936e0b1e066ed35917221628d060adf8d7aed29
[ "MIT" ]
null
null
null
apps/prometheus/mix.exs
thluiz/prometheus
0936e0b1e066ed35917221628d060adf8d7aed29
[ "MIT" ]
null
null
null
apps/prometheus/mix.exs
thluiz/prometheus
0936e0b1e066ed35917221628d060adf8d7aed29
[ "MIT" ]
1
2020-11-04T06:04:51.000Z
2020-11-04T06:04:51.000Z
defmodule Prometheus.MixProject do use Mix.Project def project do [ app: :prometheus, version: "0.1.0", build_path: "../../_build", config_path: "../../config/config.exs", deps_path: "../../deps", lockfile: "../../mix.lock", elixir: "~> 1.7", elixirc_paths: elixirc_paths(Mix.env()), start_permanent: Mix.env() == :prod, aliases: aliases(), deps: deps() ] end # Configuration for the OTP application. # # Type `mix help compile.app` for more information. def application do [ mod: {Prometheus.Application, []}, extra_applications: [:logger, :runtime_tools ] ] end # Specifies which paths to compile per environment. defp elixirc_paths(:test), do: ["lib", "test/support"] defp elixirc_paths(_), do: ["lib"] # Specifies your project dependencies. # # Type `mix help deps` for examples and options. defp deps do [ {:phoenix_pubsub, "~> 2.0"}, {:ecto_sql, "~> 3.4"}, {:postgrex, ">= 0.0.0"}, {:jason, "~> 1.0"}, {:tzdata, "~> 1.0.3"} ] end # Aliases are shortcuts or tasks specific to the current project. # # See the documentation for `Mix` for more info on aliases. defp aliases do [ setup: ["deps.get", "ecto.setup"], "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"], "ecto.reset": ["ecto.drop", "ecto.setup"], test: ["ecto.create --quiet", "ecto.migrate", "test"] ] end end
25.576271
79
0.579192
9ecd31ca850a2e7c571397c16bba5d5b86dd597a
798
ex
Elixir
lib/todo/database_worker.ex
jaroslav-kubicek/elixir-in-action
8add6a9a01d741c554d7395417838a8e2f5b713a
[ "MIT" ]
null
null
null
lib/todo/database_worker.ex
jaroslav-kubicek/elixir-in-action
8add6a9a01d741c554d7395417838a8e2f5b713a
[ "MIT" ]
null
null
null
lib/todo/database_worker.ex
jaroslav-kubicek/elixir-in-action
8add6a9a01d741c554d7395417838a8e2f5b713a
[ "MIT" ]
null
null
null
defmodule Todo.DatabaseWorker do @moduledoc false use GenServer def start_link(folder) do GenServer.start_link(__MODULE__, folder) end def store(pid, key, data) do GenServer.cast(pid, {:store, key, data}) end def get(pid, key) do GenServer.call(pid, {:get, key}) end # behaviours: def handle_call({:get, key}, _from, folder) do filename = get_filename(folder, key) reply = case File.read(filename) do {:ok, content} -> :erlang.binary_to_term(content) _ -> nil end {:reply, reply, folder} end def handle_cast({:store, key, data}, folder) do get_filename(folder, key) |> File.write!(:erlang.term_to_binary(data)) {:noreply, folder} end defp get_filename(folder, key) do "#{folder}/#{key}.bin" end end
19.95
55
0.64411
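A small sketch of driving the worker above directly; the folder and key are made up, and the folder must exist because the worker writes to `folder/key.bin`.

```elixir
File.mkdir_p!("./persist")
{:ok, worker} = Todo.DatabaseWorker.start_link("./persist")

# store/3 is a cast and get/2 is a call, so the call below observes the preceding store.
Todo.DatabaseWorker.store(worker, "bobs_list", %{1 => "Buy milk"})
Todo.DatabaseWorker.get(worker, "bobs_list")
#=> %{1 => "Buy milk"}
```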
9ecd3404472628aa1ab2c78231671cafa1ae2e28
640
ex
Elixir
lib/stathat.ex
travisjeffery/stathat
15c0bf61de4188b0128f83f367a8aa94b870e4b4
[ "MIT" ]
null
null
null
lib/stathat.ex
travisjeffery/stathat
15c0bf61de4188b0128f83f367a8aa94b870e4b4
[ "MIT" ]
null
null
null
lib/stathat.ex
travisjeffery/stathat
15c0bf61de4188b0128f83f367a8aa94b870e4b4
[ "MIT" ]
null
null
null
defmodule StatHat do use Application require Logger def start(_type, _args) do import Supervisor.Spec, warn: false ez_key = Application.get_env(:stathat, :ez_key) children = [ worker(StatHat.Server, [[ez_key: ez_key]]) ] opts = [strategy: :one_for_one, name: StatHat.Supervisor] Supervisor.start_link(children, opts) end @doc """ Count the stat. """ def ez_count(stat, count) do GenServer.cast(StatHat, {:ez_count, stat, count}) end @doc """ Track the stat with the given value. """ def ez_value(stat, value) do GenServer.cast(StatHat, {:ez_value, stat, value}) end end
21.333333
61
0.660938
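A usage sketch for the record above. It assumes the `:stathat` application is configured with an `ez_key` and started, and that `StatHat.Server` registers itself under the `StatHat` name these casts target (that registration lives in `StatHat.Server`, which is not shown here); stat names are made up.

```elixir
# config/config.exs (illustrative):
#   config :stathat, ez_key: "YOUR_EZ_KEY"

StatHat.ez_count("user signups", 1)
StatHat.ez_value("average response ms", 42.5)
```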
9ecd641243afbffb94d711bec62f5379db317361
696
ex
Elixir
web/gettext.ex
alexadaze/alexa
35fbef14f6cd884a51447553cfe2cf9794d153b3
[ "Apache-2.0" ]
null
null
null
web/gettext.ex
alexadaze/alexa
35fbef14f6cd884a51447553cfe2cf9794d153b3
[ "Apache-2.0" ]
null
null
null
web/gettext.ex
alexadaze/alexa
35fbef14f6cd884a51447553cfe2cf9794d153b3
[ "Apache-2.0" ]
null
null
null
defmodule Alexa.Gettext do @moduledoc """ A module providing Internationalization with a gettext-based API. By using [Gettext](https://hexdocs.pm/gettext), your module gains a set of macros for translations, for example: import Alexa.Gettext # Simple translation gettext "Here is the string to translate" # Plural translation ngettext "Here is the string to translate", "Here are the strings to translate", 3 # Domain-based translation dgettext "errors", "Here is the error message to translate" See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage. """ use Gettext, otp_app: :alexa end
27.84
72
0.675287
9ecd693c45588f02faf57f8c0fd64c4424396816
32
exs
Elixir
test/test_helper.exs
libitx/shapeshifter
e20c49fa6b920084e427998394f72b470314f261
[ "Apache-2.0" ]
6
2020-08-10T18:33:00.000Z
2022-03-08T12:30:15.000Z
test/test_helper.exs
libitx/shapeshifter
e20c49fa6b920084e427998394f72b470314f261
[ "Apache-2.0" ]
1
2021-01-23T18:49:33.000Z
2021-01-24T19:48:27.000Z
test/test_helper.exs
libitx/shapeshifter
e20c49fa6b920084e427998394f72b470314f261
[ "Apache-2.0" ]
null
null
null
ExUnit.start(exclude: :pending)
16
31
0.78125