| Column | Type |
|---|---|
| hexsha | string (length 40) |
| size | int64 (2 to 991k) |
| ext | string (2 classes) |
| lang | string (1 class) |
| max_stars_repo_path | string (length 4 to 208) |
| max_stars_repo_name | string (length 6 to 106) |
| max_stars_repo_head_hexsha | string (length 40) |
| max_stars_repo_licenses | sequence |
| max_stars_count | int64 (1 to 33.5k), nullable |
| max_stars_repo_stars_event_min_datetime | string (length 24), nullable |
| max_stars_repo_stars_event_max_datetime | string (length 24), nullable |
| max_issues_repo_path | string (length 4 to 208) |
| max_issues_repo_name | string (length 6 to 106) |
| max_issues_repo_head_hexsha | string (length 40) |
| max_issues_repo_licenses | sequence |
| max_issues_count | int64 (1 to 16.3k), nullable |
| max_issues_repo_issues_event_min_datetime | string (length 24), nullable |
| max_issues_repo_issues_event_max_datetime | string (length 24), nullable |
| max_forks_repo_path | string (length 4 to 208) |
| max_forks_repo_name | string (length 6 to 106) |
| max_forks_repo_head_hexsha | string (length 40) |
| max_forks_repo_licenses | sequence |
| max_forks_count | int64 (1 to 6.91k), nullable |
| max_forks_repo_forks_event_min_datetime | string (length 24), nullable |
| max_forks_repo_forks_event_max_datetime | string (length 24), nullable |
| content | string (length 2 to 991k) |
| avg_line_length | float64 (1 to 36k) |
| max_line_length | int64 (1 to 977k) |
| alphanum_fraction | float64 (0 to 1) |
hexsha: 7311da8b1d9dc6205bccb91549a415d1c8f52b0e | size: 1,470 | ext: ex | lang: Elixir | path: clients/container_analysis/lib/google_api/container_analysis/v1beta1/model/artifact_rule.ex | repo: pojiro/elixir-google-api | head: 928496a017d3875a1929c6809d9221d79404b910 | licenses: ["Apache-2.0"] | stars: 1 (2021-12-20T03:40:53.000Z to 2021-12-20T03:40:53.000Z) | issues: 1 (2020-08-18T00:11:23.000Z to 2020-08-18T00:44:16.000Z) | forks: null

# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ContainerAnalysis.V1beta1.Model.ArtifactRule do
@moduledoc """
Defines an object to declare an in-toto artifact rule
## Attributes
* `artifactRule` (*type:* `list(String.t)`, *default:* `nil`) -
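
  A minimal decoding sketch (the JSON payload shown here is only an assumed example):

  ```elixir
  Poison.decode!(
    ~s({"artifactRule": ["CREATE", "some_rule"]}),
    as: %GoogleApi.ContainerAnalysis.V1beta1.Model.ArtifactRule{}
  )
  ```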
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:artifactRule => list(String.t()) | nil
}
field(:artifactRule, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.ContainerAnalysis.V1beta1.Model.ArtifactRule do
def decode(value, options) do
GoogleApi.ContainerAnalysis.V1beta1.Model.ArtifactRule.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ContainerAnalysis.V1beta1.Model.ArtifactRule do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
avg_line_length: 31.276596 | max_line_length: 86 | alphanum_fraction: 0.741497
hexsha: 731215348687e8dbb452580cd11df4ed3584fe84 | size: 97 | ext: ex | lang: Elixir | path: apps/massa_proxy/lib/massa_proxy/runtime/wasm/wasm.ex | repo: marcellanz/massa | head: 97423e2e45b37d372bba51c76c1dd8433ac85b8c | licenses: ["Apache-2.0"] | stars: null | issues: 16 (2021-11-01T08:19:03.000Z to 2022-03-30T08:17:43.000Z) | forks: null

defmodule MassaProxy.Runtime.Wasm do
@moduledoc """
"""
@behaviour MassaProxy.Runtime
end
avg_line_length: 13.857143 | max_line_length: 36 | alphanum_fraction: 0.721649
hexsha: 73124a3436343208615619729cc46a9a2072d5d7 | size: 1,138 | ext: ex | lang: Elixir | path: apps/artemis/lib/artemis/contexts/reaction/delete_many_associated_reactions.ex | head: 9930c3d9528e37b76f0525390e32b66eed7eadde | licenses: ["MIT"] | stars: 2 via artemis-platform/artemis_teams (2020-04-23T02:29:18.000Z to 2020-07-07T13:13:17.000Z) | issues: 4 via chrislaskey/artemis_teams (2020-04-26T20:35:36.000Z to 2020-11-10T22:13:19.000Z) | forks: null (repo: chrislaskey/artemis_teams)

defmodule Artemis.DeleteManyAssociatedReactions do
import Ecto.Query
alias Artemis.Reaction
alias Artemis.Repo
def call!(resource_type, resource_id \\ nil, user) do
case call(resource_type, resource_id, user) do
{:error, _} -> raise(Artemis.Context.Error, "Error deleting many associated reactions")
{:ok, result} -> result
result -> result
end
end
def call(resource_type, resource_id \\ nil, user) do
case delete_records(resource_type, resource_id, user) do
{:error, message} -> {:error, message}
{total, _} -> {:ok, %{total: total}}
end
end
defp delete_records(resource_type, resource_id, _user) do
Reaction
|> where([c], c.resource_type == ^resource_type)
|> maybe_where_resource_id(resource_id)
|> Repo.delete_all()
end
defp maybe_where_resource_id(query, nil), do: query
defp maybe_where_resource_id(query, resource_id) when is_integer(resource_id) do
maybe_where_resource_id(query, Integer.to_string(resource_id))
end
defp maybe_where_resource_id(query, resource_id) do
where(query, [c], c.resource_id == ^resource_id)
end
end
avg_line_length: 29.179487 | max_line_length: 93 | alphanum_fraction: 0.70826
hexsha: 731286f85566fabca212d7a261d0f339a0d78c88 | size: 891 | ext: ex | lang: Elixir | path: lib/makeup/styles/html/pygments/vs.ex | repo: rubysolo/makeup | head: e1c5d27e3f14c04904d74b94b6298bbc28a406c3 | licenses: ["BSD-2-Clause"] | stars: null | issues: null | forks: null
defmodule Makeup.Styles.HTML.VisualStudioStyle do
@moduledoc false
require Makeup.Token.TokenTypes
alias Makeup.Token.TokenTypes, as: Tok
@styles %{
Tok.error => "border:#FF0000",
Tok.keyword => "#0000ff",
Tok.keyword_type => "#2b91af",
Tok.name_class => "#2b91af",
Tok.string => "#a31515",
Tok.operator_word => "#0000ff",
Tok.comment => "#008000",
Tok.comment_preproc => "#0000ff",
Tok.generic_emph => "italic",
Tok.generic_heading => "bold",
Tok.generic_prompt => "bold",
Tok.generic_strong => "bold",
Tok.generic_subheading => "bold",
}
alias Makeup.Styles.HTML.Style
@style_struct Style.make_style(
short_name: "vs",
long_name: "VisualStudio Style",
background_color: "#ffffff",
highlight_color: "#ffffcc",
styles: @styles)
def style() do
@style_struct()
end
end | 24.081081 | 49 | 0.628507 |
hexsha: 7312bbf7e7971c2e8eb53140ebf024d897eb30c2 | size: 4,748 | ext: ex | lang: Elixir | path: lib/ueberauth/strategy/procore.ex | repo: alignd/ueberauth_procore | head: 7a3b9ff77086a233d65b4b60f80b57d73de35f2d | licenses: ["MIT"] | stars: null | issues: 2 (2019-04-03T07:36:00.000Z to 2019-04-22T07:26:56.000Z) | forks: 1 (2019-04-01T22:34:46.000Z to 2019-04-01T22:34:46.000Z)

defmodule Ueberauth.Strategy.Procore do
@moduledoc """
Provides an Ueberauth strategy for authenticating with Procore.
### Setup
Create an application in Procore for you to use.
Include the provider in your configuration for Ueberauth
config :ueberauth, Ueberauth,
providers: [
procore: { Ueberauth.Strategy.Procore, [] }
]
Then include the configuration for Procore.
config :ueberauth, Ueberauth.Strategy.Procore.OAuth,
client_id: System.get_env("PROCORE_CLIENT_ID"),
client_secret: System.get_env("PROCORE_CLIENT_SECRET"),
redirect_uri: "https://someproxy.com" #optional
The `redirect_uri` configuration option is intended for proxies due to
Procore requiring an HTTPS callback URL.
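
  Finally, a typical Ueberauth router setup, shown as an illustrative sketch (the
  `MyAppWeb` namespace, `AuthController`, and `:browser` pipeline names are assumptions):

      scope "/auth", MyAppWeb do
        pipe_through :browser

        get "/:provider", AuthController, :request
        get "/:provider/callback", AuthController, :callback
      end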
"""
use Ueberauth.Strategy, oauth2_module: Ueberauth.Strategy.Procore.OAuth
alias Ueberauth.Auth.{Info, Credentials, Extra}
alias Ueberauth.Strategy.Helpers
@doc """
Handles the initial redirect to the Procore authentication page.
"""
def handle_request!(conn) do
opts = [redirect_uri: redirect_uri(conn)]
module = option(conn, :oauth2_module)
Helpers.redirect!(conn, apply(module, :authorize_url!, [opts]))
end
@doc """
Handles the callback from Procore. When there is a failure from Procore the
failure is included in the `ueberauth_failure` struct. Otherwise the
information returned from Procore is returned in the `Ueberauth.Auth` struct.
"""
def handle_callback!(%Plug.Conn{params: %{"code" => code}} = conn) do
module = option(conn, :oauth2_module)
token = apply(module, :get_token!, [[code: code, redirect_uri: redirect_uri(conn)]])
if token.access_token == nil do
Helpers.set_errors!(conn, [
error(
token.other_params["error"],
token.other_params["error_description"]
)
])
else
fetch_user(conn, token)
end
end
@doc false
def handle_callback!(conn) do
set_errors!(conn, [error("missing_code", "No code received")])
end
@doc """
Cleans up the private area of the connection used for passing the raw Procore
response around during the callback.
"""
def handle_cleanup!(conn) do
conn
|> put_private(:procore_user, nil)
|> put_private(:procore_token, nil)
end
@doc """
Fetches the uid field from the Procore response. This is the id field for
the user.
"""
def uid(conn) do
conn.private.procore_user["id"]
end
@doc """
Includes the credentials from the Procore response.
"""
def credentials(conn) do
token = conn.private.procore_token
%Credentials{
token: token.access_token,
refresh_token: token.refresh_token,
expires_at: token.expires_at,
token_type: token.token_type,
expires: !!token.expires_at
}
end
@doc """
Fetches the fields to populate the info section of the `Ueberauth.Auth`
struct.
"""
def info(conn) do
user = conn.private.procore_user
%Info{
name: user["name"],
first_name: user["first_name"],
last_name: user["last_name"],
phone: user["business_phone"],
email: user["email_address"]
}
end
@doc """
Stores the raw information (including the token) obtained from the Procore
callback.
"""
def extra(conn) do
%Extra{
raw_info: %{
token: conn.private.procore_token,
user: conn.private.procore_user
}
}
end
defp fetch_user(conn, token) do
conn = put_private(conn, :procore_token, token)
with {:ok, companies} <- get_companies(token),
[%{"id" => id} | _] = companies,
{:ok, user} <- get_me(token, id) do
put_private(conn, :procore_user, user)
else
{:error, :unauthorized} -> set_errors!(conn, [error("token", "unauthorized")])
{:error, %OAuth2.Error{reason: reason}} -> set_errors!(conn, [error("OAuth2", reason)])
{:error, reason} -> set_errors!(conn, [error("OAuth2", reason)])
end
end
defp option(conn, key) do
Keyword.get(Helpers.options(conn), key, Keyword.get(default_options(), key))
end
defp redirect_uri(conn) do
option(conn, :redirect_uri) || Helpers.callback_url(conn)
end
defp get_companies(token) do
case Ueberauth.Strategy.Procore.OAuth.get(token, "/vapid/companies") do
{:ok, %OAuth2.Response{status_code: 200, body: companies}} -> {:ok, companies}
{:ok, %OAuth2.Response{status_code: 401, body: _body}} -> {:error, :unauthorized}
other -> {:error, other}
end
end
defp get_me(token, id) do
case Ueberauth.Strategy.Procore.OAuth.get(token, "/vapid/companies/#{id}/me") do
{:ok, %OAuth2.Response{status_code: 200, body: me}} -> {:ok, me}
_other -> {:error, :no_user}
end
end
end
avg_line_length: 28.775758 | max_line_length: 93 | alphanum_fraction: 0.662805
hexsha: 7312cfdcbbb779237f153678f40f9ce0053ae6d1 | size: 776 | ext: ex | lang: Elixir | path: apps/exsemantica_phx/lib/exsemantica_phx/site/user.ex | repo: Chlorophytus/exsemantica | head: f1c64cb8ae0543e5a2f015a65071d81d57fa3224 | licenses: ["Apache-2.0"] | stars: 1 (2021-09-11T15:46:04.000Z to 2021-09-11T15:46:04.000Z) | issues: 4 (2021-01-18T00:49:02.000Z to 2022-02-23T05:18:37.000Z) | forks: null

defmodule ExsemanticaPhx.Site.User do
use Ecto.Schema
import Ecto.Changeset
schema "site_users" do
field :biography, :string
field :contract, :binary
field :email, :string
field :node_corresponding, :integer
field :password, :binary
field :privmask, :binary
field :username, :string
timestamps()
end
@doc false
def changeset(user, attrs) do
user
|> cast(attrs, [:node_corresponding, :username, :biography, :email, :password, :contract, :privmask])
|> validate_required([:node_corresponding, :username, :biography, :email, :password, :contract, :privmask])
|> unique_constraint(:node_corresponding)
|> unique_constraint(:username)
|> unique_constraint(:email)
|> unique_constraint(:contract)
end
end
avg_line_length: 27.714286 | max_line_length: 111 | alphanum_fraction: 0.695876
hexsha: 7312e889f9e5b6bcc14f9cc8afe6d5c859097f41 | size: 364 | ext: exs | lang: Elixir | path: priv/repo/migrations/20210127132951_create_neighborhoods.exs | repo: gissandrogama/delivery_order | head: 8642453b03f590fe828225fc13aa58a5f79b2117 | licenses: ["MIT"] | stars: null | issues: 6 (2021-01-22T15:23:04.000Z to 2021-01-28T07:56:01.000Z) | forks: null

defmodule OrderApi.Repo.Migrations.CreateNeighborhoods do
use Ecto.Migration
def change do
create table(:neighborhoods) do
add :code, :string
add :name, :string
add :receiver_address_id, references(:receiver_address, on_delete: :nilify_all)
timestamps()
end
create index(:neighborhoods, [:receiver_address_id])
end
end
avg_line_length: 22.75 | max_line_length: 85 | alphanum_fraction: 0.714286
hexsha: 7312f020919968b54c24e885e4ba8295071af23f | size: 81,283 | ext: ex | lang: Elixir | path: lib/ash/filter/filter.ex | repo: smt116/ash | head: 880a17f197873eb1c8dc8d81a8b4d6d9cb570b3f | licenses: ["MIT"] | stars: null | issues: null | forks: null

defmodule Ash.Filter do
# credo:disable-for-this-file Credo.Check.Readability.StrictModuleLayout
@dialyzer {:nowarn_function, do_map: 2, map: 2}
alias Ash.Actions.Load
alias Ash.Engine.Request
alias Ash.Error.Query.{
AggregatesNotSupported,
CalculationsNotSupported,
InvalidFilterValue,
NoSuchAttributeOrRelationship,
NoSuchFilterPredicate,
NoSuchFunction,
NoSuchOperator,
ReadActionRequired
}
alias Ash.Error.Invalid.InvalidPrimaryKey
alias Ash.Query.Function.{Ago, Contains, If, IsNil}
alias Ash.Query.Operator.{
Eq,
GreaterThan,
GreaterThanOrEqual,
Has,
In,
LessThan,
LessThanOrEqual,
NotEq
}
alias Ash.Query.{BooleanExpression, Call, Not, Ref}
alias Ash.Query.{Aggregate, Calculation, Function, Operator}
@functions [
Ago,
Contains,
IsNil,
If
]
@operators [
Ash.Query.Operator.IsNil,
Eq,
NotEq,
In,
LessThan,
GreaterThan,
LessThanOrEqual,
GreaterThanOrEqual,
Has
] ++ Ash.Query.Operator.Basic.operator_modules()
@builtins @functions ++ @operators
@operators_with_aliases @operators |> Enum.reject(&(&1.name() == &1.operator()))
@operator_aliases [
equals: Eq,
not_equals: NotEq,
gt: GreaterThan,
lt: LessThan,
gte: GreaterThanOrEqual,
lte: LessThanOrEqual
] ++ Enum.map(@operators_with_aliases, &{&1.name(), &1})
@moduledoc """
The representation of a filter in Ash.
Ash filters are stored as nested `Ash.Query.BooleanExpression{}` and `%Ash.Query.Not{}` structs,
terminating in an operator or a function struct. An expression is simply a boolean operator
and the left and right hand side of that operator.
## Filter Templates
  Filter templates are simplified filter statements (they only support atom keys) that have substitutions in them.
  Currently, the substitutions are `{:_actor, :field}`, `{:_actor, :_primary_key}`, `{:_arg, :field}` and `{:_context, :field}`.
  You can pass a filter template to `build_filter_from_template/2` with an actor, and it will return a filled-in filter statement.
Additionally, you can ask if the filter template contains an actor reference via `template_references_actor?/1`
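
  For example, a minimal sketch (the `owner_id` attribute and the actor shape here are assumptions for illustration):

  ```elixir
  template = [owner_id: {:_actor, :id}]

  Ash.Filter.build_filter_from_template(template, %{id: 42})
  # => [owner_id: 42]

  Ash.Filter.template_references_actor?({:_actor, :id})
  # => true
  ```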
## Writing a filter
### Built In Predicates
#{Enum.map_join(@operators, "\n", &"* `#{&1.operator()}`")}
#{Enum.map_join(@operator_aliases, "\n", fn {key, val} -> "* `#{key}` (alias for `#{val.operator()}`)" end)}
### BooleanExpression syntax
The expression syntax ultimately just builds the keyword list style filter, but with lots of conveniences that
would be very annoying to do manually.
Examples
```elixir
Ash.Query.filter(resource, name == "Zardoz")
Ash.Query.filter(resource, first_name == "Zar" and last_name == "Doz")
Ash.Query.filter(resource, first_name == "Zar" and last_name in ["Doz", "Daz"] and high_score > 10)
Ash.Query.filter(resource, first_name == "Zar" or last_name == "Doz" or (high_score > 10 and high_score < -10))
```
### Keyword list syntax
A filter is a nested keyword list (with some exceptions, like `true` for everything and `false` for nothing).
The key is the "predicate" (A.K.A condition) and the value is the parameter. You can use `and` and `or` to create
nested filters. Datalayers can expose custom predicates. Eventually, you will be able to define your own custom
predicates, which will be a mechanism for you to attach complex filters supported by the data layer to your queries.
  **Important**
In a given keyword list, all predicates are considered to be "ands". So `[or: [first_name: "Tom", last_name: "Bombadil"]]` doesn't
mean 'First name == "tom" or last_name == "bombadil"'. To say that, you want to provide a list of filters,
like so: `[or: [[first_name: "Tom"], [last_name: "Bombadil"]]]`
Some example filters:
```elixir
  Ash.Query.filter(resource, [name: "Zardoz"])
Ash.Query.filter(resource, [first_name: "Zar", last_name: "Doz"])
Ash.Query.filter(resource, [first_name: "Zar", last_name: [in: ["Doz", "Daz"]], high_score: [greater_than: 10]])
Ash.Query.filter(resource, [or: [
[first_name: "Zar"],
[last_name: "Doz"],
[or: [
      [high_score: [greater_than: 10]],
      [high_score: [less_than: -10]]
    ]]
  ]])
  ```

  ### Other formats
Maps are also accepted, as are maps with string keys. Technically, a list of `[{"string_key", value}]` would also work.
If you are using a map with string keys, it is likely that you are parsing input. It is important to note that, before
passing a filter supplied from an external source directly to `Ash.Query.filter/2`, you should first call `Ash.Filter.parse_input/2`
(or `Ash.Filter.parse_input/4` if your query has aggregates/calculations in it). This ensures that the filter only uses public attributes,
relationships, aggregates and calculations. You may additionally wish to pass in the query context, in the case that you have calculations
that use the provided context.
"""
@builtin_operators Enum.map(@operators, &{&1.operator(), &1}) ++ @operator_aliases
@builtin_functions Enum.map(@functions, &{&1.name(), &1})
@string_builtin_operators Enum.into(@builtin_operators, %{}, fn {key, value} ->
{to_string(key), value}
end)
@string_builtin_functions Enum.into(@builtin_functions, %{}, fn {key, value} ->
{to_string(key), value}
end)
defstruct [:resource, :expression]
@type t :: %__MODULE__{}
def builtins, do: @builtins
def builtin_functions, do: @functions
def builtin_operators, do: @operators
def builtin_predicate_operators, do: Enum.filter(@operators, & &1.predicate?())
defmodule Simple do
@moduledoc "Represents a simplified filter, with a simple list of predicates"
defstruct [:resource, :predicates]
defmodule Not do
@moduledoc "A negated predicate"
defstruct [:predicate]
end
end
@doc """
Parses a filter statement, accepting only public attributes/relationships
See `parse/2` for more
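
  For example, a sketch (assuming a `MyApp.Post` resource with a public `title` attribute):

  ```elixir
  {:ok, filter} = Ash.Filter.parse_input(MyApp.Post, %{"title" => "foo"})
  ```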
"""
def parse_input(
resource,
statement,
aggregates \\ %{},
calculations \\ %{},
context \\ %{}
) do
context = %{
resource: resource,
relationship_path: [],
aggregates: aggregates,
calculations: calculations,
public?: true,
data_layer: Ash.DataLayer.data_layer(resource),
query_context: context
}
with {:ok, expression} <- parse_expression(statement, context),
:ok <- validate_references(expression, resource) do
{:ok, %__MODULE__{expression: expression, resource: resource}}
end
end
@doc """
Parses a filter statement, accepting only public attributes/relationships, raising on errors.
See `parse_input/2` for more
"""
def parse_input!(resource, statement, aggregates \\ %{}, calculations \\ %{}, context \\ %{}) do
case parse_input(resource, statement, aggregates, calculations, context) do
{:ok, filter} ->
filter
{:error, error} ->
raise error
end
end
@doc """
Parses a filter statement
See `parse/2` for more
"""
def parse!(resource, statement, aggregates \\ %{}, calculations \\ %{}, context \\ %{}) do
case parse(resource, statement, aggregates, calculations, context) do
{:ok, filter} ->
filter
{:error, error} ->
raise error
end
end
@doc """
Parses a filter statement
See the module documentation for more information on the supported formats for filter
statements.
### Important
If you are trying to validate a filter supplied from an external/untrusted source,
be sure to use `parse_input/2` instead! The only difference is that it only accepts
filters over public attributes/relationships.
### Aggregates and calculations
Since custom aggregates/calculations can be added to a query, and they must be explicitly loaded into
a query, the filter parser does not parse them by default. If you wish to support parsing filters
over aggregates/calculations, provide them as the third argument. The best way to do this is to build a query
with them added/loaded, and then use the `aggregates` and `calculations` keys on the query.
### NOTE
A change was made recently that will automatically load any aggregates/calculations that are used in a filter, but
if you are using this function you still need to pass them in.
```elixir
Ash.Filter.parse(MyResource, [id: 1], query.aggregates, query.calculations)
```
"""
def parse(resource, statement, aggregates \\ %{}, calculations \\ %{}, context \\ %{})
def parse(_resource, nil, _aggregates, _calculations, _context) do
{:ok, nil}
end
def parse(resource, statement, aggregates, calculations, context) do
context = %{
resource: resource,
relationship_path: [],
aggregates: aggregates,
calculations: calculations,
public?: false,
data_layer: Ash.DataLayer.data_layer(resource),
query_context: context
}
with {:ok, expression} <- parse_expression(statement, context),
:ok <- validate_references(expression, resource) do
{:ok, %__MODULE__{expression: expression, resource: resource}}
end
end
defp validate_references(expression, resource) do
refs =
expression
|> list_refs()
|> Enum.map(fn ref ->
field =
case ref.attribute do
field when is_atom(field) ->
Ash.Resource.Info.field(resource, field)
field ->
field
end
%{ref | attribute: field}
end)
errors =
refs
|> Enum.flat_map(fn ref ->
field = ref.attribute
        # This handles manually added calculations and aggregates
case Map.fetch(field, :filterable?) do
:error ->
[]
{:ok, true} ->
[]
{:ok, false} ->
[Ash.Error.Query.InvalidFilterReference.exception(field: field.name)]
{:ok, :simple_equality} ->
if ref.simple_equality? do
[]
else
[
Ash.Error.Query.InvalidFilterReference.exception(
field: field.name,
simple_equality?: true
)
]
end
end
end)
multiple_filter_errors =
refs
|> Enum.filter(fn ref ->
Map.fetch(ref.attribute, :filterable?) == {:ok, :simple_equality}
end)
|> Enum.group_by(& &1.attribute.name)
|> Enum.flat_map(fn
{_, []} ->
[]
{_, [_]} ->
[]
{name, _} ->
[
Ash.Error.Query.InvalidFilterReference.exception(
field: name,
simple_equality?: true
)
]
end)
case Enum.concat(errors, multiple_filter_errors) do
[] ->
:ok
errors ->
{:error, Enum.uniq(errors)}
end
end
@doc """
Returns a filter statement that would find a single record based on the input.
For example:
iex> get_filter(MyApp.Post, 1)
{:ok, %{id: 1}} #using primary key
iex> get_filter(MyApp.Post, id: 1)
{:ok, %{id: 1}} #using primary key
iex> get_filter(MyApp.Post, author_id: 1, publication_id: 2, first_name: "fred")
{:ok, %{author_id: 1, publication_id: 1}} # using a unique identity
iex> get_filter(MyApp.Post, first_name: "fred")
:error # not enough information
"""
def get_filter(resource, id) do
primary_key = Ash.Resource.Info.primary_key(resource)
keyval? = Keyword.keyword?(id) || is_map(id)
case {primary_key, id} do
{[field], [{field, value}]} ->
{:ok, %{field => value}}
{[field], value} when not keyval? ->
{:ok, %{field => value}}
{fields, value} ->
if keyval? do
with :error <- get_keys(value, fields),
:error <- get_identity_filter(resource, id) do
{:error, InvalidPrimaryKey.exception(resource: resource, value: id)}
end
else
{:error, InvalidPrimaryKey.exception(resource: resource, value: id)}
end
end
end
defp get_keys(value, fields) do
Enum.reduce_while(fields, {:ok, %{}}, fn field, {:ok, vals} ->
case fetch(value, field) do
{:ok, value} ->
{:cont, {:ok, Map.put(vals, field, value)}}
:error ->
case fetch(value, to_string(field)) do
{:ok, value} ->
{:cont, {:ok, Map.put(vals, field, value)}}
:error ->
{:halt, :error}
end
end
end)
end
defp fetch(val, key) when is_map(val), do: Map.fetch(val, key)
defp fetch(val, key) when is_list(val) and is_atom(key), do: Keyword.fetch(val, key)
defp fetch(_, _), do: :error
defp get_identity_filter(resource, id) do
resource
|> Ash.Resource.Info.identities()
|> Enum.find_value(
:error,
fn identity ->
case get_keys(id, identity.keys) do
{:ok, key} ->
{:ok, key}
_ ->
nil
end
end
)
end
@to_simple_filter_options [
skip_invalid?: [
type: :boolean,
default: false,
doc:
"If an invalid filter expression is reached that can't be used with a simple filter (like an `or` statement, or a non-predicate expression), it will be ignored instead of raising an error."
]
]
@doc """
  Transform an expression-based filter to a simple filter, which is just a list of predicates.

  Options:

  - `skip_invalid?` - If an invalid filter expression is reached that can't be used with a simple filter (like an `or` statement, or a non-predicate expression), it will be ignored instead of raising an error. Defaults to `false`.
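
  For example, a sketch (assuming a `MyApp.Post` resource with a `title` attribute):

  ```elixir
  filter = Ash.Filter.parse!(MyApp.Post, title: "foo")

  %Ash.Filter.Simple{predicates: predicates} = Ash.Filter.to_simple_filter(filter)
  ```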
"""
def to_simple_filter(%{resource: resource, expression: expression}, opts \\ []) do
opts = NimbleOptions.validate!(opts, @to_simple_filter_options)
predicates = get_predicates(expression, opts[:skip_invalid?])
%Simple{resource: resource, predicates: predicates}
end
@doc "Replace any actor value references in a template with the values from a given actor"
def build_filter_from_template(template, actor \\ nil, args \\ %{}, context \\ %{}) do
walk_filter_template(template, fn
{:_actor, :_primary_key} ->
if actor do
Map.take(actor, Ash.Resource.Info.primary_key(actor.__struct__))
else
false
end
{:_actor, field} ->
Map.get(actor || %{}, field)
{:_arg, field} ->
Map.get(args, field) || Map.get(args, to_string(field))
{:_context, fields} when is_list(fields) ->
get_in(context, fields)
{:_context, field} ->
Map.get(context, field)
other ->
other
end)
end
@doc "Whether or not a given template contains an actor reference"
def template_references_actor?({:_actor, _}), do: true
def template_references_actor?(%BooleanExpression{op: :and, left: left, right: right}) do
template_references_actor?(left) || template_references_actor?(right)
end
def template_references_actor?(%Not{expression: expression}) do
template_references_actor?(expression)
end
def template_references_actor?(%{left: left, right: right}) do
template_references_actor?(left) || template_references_actor?(right)
end
def template_references_actor?(%{arguments: args}) do
Enum.any?(args, &template_references_actor?/1)
end
def template_references_actor?(%Ash.Query.Call{args: args}) do
Enum.any?(args, &template_references_actor?/1)
end
def template_references_actor?(_), do: false
defp walk_filter_template(filter, mapper) when is_list(filter) do
case mapper.(filter) do
^filter ->
Enum.map(filter, &walk_filter_template(&1, mapper))
other ->
walk_filter_template(other, mapper)
end
end
defp walk_filter_template(%BooleanExpression{left: left, right: right} = expr, mapper) do
case mapper.(expr) do
^expr ->
%{
expr
| left: walk_filter_template(left, mapper),
right: walk_filter_template(right, mapper)
}
other ->
walk_filter_template(other, mapper)
end
end
defp walk_filter_template(%Not{expression: expression} = not_expr, mapper) do
case mapper.(not_expr) do
^not_expr ->
%{not_expr | expression: walk_filter_template(expression, mapper)}
other ->
walk_filter_template(other, mapper)
end
end
defp walk_filter_template(%{__predicate__?: _, left: left, right: right} = pred, mapper) do
case mapper.(pred) do
^pred ->
%{
pred
| left: walk_filter_template(left, mapper),
right: walk_filter_template(right, mapper)
}
other ->
walk_filter_template(other, mapper)
end
end
defp walk_filter_template(%{__predicate__?: _, arguments: arguments} = func, mapper) do
case mapper.(func) do
^func ->
%{
func
| arguments: Enum.map(arguments, &walk_filter_template(&1, mapper))
}
other ->
walk_filter_template(other, mapper)
end
end
defp walk_filter_template(%Call{args: args} = call, mapper) do
case mapper.(call) do
^call ->
%{
call
| args: Enum.map(args, &walk_filter_template(&1, mapper))
}
other ->
walk_filter_template(other, mapper)
end
end
defp walk_filter_template(filter, mapper) when is_map(filter) do
if Map.has_key?(filter, :__struct__) do
filter
else
case mapper.(filter) do
^filter ->
Enum.into(filter, %{}, &walk_filter_template(&1, mapper))
other ->
walk_filter_template(other, mapper)
end
end
end
defp walk_filter_template(tuple, mapper) when is_tuple(tuple) do
case mapper.(tuple) do
^tuple ->
tuple
|> Tuple.to_list()
|> Enum.map(&walk_filter_template(&1, mapper))
|> List.to_tuple()
other ->
walk_filter_template(other, mapper)
end
end
defp walk_filter_template(value, mapper), do: mapper.(value)
@doc """
  Can be used to find a simple equality predicate on an attribute.

  Use this when your attribute is configured with `filterable? :simple_equality`, and you want
  to find the value it is being filtered on (if any).
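
  For example, a sketch (assuming a `MyApp.Post` resource with a `title` attribute):

  ```elixir
  filter = Ash.Filter.parse!(MyApp.Post, title: "foo")

  Ash.Filter.find_simple_equality_predicate(filter.expression, :title)
  # => "foo"
  ```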
"""
def find_simple_equality_predicate(expression, attribute) do
expression
|> find(&simple_eq?(&1, attribute))
|> case do
nil ->
nil
%{right: right} ->
right
end
end
defp simple_eq?(%Eq{left: %Ref{}, right: %Ref{}}, _), do: false
defp simple_eq?(%Eq{right: %Ref{}} = eq, attribute) do
simple_eq?(%{eq | left: eq.right, right: eq.left}, attribute)
end
defp simple_eq?(%Eq{left: %Ref{attribute: attribute}}, attribute), do: true
defp simple_eq?(%Eq{left: %Ref{attribute: %{name: attribute}}}, attribute), do: true
defp simple_eq?(_, _), do: false
@doc "Find an expression inside of a filter that matches the provided predicate"
def find(expr, pred) do
if pred.(expr) do
expr
else
case expr do
%__MODULE__{expression: expression} ->
find(expression, pred)
%Not{expression: expression} ->
find(expression, pred)
%BooleanExpression{left: left, right: right} ->
find(left, pred) || find(right, pred)
%Call{args: arguments} ->
Enum.find(arguments, &find(&1, pred))
%{__operator__?: true, left: left, right: right} ->
find(left, pred) || find(right, pred)
%{__function__?: true, arguments: arguments} ->
Enum.find(arguments, &find(&1, pred))
_ ->
nil
end
end
end
defp get_predicates(expr, skip_invalid?, acc \\ [])
defp get_predicates(true, _skip_invalid?, acc), do: acc
defp get_predicates(false, _, _), do: false
defp get_predicates(_, _, false), do: false
defp get_predicates(%BooleanExpression{op: :and, left: left, right: right}, skip_invalid?, acc) do
acc = get_predicates(left, skip_invalid?, acc)
get_predicates(right, skip_invalid?, acc)
end
defp get_predicates(%Not{expression: expression}, skip_invalid?, acc) do
expression
|> get_predicates(skip_invalid?)
|> Enum.reduce(acc, fn predicate, acc ->
[%Simple.Not{predicate: predicate} | acc]
end)
end
defp get_predicates(%{__predicate__?: true} = predicate, _skip_invalid?, acc),
do: [predicate | acc]
defp get_predicates(_invalid, true, acc), do: acc
defp get_predicates(invalid, false, _acc) do
raise "Invalid filter statement provided: #{inspect(invalid)} while constructing a simple filter. To skip invalid statements, use `skip_invalid?: true`."
end
def used_calculations(
filter,
resource,
relationship_path \\ [],
calculations \\ %{},
aggregates \\ %{}
) do
filter
|> list_refs()
|> Enum.filter(fn
%Ref{attribute: %Calculation{}, relationship_path: ref_relationship_path} ->
(relationship_path in [nil, []] and ref_relationship_path in [nil, []]) ||
relationship_path == ref_relationship_path
_ ->
false
end)
|> Enum.map(& &1.attribute)
|> calculations_used_by_calculations(
resource,
relationship_path,
calculations,
aggregates
)
end
defp calculations_used_by_calculations(
used_calculations,
resource,
relationship_path,
calculations,
aggregates
) do
used_calculations
|> Enum.flat_map(fn calculation ->
expression = calculation.module.expression(calculation.opts, calculation.context)
case Ash.Filter.hydrate_refs(expression, %{
resource: resource,
aggregates: aggregates,
calculations: calculations,
public?: false
}) do
{:ok, expression} ->
with_recursive_used =
calculations_used_by_calculations(
used_calculations(
expression,
resource,
relationship_path,
calculations,
aggregates
),
resource,
relationship_path,
calculations,
aggregates
)
[calculation | with_recursive_used]
_ ->
[calculation]
end
end)
end
def used_aggregates(filter, relationship_path \\ [], return_refs? \\ false) do
refs =
filter
|> list_refs()
|> Enum.filter(fn
%Ref{attribute: %Aggregate{}, relationship_path: ref_relationship_path} ->
relationship_path == :all ||
(relationship_path in [nil, []] and ref_relationship_path in [nil, []]) ||
relationship_path == ref_relationship_path
_ ->
false
end)
if return_refs? do
refs
else
Enum.map(refs, & &1.attribute)
end
end
def put_at_path(value, []), do: value
def put_at_path(value, [key | rest]), do: [{key, put_at_path(value, rest)}]
def add_to_filter!(
base,
addition,
op \\ :and,
aggregates \\ %{},
calculations \\ %{},
context \\ %{}
) do
case add_to_filter(base, addition, op, aggregates, calculations, context) do
{:ok, value} ->
value
{:error, error} ->
raise Ash.Error.to_ash_error(error)
end
end
def add_to_filter(
base,
addition,
op \\ :and,
aggregates \\ %{},
calculations \\ %{},
context \\ %{}
)
def add_to_filter(nil, %__MODULE__{} = addition, _, _, _, _), do: {:ok, addition}
def add_to_filter(
%__MODULE__{} = base,
%__MODULE__{} = addition,
op,
_,
_,
_
) do
{:ok,
%{
base
| expression: BooleanExpression.optimized_new(op, base.expression, addition.expression)
}}
end
def add_to_filter(%__MODULE__{} = base, statement, op, aggregates, calculations, context) do
case parse(base.resource, statement, aggregates, calculations, context) do
{:ok, filter} -> add_to_filter(base, filter, op, aggregates, calculations)
{:error, error} -> {:error, error}
end
end
@doc """
Returns true if the second argument is a strict subset (always returns the same or less data) of the first
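
  For example, a sketch (`MyApp.Post` and its `name` attribute are assumed):

  ```elixir
  filter = Ash.Filter.parse!(MyApp.Post, name: [in: ["fred", "george"]])
  candidate = Ash.Filter.parse!(MyApp.Post, name: "fred")

  Ash.Filter.strict_subset_of?(filter, candidate)
  # => true, because `name == "fred"` only matches records that the `in` filter also matches
  ```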
"""
def strict_subset_of(nil, _), do: true
def strict_subset_of(_, nil), do: false
def strict_subset_of(%{resource: resource}, %{resource: other_resource})
when resource != other_resource,
do: false
def strict_subset_of(filter, candidate) do
Ash.SatSolver.strict_filter_subset(filter, candidate)
end
def strict_subset_of?(filter, candidate) do
strict_subset_of(filter, candidate) == true
end
def relationship_filter_request_paths(filter) do
filter
|> relationship_paths()
|> Enum.map(&[:filter, &1])
end
def read_requests(_, nil, _), do: {:ok, []}
def read_requests(api, %{resource: original_resource} = filter, request_path) do
filter
|> Ash.Filter.relationship_paths()
|> Enum.map(fn path ->
{path, scope_expression_by_relationship_path(filter, path)}
end)
|> Enum.reduce_while({:ok, []}, fn {path, scoped_filter}, {:ok, requests} ->
%{resource: resource} = scoped_filter
with %{errors: []} = query <- Ash.Query.new(resource, api),
%{errors: []} = query <- Ash.Query.do_filter(query, scoped_filter),
{:action, action} when not is_nil(action) <-
{:action, Ash.Resource.Info.primary_action(resource, :read)} do
request =
Request.new(
resource: resource,
api: api,
query:
Request.resolve(
[request_path ++ [:data, :authorization_filter]],
fn context ->
authorization_filter =
get_in(context, request_path ++ [:data, :authorization_filter])
if authorization_filter do
relationship =
Ash.Resource.Info.relationship(
original_resource,
List.first(path)
)
case Load.reverse_relationship_path(
relationship,
tl(path)
) do
:error ->
{:ok, query}
{:ok, reverse_relationship} ->
filter = put_at_path(authorization_filter, reverse_relationship)
{:ok, Ash.Query.do_filter(query, filter)}
end
else
{:ok, query}
end
end
),
async?: false,
path: request_path ++ [:filter, path],
strict_check_only?: true,
action: action,
name: "authorize filter #{Enum.join(path, ".")}",
data: []
)
{:cont, {:ok, [request | requests]}}
else
{:error, error} -> {:halt, {:error, error}}
%{errors: errors} -> {:halt, {:error, errors}}
{:action, nil} -> {:halt, {:error, ReadActionRequired.exception(resource: resource)}}
end
end)
end
defp map(%__MODULE__{expression: nil} = filter, _) do
filter
end
defp map(%__MODULE__{expression: expression} = filter, func) do
%{filter | expression: do_map(func.(expression), func)}
end
defp map(expression, func) do
do_map(func.(expression), func)
end
defp do_map(expression, func) do
case expression do
{:halt, expr} ->
expr
%BooleanExpression{left: left, right: right} = expr ->
%{expr | left: do_map(left, func), right: do_map(right, func)}
%Not{expression: not_expr} = expr ->
%{expr | expression: do_map(not_expr, func)}
%{__operator__?: true, left: left, right: right} = op ->
%{op | left: do_map(left, func), right: do_map(right, func)}
      # Bind to `function` so we don't shadow the mapper `func`
      %{__function__?: true, arguments: arguments} = function ->
        %{
          function
          | arguments:
              Enum.map(arguments, fn
                {key, arg} when is_atom(key) ->
                  {key, do_map(arg, func)}

                arg ->
                  do_map(arg, func)
              end)
        }
other ->
func.(other)
end
end
def update_aggregates(%__MODULE__{expression: expression} = filter, mapper) do
%{filter | expression: update_aggregates(expression, mapper)}
end
def update_aggregates(expression, mapper) do
case expression do
{key, value} when is_atom(key) ->
{key, update_aggregates(value, mapper)}
%Not{expression: expression} = not_expr ->
%{not_expr | expression: update_aggregates(expression, mapper)}
%BooleanExpression{left: left, right: right} = expression ->
%{
expression
| left: update_aggregates(left, mapper),
right: update_aggregates(right, mapper)
}
%{__operator__?: true, left: left, right: right} = op ->
left = update_aggregates(left, mapper)
right = update_aggregates(right, mapper)
%{op | left: left, right: right}
%{__function__?: true, arguments: args} = func ->
%{func | arguments: Enum.map(args, &update_aggregates(&1, mapper))}
%Ref{attribute: %Aggregate{} = agg} = ref ->
%{ref | attribute: mapper.(agg, ref)}
other ->
other
end
end
def run_other_data_layer_filters(api, resource, %{expression: expression} = filter, data) do
case do_run_other_data_layer_filters(expression, api, resource, data) do
{:filter_requests, requests} -> {:filter_requests, requests}
{:ok, new_expression} -> {:ok, %{filter | expression: new_expression}}
{:error, error} -> {:error, error}
end
end
def run_other_data_layer_filters(_, _, filter, _data) when filter in [nil, true, false],
do: {:ok, filter}
defp do_run_other_data_layer_filters(
%BooleanExpression{op: :or, left: left, right: right},
api,
resource,
data
) do
left_result = do_run_other_data_layer_filters(left, api, resource, data)
right_result = do_run_other_data_layer_filters(right, api, resource, data)
case {left_result, right_result} do
{{:ok, left}, {:ok, right}} ->
{:ok, BooleanExpression.optimized_new(:or, left, right)}
{{:error, error}, _} ->
{:error, error}
{_, {:error, error}} ->
{:error, error}
{{:filter_requests, left_filter_requests}, {:filter_requests, right_filter_requests}} ->
{:filter_requests, left_filter_requests ++ right_filter_requests}
{{:filter_requests, left_filter_requests}, _} ->
{:filter_requests, left_filter_requests}
{_, {:filter_requests, right_filter_requests}} ->
{:filter_requests, right_filter_requests}
end
end
defp do_run_other_data_layer_filters(
%BooleanExpression{op: :and} = expression,
api,
resource,
data
) do
expression
|> relationship_paths(:ands_only)
|> filter_paths_that_change_data_layers(resource)
|> case do
[] ->
{:ok, expression}
paths ->
paths
|> do_run_other_data_layer_filter_paths(expression, resource, api, data)
|> case do
{:filter_requests, requests} -> {:filter_requests, requests}
{:ok, result} -> do_run_other_data_layer_filters(result, api, resource, data)
{:error, error} -> {:error, error}
end
end
end
defp do_run_other_data_layer_filters(%Not{expression: expression}, api, resource, data) do
case do_run_other_data_layer_filters(expression, api, resource, data) do
{:ok, expr} -> {:ok, Not.new(expr)}
{:filter_requests, requests} -> {:filter_requests, requests}
{:error, error} -> {:error, error}
end
end
defp do_run_other_data_layer_filters(%{__predicate__?: _} = predicate, api, resource, data) do
predicate
|> relationship_paths(:ands_only)
|> filter_paths_that_change_data_layers(resource)
|> Enum.find_value(fn path ->
case split_expression_by_relationship_path(predicate, path) do
{nil, _} ->
nil
{for_path, nil} ->
{path, for_path}
end
end)
|> case do
nil ->
{:ok, predicate}
{path, new_predicate} ->
relationship = Ash.Resource.Info.relationship(resource, path)
fetch_related_data(resource, path, new_predicate, api, relationship, data)
end
end
defp do_run_other_data_layer_filters(other, _api, _resource, _data), do: {:ok, other}
defp do_run_other_data_layer_filter_paths(paths, expression, resource, api, data) do
Enum.reduce_while(paths, {:ok, expression, []}, fn path, {:ok, expression, requests} ->
{for_path, without_path} = split_expression_by_relationship_path(expression, path)
relationship = Ash.Resource.Info.relationship(resource, path)
query =
relationship.destination
|> Ash.Query.new(api)
|> Map.put(:filter, %__MODULE__{
expression: for_path,
resource: relationship.destination
})
case filter_related_in(query, relationship, :lists.droplast(path), api, data) do
{:ok, new_predicate} ->
if requests == [] do
{:cont, {:ok, BooleanExpression.optimized_new(:and, without_path, new_predicate), []}}
else
{:cont, {:ok, new_predicate, []}}
end
        {:filter_requests, new_requests} ->
          {:cont, {:ok, expression, requests ++ new_requests}}
{:error, error} ->
{:halt, {:error, error}}
end
end)
|> case do
{:ok, expr, []} ->
{:ok, expr}
{:ok, _, requests} ->
{:filter_requests, requests}
other ->
other
end
end
defp split_expression_by_relationship_path(%{expression: expression}, path) do
split_expression_by_relationship_path(expression, path)
end
defp split_expression_by_relationship_path(
%BooleanExpression{op: op, left: left, right: right},
path
) do
{new_for_path_left, new_without_path_left} = split_expression_by_relationship_path(left, path)
{new_for_path_right, new_without_path_right} =
split_expression_by_relationship_path(right, path)
{BooleanExpression.optimized_new(op, new_for_path_left, new_for_path_right),
BooleanExpression.optimized_new(op, new_without_path_left, new_without_path_right)}
end
defp split_expression_by_relationship_path(%Not{expression: expression}, path) do
{new_for_path, new_without_path} = split_expression_by_relationship_path(expression, path)
{Not.new(new_for_path), Not.new(new_without_path)}
end
defp split_expression_by_relationship_path(
%{
__predicate__?: _,
left: left,
right: right
} = predicate,
path
) do
refs = list_refs([left, right])
if Enum.any?(refs, &List.starts_with?(&1.relationship_path, path)) do
if Enum.all?(refs, &List.starts_with?(&1.relationship_path, path)) do
{scope_refs(predicate, path), nil}
else
{scope_refs(predicate, path), predicate}
end
else
{nil, predicate}
end
end
defp split_expression_by_relationship_path(
%{__predicate__?: _, arguments: args} = predicate,
path
) do
refs = list_refs(args)
if Enum.any?(refs, &List.starts_with?(&1.relationship_path, path)) do
if Enum.all?(refs, &List.starts_with?(&1.relationship_path, path)) do
{scope_refs(predicate, path), nil}
else
{scope_refs(predicate, path), predicate}
end
else
{nil, predicate}
end
end
defp scope_refs(%BooleanExpression{left: left, right: right} = expr, path) do
%{expr | left: scope_refs(left, path), right: scope_refs(right, path)}
end
defp scope_refs(%Not{expression: expression} = expr, path) do
%{expr | expression: scope_refs(expression, path)}
end
defp scope_refs(%{__predicate__?: _, left: left, right: right} = pred, path) do
%{pred | left: scope_refs(left, path), right: scope_refs(right, path)}
end
  defp scope_refs(%{__predicate__?: _, arguments: arguments} = pred, path) do
    %{pred | arguments: Enum.map(arguments, &scope_refs(&1, path))}
  end
defp scope_refs({key, value}, path) do
{key, scope_refs(value, path)}
end
defp scope_refs(%Ref{relationship_path: ref_path} = ref, path) do
if List.starts_with?(ref_path, path) do
%{ref | relationship_path: Enum.drop(ref_path, Enum.count(path))}
else
ref
end
end
defp scope_refs(other, _), do: other
def prefix_refs(%BooleanExpression{left: left, right: right} = expr, path) do
%{expr | left: prefix_refs(left, path), right: prefix_refs(right, path)}
end
def prefix_refs(%Not{expression: expression} = expr, path) do
%{expr | expression: prefix_refs(expression, path)}
end
def prefix_refs(%{__predicate__?: _, left: left, right: right} = pred, path) do
%{pred | left: prefix_refs(left, path), right: prefix_refs(right, path)}
end
  def prefix_refs(%{__predicate__?: _, arguments: arguments} = pred, path) do
    %{pred | arguments: Enum.map(arguments, &prefix_refs(&1, path))}
  end
def prefix_refs(%Ref{relationship_path: ref_path} = ref, path) do
if List.starts_with?(ref_path, path) do
%{ref | relationship_path: path ++ ref_path}
else
ref
end
end
def prefix_refs(other, _), do: other
defp fetch_related_data(
resource,
path,
new_predicate,
api,
%{type: :many_to_many, join_relationship: join_relationship, through: through} =
relationship,
data
) do
if Ash.DataLayer.data_layer(through) == Ash.DataLayer.data_layer(resource) &&
Ash.DataLayer.data_layer_can?(resource, {:join, through}) do
filter = %__MODULE__{
resource: relationship.destination,
expression: new_predicate
}
relationship.destination
|> Ash.Query.new(api)
|> Ash.Query.do_filter(filter)
|> filter_related_in(
relationship,
:lists.droplast(path) ++ [join_relationship],
api,
data
)
else
filter = %__MODULE__{
resource: through,
expression: new_predicate
}
relationship.destination
|> Ash.Query.new(api)
|> Ash.Query.do_filter(filter)
|> Ash.Actions.Read.unpaginated_read()
|> case do
{:ok, results} ->
relationship.through
|> Ash.Query.new(api)
|> Ash.Query.do_filter([
{relationship.destination_field_on_join_table,
in: Enum.map(results, &Map.get(&1, relationship.destination_field))}
])
|> filter_related_in(
Ash.Resource.Info.relationship(resource, join_relationship),
:lists.droplast(path),
api,
data
)
{:error, error} ->
{:error, error}
end
end
end
defp fetch_related_data(
_resource,
path,
new_predicate,
api,
relationship,
data
) do
filter = %__MODULE__{
resource: relationship.destination,
expression: new_predicate
}
relationship.destination
|> Ash.Query.new(api)
|> Ash.Query.do_filter(filter)
|> Ash.Query.do_filter(relationship.filter)
|> Ash.Query.sort(relationship.sort)
|> Ash.Query.set_context(relationship.context)
|> filter_related_in(relationship, :lists.droplast(path), api, data)
end
defp filter_related_in(query, relationship, path, api, {request_path, tenant, data}) do
query = Ash.Query.set_tenant(query, tenant)
path = request_path ++ [:other_data_layer_filter, path ++ [relationship.name], query]
case get_in(data, path ++ [:data]) do
%{data: records} ->
records_to_expression(
records,
relationship,
path
)
_ ->
action = Ash.Resource.Info.primary_action!(query.resource, :read)
action = %{action | pagination: false}
{:filter_requests,
Ash.Actions.Read.as_requests(path, query.resource, api, action,
query: query,
page: false,
tenant: tenant
)
|> Enum.map(fn request ->
# By returning the request and a key, we register a dependency on that key
{request, :data}
end)}
end
end
defp records_to_expression([], _, _), do: {:ok, false}
defp records_to_expression([single_record], relationship, path) do
Ash.Query.Operator.new(
Eq,
%Ref{
relationship_path: path,
resource: relationship.source,
attribute: Ash.Resource.Info.attribute(relationship.source, relationship.source_field)
},
Map.get(single_record, relationship.destination_field)
)
end
defp records_to_expression(records, relationship, path) do
Enum.reduce_while(records, {:ok, nil}, fn record, {:ok, expression} ->
case records_to_expression([record], relationship, path) do
{:ok, operator} ->
          {:cont, {:ok, BooleanExpression.optimized_new(:or, expression, operator)}}
{:error, error} ->
{:halt, {:error, error}}
end
end)
end
defp filter_paths_that_change_data_layers(paths, resource, acc \\ [])
defp filter_paths_that_change_data_layers([], _resource, acc), do: acc
defp filter_paths_that_change_data_layers([path | rest], resource, acc) do
case shortest_path_to_changed_data_layer(resource, path) do
{:ok, path} ->
new_rest = Enum.reject(rest, &List.starts_with?(&1, path))
filter_paths_that_change_data_layers(new_rest, resource, [path | acc])
:error ->
filter_paths_that_change_data_layers(rest, resource, acc)
end
end
defp shortest_path_to_changed_data_layer(resource, path, acc \\ [])
defp shortest_path_to_changed_data_layer(_resource, [], _acc), do: :error
defp shortest_path_to_changed_data_layer(resource, [relationship | rest], acc) do
relationship = Ash.Resource.Info.relationship(resource, relationship)
if relationship.type == :many_to_many do
if Ash.DataLayer.data_layer_can?(resource, {:join, relationship.through}) do
shortest_path_to_changed_data_layer(relationship.destination, rest, [
relationship.name | acc
])
else
{:ok, Enum.reverse([relationship.name | acc])}
end
else
if Ash.DataLayer.data_layer_can?(resource, {:join, relationship.destination}) do
shortest_path_to_changed_data_layer(relationship.destination, rest, [
relationship.name | acc
])
else
{:ok, Enum.reverse([relationship.name | acc])}
end
end
end
def relationship_paths(filter_or_expression, kind \\ :all)
def relationship_paths(nil, _), do: []
def relationship_paths(%{expression: nil}, _), do: []
def relationship_paths(%__MODULE__{expression: expression}, kind),
do: relationship_paths(expression, kind)
def relationship_paths(expression, kind) do
expression
|> do_relationship_paths(kind)
|> List.wrap()
|> List.flatten()
|> Enum.uniq()
|> Enum.map(fn {path} -> path end)
end
defp do_relationship_paths(%Ref{relationship_path: path}, _) when path != [] do
{path}
end
defp do_relationship_paths(%BooleanExpression{op: :or}, :ands_only) do
[]
end
defp do_relationship_paths(%BooleanExpression{left: left, right: right}, kind) do
[do_relationship_paths(left, kind), do_relationship_paths(right, kind)]
end
defp do_relationship_paths(%Not{expression: expression}, kind) do
do_relationship_paths(expression, kind)
end
defp do_relationship_paths(%{__operator__?: true, left: left, right: right}, kind) do
[do_relationship_paths(left, kind), do_relationship_paths(right, kind)]
end
defp do_relationship_paths({key, value}, kind) when is_atom(key) do
do_relationship_paths(value, kind)
end
defp do_relationship_paths(%{__function__?: true, arguments: arguments}, kind) do
Enum.map(arguments, &do_relationship_paths(&1, kind))
end
defp do_relationship_paths(_, _), do: []
@doc false
def embed_predicates(nil), do: nil
def embed_predicates(%__MODULE__{expression: expression} = filter) do
%{filter | expression: embed_predicates(expression)}
end
def embed_predicates(%Not{expression: expression} = not_expr) do
%{not_expr | expression: embed_predicates(expression)}
end
def embed_predicates(%BooleanExpression{left: left, right: right} = expr) do
%{expr | left: embed_predicates(left), right: embed_predicates(right)}
end
def embed_predicates(%Call{args: args} = call) do
%{call | args: embed_predicates(args)}
end
def embed_predicates(%{__predicate__?: true} = pred) do
%{pred | embedded?: true}
end
def embed_predicates(list) when is_list(list) do
Enum.map(list, &embed_predicates(&1))
end
def embed_predicates(other), do: other
def list_refs(expression, no_longer_simple? \\ false, in_an_eq? \\ false)
def list_refs(list, no_longer_simple?, in_an_eq?) when is_list(list) do
Enum.flat_map(list, &list_refs(&1, no_longer_simple?, in_an_eq?))
end
def list_refs({key, value}, no_longer_simple?, in_an_eq?) when is_atom(key),
do: list_refs(value, no_longer_simple?, in_an_eq?)
def list_refs(%__MODULE__{expression: expression}, no_longer_simple?, in_an_eq?) do
list_refs(expression, no_longer_simple?, in_an_eq?)
end
def list_refs(expression, no_longer_simple?, in_an_eq?) do
case expression do
%BooleanExpression{left: left, right: right, op: op} ->
no_longer_simple? = no_longer_simple? || op == :or
list_refs(left, no_longer_simple?) ++ list_refs(right, no_longer_simple?)
%Not{expression: not_expr} ->
list_refs(not_expr, true)
%struct{__predicate__?: _, left: left, right: right} ->
in_an_eq? = struct == Ash.Query.Operator.Eq
list_refs(left, no_longer_simple?, in_an_eq?) ++
list_refs(right, no_longer_simple?, in_an_eq?)
%{__predicate__?: _, arguments: args} ->
Enum.flat_map(args, &list_refs(&1, true))
%Call{args: args} ->
Enum.flat_map(args, &list_refs(&1, true))
%Ref{} = ref ->
[%{ref | simple_equality?: !no_longer_simple? && in_an_eq?}]
_ ->
[]
end
end
def list_predicates(%__MODULE__{expression: expression}) do
list_predicates(expression)
end
def list_predicates(expression) do
case expression do
%BooleanExpression{left: left, right: right} ->
list_predicates(left) ++ list_predicates(right)
%Not{expression: not_expr} ->
list_predicates(not_expr)
%{__predicate__?: true} = pred ->
[pred]
_ ->
[]
end
end
def scope_expression_by_relationship_path(filter, path) do
%__MODULE__{
resource: Ash.Resource.Info.related(filter.resource, path),
expression: do_scope_expression_by_relationship_path(filter.expression, path)
}
end
defp do_scope_expression_by_relationship_path(
%BooleanExpression{op: op, left: left, right: right},
path
) do
new_left = do_scope_expression_by_relationship_path(left, path)
new_right = do_scope_expression_by_relationship_path(right, path)
BooleanExpression.optimized_new(op, new_left, new_right)
end
defp do_scope_expression_by_relationship_path(%Not{expression: expression}, path) do
new_expression = do_scope_expression_by_relationship_path(expression, path)
Not.new(new_expression)
end
defp do_scope_expression_by_relationship_path(
%{__operator__?: true, left: left, right: right} = op,
path
) do
[left, right] = Enum.map([left, right], &do_scope_expression_by_relationship_path(&1, path))
%{op | left: left, right: right}
end
defp do_scope_expression_by_relationship_path(
%{__function__?: true, arguments: arguments} = func,
path
) do
arguments = Enum.map(arguments, &do_scope_expression_by_relationship_path(&1, path))
%{func | arguments: arguments}
end
defp do_scope_expression_by_relationship_path(%Call{args: arguments} = call, path) do
arguments = Enum.map(arguments, &do_scope_expression_by_relationship_path(&1, path))
%{call | args: arguments}
end
defp do_scope_expression_by_relationship_path({key, value}, path) do
{key, do_scope_expression_by_relationship_path(value, path)}
end
defp do_scope_expression_by_relationship_path(%Ref{} = ref, path) do
if List.starts_with?(ref.relationship_path, path) do
%{ref | relationship_path: Enum.drop(ref.relationship_path, Enum.count(path))}
else
ref
end
end
defp do_scope_expression_by_relationship_path(other, _path) do
other
end
defp attribute(%{public?: true, resource: resource}, attribute),
do: Ash.Resource.Info.public_attribute(resource, attribute)
defp attribute(%{public?: false, resource: resource}, attribute),
do: Ash.Resource.Info.attribute(resource, attribute)
defp aggregate(%{public?: true, resource: resource}, aggregate),
do: Ash.Resource.Info.public_aggregate(resource, aggregate)
defp aggregate(%{public?: false, resource: resource}, aggregate),
do: Ash.Resource.Info.aggregate(resource, aggregate)
defp calculation(%{public?: true, resource: resource}, calculation),
do: Ash.Resource.Info.public_calculation(resource, calculation)
defp calculation(%{public?: false, resource: resource}, calculation),
do: Ash.Resource.Info.calculation(resource, calculation)
defp relationship(%{public?: true, resource: resource}, relationship) do
Ash.Resource.Info.public_relationship(resource, relationship)
end
defp relationship(%{public?: false, resource: resource}, relationship) do
Ash.Resource.Info.relationship(resource, relationship)
end
defp related(context, relationship) when not is_list(relationship) do
related(context, [relationship])
end
defp related(context, []), do: context.resource
defp related(context, [rel | rest]) do
case relationship(context, rel) do
%{destination: destination} -> related(%{context | resource: destination}, rest)
nil -> nil
end
end
defp parse_expression(%__MODULE__{expression: expression}, context),
do: {:ok, move_to_relationship_path(expression, context.relationship_path)}
defp parse_expression(statement, context) when is_list(statement) do
Enum.reduce_while(statement, {:ok, nil}, fn expression_part, {:ok, expression} ->
case add_expression_part(expression_part, context, expression) do
{:ok, new_expression} ->
{:cont, {:ok, new_expression}}
{:error, error} ->
{:halt, {:error, error}}
end
end)
end
defp parse_expression(statement, context) do
parse_expression([statement], context)
end
defp add_expression_part(boolean, _context, expression) when is_boolean(boolean),
do: {:ok, BooleanExpression.optimized_new(:and, expression, boolean)}
defp add_expression_part(%__MODULE__{expression: adding_expression}, context, expression) do
{:ok,
BooleanExpression.optimized_new(
:and,
expression,
move_to_relationship_path(adding_expression, context.relationship_path)
)}
end
defp add_expression_part({not_key, nested_statement}, context, expression)
when not_key in [:not, "not"] do
case parse_expression(nested_statement, context) do
{:ok, nested_expression} ->
{:ok, BooleanExpression.optimized_new(:and, expression, Not.new(nested_expression))}
{:error, error} ->
{:error, error}
end
end
defp add_expression_part({or_key, nested_statements}, context, expression)
when or_key in [:or, "or"] do
with {:ok, nested_expression} <- parse_and_join(nested_statements, :or, context),
:ok <- validate_data_layers_support_boolean_filters(nested_expression) do
{:ok, BooleanExpression.optimized_new(:and, expression, nested_expression)}
end
end
defp add_expression_part({and_key, nested_statements}, context, expression)
when and_key in [:and, "and"] do
case parse_and_join(nested_statements, :and, context) do
{:ok, nested_expression} ->
{:ok, BooleanExpression.optimized_new(:and, expression, nested_expression)}
{:error, error} ->
{:error, error}
end
end
defp add_expression_part(%Call{} = call, context, expression) do
case resolve_call(call, context) do
{:ok, result} ->
{:ok, BooleanExpression.optimized_new(:and, expression, result)}
{:error, error} ->
{:error, error}
end
end
defp add_expression_part({%Ref{} = ref, nested_statement}, context, expression) do
case related(context, ref.relationship_path) do
nil ->
{:error,
NoSuchAttributeOrRelationship.exception(
attribute_or_relationship: List.first(ref.relationship_path),
resource: context.resource
)}
related ->
new_context = %{
relationship_path: ref.relationship_path,
resource: related,
aggregates: context.aggregates,
calculations: context.calculations,
query_context: context.query_context,
public?: context.public?
}
add_expression_part({ref.attribute.name, nested_statement}, new_context, expression)
end
end
defp add_expression_part(
%BooleanExpression{op: op, left: left, right: right},
context,
expression
) do
add_expression_part({op, [left, right]}, context, expression)
end
defp add_expression_part(%Not{expression: not_expression}, context, expression) do
add_expression_part({:not, not_expression}, context, expression)
end
defp add_expression_part(%_{} = record, context, expression) do
pkey_filter =
record
|> Map.take(Ash.Resource.Info.primary_key(context.resource))
|> Map.to_list()
add_expression_part(pkey_filter, context, expression)
end
defp add_expression_part({:is_nil, attribute}, context, expression) when is_atom(attribute) do
add_expression_part({attribute, [is_nil: true]}, context, expression)
end
defp add_expression_part({function, args}, context, expression)
when is_tuple(args) and is_atom(function) do
case get_function(function, context.resource) do
nil ->
{:error,
NoSuchAttributeOrRelationship.exception(
attribute_or_relationship: function,
resource: context.resource
)}
function_module ->
nested_statement = Tuple.to_list(args)
with {:ok, args} <-
hydrate_refs(List.wrap(nested_statement), context),
refs <- list_refs(args),
:ok <-
validate_not_crossing_datalayer_boundaries(
refs,
context.resource,
{function, nested_statement}
),
{:ok, function} <-
Function.new(
function_module,
args
) do
if is_boolean(function) do
{:ok, BooleanExpression.optimized_new(:and, expression, function)}
else
if Ash.DataLayer.data_layer_can?(context.resource, {:filter_expr, function}) do
{:ok, BooleanExpression.optimized_new(:and, expression, function)}
else
{:error, "data layer does not support the function #{inspect(function)}"}
end
end
end
end
end
defp add_expression_part({field, nested_statement}, context, expression)
when is_atom(field) or is_binary(field) do
aggregates =
Enum.flat_map(context.aggregates, fn {key, _} ->
[key, to_string(key)]
end)
calculations =
Enum.flat_map(context.calculations, fn {key, _} ->
[key, to_string(key)]
end)
cond do
rel = relationship(context, field) ->
context =
context
|> Map.update!(:relationship_path, fn path -> path ++ [rel.name] end)
|> Map.put(:resource, rel.destination)
|> Map.update!(
:query_context,
&Ash.Helpers.deep_merge_maps(&1 || %{}, rel.context || %{})
)
if is_list(nested_statement) || is_map(nested_statement) do
case parse_expression(nested_statement, context) do
{:ok, nested_expression} ->
{:ok, BooleanExpression.optimized_new(:and, expression, nested_expression)}
{:error, error} ->
{:error, error}
end
else
with [field] <- Ash.Resource.Info.primary_key(context.resource),
attribute <- attribute(context, field),
{:ok, casted} <-
Ash.Type.cast_input(attribute.type, nested_statement, attribute.constraints) do
add_expression_part({field, casted}, context, expression)
else
_other ->
{:error,
InvalidFilterValue.exception(
value: inspect(nested_statement),
message:
"A single value must be castable to the primary key of the resource: #{inspect(context.resource)}"
)}
end
end
attr = attribute(context, field) ->
case parse_predicates(nested_statement, attr, context) do
{:ok, nested_statement} ->
{:ok, BooleanExpression.optimized_new(:and, expression, nested_statement)}
{:error, error} ->
{:error, error}
end
aggregate = aggregate(context, field) ->
related = Ash.Resource.Info.related(context.resource, aggregate.relationship_path)
with %{valid?: true} = aggregate_query <-
Ash.Query.build(related, filter: aggregate.filter, sort: aggregate.sort),
{:ok, query_aggregate} <-
Aggregate.new(
context.resource,
aggregate.name,
aggregate.kind,
aggregate.relationship_path,
aggregate_query,
aggregate.field,
aggregate.default,
aggregate.filterable?
) do
case parse_predicates(nested_statement, query_aggregate, context) do
{:ok, nested_statement} ->
{:ok, BooleanExpression.optimized_new(:and, expression, nested_statement)}
{:error, error} ->
{:error, error}
end
else
%{valid?: false, errors: errors} ->
{:error, errors}
{:error, error} ->
{:error, error}
end
field in calculations ->
{module, _} = module_and_opts(Map.get(context.calculations, field).calculation)
field =
if is_binary(field) do
String.to_existing_atom(field)
else
field
end
add_calculation_expression(context, nested_statement, field, module, expression)
field in aggregates ->
field =
if is_binary(field) do
String.to_existing_atom(field)
else
field
end
add_aggregate_expression(context, nested_statement, field, expression)
resource_calculation = calculation(context, field) ->
{module, opts} = module_and_opts(resource_calculation.calculation)
{input, nested_statement} =
case nested_statement do
{input, nested} ->
{input || %{}, nested}
nested ->
{%{}, nested}
end
with {:ok, args} <-
Ash.Query.validate_calculation_arguments(
resource_calculation,
input
),
{:ok, calculation} <-
Calculation.new(
resource_calculation.name,
module,
opts,
resource_calculation.type,
Map.put(args, :context, context.query_context),
resource_calculation.filterable?
) do
case parse_predicates(nested_statement, calculation, context) do
{:ok, nested_statement} ->
{:ok, BooleanExpression.optimized_new(:and, expression, nested_statement)}
{:error, error} ->
{:error, error}
end
else
{:error, error} ->
{:error, error}
end
op_module = get_operator(field) && match?([_, _ | _], nested_statement) ->
with {:ok, [left, right]} <-
hydrate_refs(nested_statement, context),
refs <- list_refs([left, right]),
:ok <-
validate_not_crossing_datalayer_boundaries(
refs,
context.resource,
{field, nested_statement}
),
{:ok, operator} <- Operator.new(op_module, left, right) do
if is_boolean(operator) do
{:ok, BooleanExpression.optimized_new(:and, expression, operator)}
else
if Ash.DataLayer.data_layer_can?(context.resource, {:filter_expr, operator}) do
{:ok, BooleanExpression.optimized_new(:and, expression, operator)}
else
{:error, "data layer does not support the operator #{inspect(operator)}"}
end
end
end
true ->
{:error,
NoSuchAttributeOrRelationship.exception(
attribute_or_relationship: field,
resource: context.resource
)}
end
end
defp add_expression_part(value, context, expression) when is_map(value) do
# Can't call `parse_expression/2` here because it will loop
value
|> Map.to_list()
|> Enum.reduce_while({:ok, nil}, fn {key, value}, {:ok, expression} ->
case add_expression_part({key, value}, context, expression) do
{:ok, new_expression} ->
{:cont, {:ok, new_expression}}
{:error, error} ->
{:halt, {:error, error}}
end
end)
|> case do
{:ok, new_expression} ->
{:ok, BooleanExpression.optimized_new(:and, expression, new_expression)}
{:error, error} ->
{:error, error}
end
end
defp add_expression_part(value, context, expression) when is_list(value) do
Enum.reduce_while(value, {:ok, expression}, fn value, {:ok, expression} ->
case add_expression_part(value, context, expression) do
{:ok, expression} -> {:cont, {:ok, expression}}
{:error, error} -> {:halt, {:error, error}}
end
end)
end
defp add_expression_part(value, _, _) do
{:error, InvalidFilterValue.exception(value: value)}
end
defp validate_not_crossing_datalayer_boundaries(refs, resource, expr) do
refs
|> Enum.map(&Ash.Resource.Info.related(resource, &1.relationship_path))
|> Enum.filter(& &1)
|> Enum.group_by(&Ash.DataLayer.data_layer/1)
|> Map.to_list()
|> case do
[] ->
:ok
[{_data_layer, resources}] ->
can_join? =
Enum.all?(resources, fn resource ->
resources
|> Kernel.--([resource])
|> Enum.all?(fn other_resource ->
Ash.DataLayer.data_layer_can?(resource, {:join, other_resource})
end)
end)
if can_join? do
:ok
else
{:error,
Ash.Error.Query.InvalidExpression.exception(
expression: expr,
message:
"Cannot access multiple resources for a data layer that can't be joined from within a single expression"
)}
end
[_ | _] ->
{:error,
Ash.Error.Query.InvalidExpression.exception(
expression: expr,
message: "Cannot access multiple data layers within a single expression"
)}
end
end
defp resolve_call(%Call{name: name, args: args, operator?: true} = call, context) do
with :ok <- validate_datalayer_supports_nested_expressions(args, context.resource),
{:op, op_module} when not is_nil(op_module) <-
{:op, get_operator(name)},
{:ok, [left, right]} <-
hydrate_refs(args, context),
refs <- list_refs([left, right]),
:ok <-
validate_not_crossing_datalayer_boundaries(refs, context.resource, call),
{:ok, operator} <- Operator.new(op_module, left, right) do
if is_boolean(operator) do
{:ok, operator}
else
if Ash.DataLayer.data_layer_can?(context.resource, {:filter_expr, operator}) do
{:ok, operator}
else
{:error, "data layer does not support the operator #{inspect(operator)}"}
end
end
else
{:op, nil} ->
{:error, NoSuchOperator.exception(name: name)}
other ->
other
end
end
defp resolve_call(%Call{name: name, args: args} = call, context) do
could_be_calculation? = Enum.count(args) == 1 && Keyword.keyword?(Enum.at(args, 0))
resource = Ash.Resource.Info.related(context.resource, call.relationship_path)
case {calculation(%{context | resource: resource}, name), could_be_calculation?} do
{resource_calculation, true} when not is_nil(resource_calculation) ->
{module, opts} = module_and_opts(resource_calculation.calculation)
with {:ok, args} <-
Ash.Query.validate_calculation_arguments(
resource_calculation,
Map.new(Enum.at(args, 0) || [])
),
{:ok, calculation} <-
Calculation.new(
resource_calculation.name,
module,
opts,
resource_calculation.type,
Map.put(args, :context, context.query_context),
resource_calculation.filterable?
) do
{:ok,
%Ref{
attribute: calculation,
relationship_path: context.relationship_path ++ call.relationship_path,
resource: resource
}}
else
{:error, error} ->
{:error, error}
end
_ ->
with :ok <- validate_datalayer_supports_nested_expressions(args, context.resource),
{:ok, args} <-
hydrate_refs(args, context),
refs <- list_refs(args),
:ok <- validate_not_crossing_datalayer_boundaries(refs, context.resource, call),
{:func, function_module} when not is_nil(function_module) <-
{:func, get_function(name, context.resource)},
{:ok, function} <-
Function.new(
function_module,
args
) do
if is_boolean(function) do
{:ok, function}
else
if Ash.DataLayer.data_layer_can?(context.resource, {:filter_expr, function}) do
{:ok, function}
else
{:error, "data layer does not support the function #{inspect(function)}"}
end
end
else
{:func, nil} ->
{:error, NoSuchFunction.exception(name: name, resource: context.resource)}
other ->
other
end
end
end
defp validate_datalayer_supports_nested_expressions(args, resource) do
if Enum.any?(args, &Ash.Query.is_expr?/1) &&
!Ash.DataLayer.data_layer_can?(resource, :nested_expressions) do
{:error, "Datalayer does not support nested expressions"}
else
:ok
end
end
defp module_and_opts({module, opts}), do: {module, opts}
defp module_and_opts(module), do: {module, []}
def hydrate_refs({key, value}, context) when is_atom(key) do
case hydrate_refs(value, context) do
{:ok, hydrated} ->
{:ok, {key, hydrated}}
other ->
other
end
end
def hydrate_refs(
%Ref{attribute: attribute} = ref,
%{aggregates: aggregates, calculations: calculations} = context
)
when is_atom(attribute) do
case related(context, ref.relationship_path) do
nil ->
{:error,
"Invalid reference #{inspect(ref)} at relationship_path #{inspect(ref.relationship_path)}"}
related ->
context = %{context | resource: related}
cond do
Map.has_key?(aggregates, attribute) ->
{:ok, %{ref | attribute: Map.get(aggregates, attribute), resource: related}}
Map.has_key?(calculations, attribute) ->
{:ok, %{ref | attribute: Map.get(calculations, attribute), resource: related}}
attribute = attribute(context, attribute) ->
{:ok, %{ref | attribute: attribute, resource: related}}
resource_calculation = calculation(context, attribute) ->
{module, opts} = module_and_opts(resource_calculation.calculation)
with {:ok, args} <-
Ash.Query.validate_calculation_arguments(resource_calculation, %{}),
{:ok, calculation} <-
Calculation.new(
resource_calculation.name,
module,
opts,
resource_calculation.type,
Map.put(args, :context, context.query_context),
resource_calculation.filterable?
) do
{:ok, %{ref | attribute: calculation, resource: related}}
else
{:error, error} ->
{:error, error}
end
aggregate = aggregate(context, attribute) ->
agg_related = Ash.Resource.Info.related(related, aggregate.relationship_path)
with %{valid?: true} = aggregate_query <-
Ash.Query.build(agg_related, filter: aggregate.filter, sort: aggregate.sort),
{:ok, query_aggregate} <-
Aggregate.new(
related,
aggregate.name,
aggregate.kind,
aggregate.relationship_path,
aggregate_query,
aggregate.field,
aggregate.default,
aggregate.filterable?
) do
{:ok, %{ref | attribute: query_aggregate, resource: related}}
else
%{valid?: false, errors: errors} ->
{:error, errors}
{:error, error} ->
{:error, error}
end
relationship = relationship(context, attribute) ->
case Ash.Resource.Info.primary_key(relationship.destination) do
[key] ->
new_ref = %{
ref
| relationship_path: ref.relationship_path ++ [relationship.name],
attribute: Ash.Resource.Info.attribute(relationship.destination, key),
resource: relationship.destination
}
{:ok, new_ref}
_ ->
{:error,
"Invalid reference #{inspect(ref)} when hydrating relationship ref for #{inspect(ref.relationship_path ++ [relationship.name])}. Require single attribute primary key."}
end
true ->
{:error, "Invalid reference #{inspect(ref)}"}
end
end
end
def hydrate_refs(%Ref{relationship_path: relationship_path, resource: nil} = ref, context) do
{:ok, %{ref | resource: Ash.Resource.Info.related(context.resource, relationship_path)}}
end
def hydrate_refs(%BooleanExpression{left: left, right: right} = expr, context) do
with {:ok, left} <- hydrate_refs(left, context),
{:ok, right} <- hydrate_refs(right, context) do
{:ok, %{expr | left: left, right: right}}
else
other ->
other
end
end
def hydrate_refs(%Not{expression: expression} = expr, context) do
with {:ok, expression} <- hydrate_refs(expression, context) do
{:ok, %{expr | expression: expression}}
end
end
def hydrate_refs(%Call{} = call, context) do
resolve_call(call, context)
end
def hydrate_refs(%{__predicate__?: _, left: left, right: right} = expr, context) do
with {:ok, left} <- hydrate_refs(left, context),
{:ok, right} <- hydrate_refs(right, context) do
{:ok, %{expr | left: left, right: right}}
else
other ->
other
end
end
def hydrate_refs(%{__predicate__?: _, arguments: arguments} = expr, context) do
case hydrate_refs(arguments, context) do
{:ok, args} ->
{:ok, %{expr | arguments: args}}
other ->
other
end
end
def hydrate_refs(list, context) when is_list(list) do
list
|> Enum.reduce_while({:ok, []}, fn val, {:ok, acc} ->
case hydrate_refs(val, context) do
{:ok, value} ->
{:cont, {:ok, [value | acc]}}
{:error, error} ->
{:halt, {:error, error}}
end
end)
|> case do
{:ok, value} -> {:ok, Enum.reverse(value)}
{:error, error} -> {:error, error}
end
end
def hydrate_refs(val, _context) do
{:ok, val}
end
defp add_aggregate_expression(context, nested_statement, field, expression) do
if Ash.DataLayer.data_layer_can?(context.resource, :aggregate_filter) do
case parse_predicates(nested_statement, Map.get(context.aggregates, field), context) do
{:ok, nested_statement} ->
{:ok, BooleanExpression.optimized_new(:and, expression, nested_statement)}
{:error, error} ->
{:error, error}
end
else
{:error, AggregatesNotSupported.exception(resource: context.resource, feature: "filtering")}
end
end
defp add_calculation_expression(context, nested_statement, field, module, expression) do
if Ash.DataLayer.data_layer_can?(context.resource, :expression_calculation) &&
:erlang.function_exported(module, :expression, 2) do
case parse_predicates(nested_statement, Map.get(context.calculations, field), context) do
{:ok, nested_statement} ->
{:ok, BooleanExpression.optimized_new(:and, expression, nested_statement)}
{:error, error} ->
{:error, error}
end
else
{:error,
CalculationsNotSupported.exception(resource: context.resource, feature: "filtering")}
end
end
defp validate_data_layers_support_boolean_filters(%BooleanExpression{
op: :or,
left: left,
right: right
}) do
left_resources =
left
|> map(fn
%Ref{} = ref ->
[ref.resource]
_ ->
[]
end)
|> List.flatten()
|> Enum.uniq()
right_resources =
right
|> map(fn
%Ref{} = ref ->
[ref.resource]
_ ->
[]
end)
|> List.flatten()
|> Enum.uniq()
left_resources
|> Enum.filter(&(&1 in right_resources))
|> Enum.reduce_while(:ok, fn resource, :ok ->
if Ash.DataLayer.data_layer_can?(resource, :boolean_filter) do
{:cont, :ok}
else
{:halt, {:error, "Data layer for #{resource} does not support boolean filters"}}
end
end)
end
defp validate_data_layers_support_boolean_filters(_), do: :ok
def move_to_relationship_path(expression, []), do: expression
def move_to_relationship_path(expression, relationship_path) do
case expression do
{key, value} when is_atom(key) ->
{key, move_to_relationship_path(value, relationship_path)}
%Not{expression: expression} = not_expr ->
%{not_expr | expression: move_to_relationship_path(expression, relationship_path)}
%BooleanExpression{left: left, right: right} = expression ->
%{
expression
| left: move_to_relationship_path(left, relationship_path),
right: move_to_relationship_path(right, relationship_path)
}
%{__operator__?: true, left: left, right: right} = op ->
left = move_to_relationship_path(left, relationship_path)
right = move_to_relationship_path(right, relationship_path)
%{op | left: left, right: right}
%Ref{} = ref ->
add_to_ref_path(ref, relationship_path)
%{__function__?: true, arguments: args} = func ->
%{func | arguments: Enum.map(args, &move_to_relationship_path(&1, relationship_path))}
%Call{args: args} = call ->
%{call | args: Enum.map(args, &move_to_relationship_path(&1, relationship_path))}
%__MODULE__{expression: expression} = filter ->
%{filter | expression: move_to_relationship_path(expression, relationship_path)}
other ->
other
end
end
defp add_to_ref_path(%Ref{relationship_path: relationship_path} = ref, to_add) do
%{ref | relationship_path: to_add ++ relationship_path}
end
defp add_to_ref_path(other, _), do: other
defp parse_and_join(statements, op, context) do
Enum.reduce_while(statements, {:ok, nil}, fn statement, {:ok, expression} ->
case parse_expression(statement, context) do
{:ok, nested_expression} ->
{:cont, {:ok, BooleanExpression.optimized_new(op, expression, nested_expression)}}
{:error, error} ->
{:halt, {:error, error}}
end
end)
end
defp parse_predicates(value, field, context) when not is_list(value) and not is_map(value) do
parse_predicates([eq: value], field, context)
end
defp parse_predicates(values, attr, context) do
if is_map(values) || Keyword.keyword?(values) do
Enum.reduce_while(values, {:ok, nil}, fn
{:not, value}, {:ok, expression} ->
case parse_predicates(List.wrap(value), attr, context) do
{:ok, not_expression} ->
{:cont,
{:ok,
BooleanExpression.optimized_new(:and, expression, %Not{expression: not_expression})}}
{:error, error} ->
{:halt, {:error, error}}
end
{key, value}, {:ok, expression} ->
case get_operator(key) do
nil ->
error = NoSuchFilterPredicate.exception(key: key, resource: context.resource)
{:halt, {:error, error}}
operator_module ->
left = %Ref{
attribute: attr,
relationship_path: context.relationship_path,
resource: context.resource
}
with {:ok, [left, right]} <-
hydrate_refs([left, value], context),
refs <- list_refs([left, right]),
:ok <-
validate_not_crossing_datalayer_boundaries(
refs,
context.resource,
{attr, value}
),
{:ok, operator} <- Operator.new(operator_module, left, right) do
if is_boolean(operator) do
{:cont, {:ok, operator}}
else
if Ash.DataLayer.data_layer_can?(context.resource, {:filter_expr, operator}) do
{:cont, {:ok, BooleanExpression.optimized_new(:and, expression, operator)}}
else
{:halt,
{:error, "data layer does not support the operator #{inspect(operator)}"}}
end
end
else
{:error, error} -> {:halt, {:error, error}}
end
end
end)
else
error = InvalidFilterValue.exception(value: values)
{:error, error}
end
end
def get_function(key, resource) when is_atom(key) do
@builtin_functions[key] ||
Enum.find(Ash.DataLayer.data_layer_functions(resource), &(&1.name() == key))
end
def get_function(key, resource) when is_binary(key) do
Map.get(@string_builtin_functions, key) ||
Enum.find(Ash.DataLayer.data_layer_functions(resource), &(&1.name() == key))
end
def get_function(_, _), do: nil
def get_operator(key) when is_atom(key) do
@builtin_operators[key]
end
def get_operator(key) when is_binary(key) do
Map.get(@string_builtin_operators, key)
end
def get_operator(_), do: nil
defimpl Inspect do
import Inspect.Algebra
@custom_colors [
number: :cyan
]
def inspect(
%{expression: expression},
opts
) do
opts = %{opts | syntax_colors: Keyword.merge(opts.syntax_colors, @custom_colors)}
concat(["#Ash.Filter<", to_doc(expression, opts), ">"])
end
end
end
| 31.383398 | 197 | 0.60966 |
73135a3a0e46a02f92297ed45fbf681a97a728cd | 65 | ex | Elixir | lib/coderjobs_web/views/page_view.ex | johndavedecano/CoderJobs | a185c3129f92430d8e72184f359c16f93f24c43b | [
"MIT"
] | 28 | 2017-11-20T02:01:17.000Z | 2021-05-08T16:52:58.000Z | lib/coderjobs_web/views/page_view.ex | johndavedecano/CoderJobs | a185c3129f92430d8e72184f359c16f93f24c43b | [
"MIT"
] | 1 | 2018-12-05T06:07:36.000Z | 2018-12-09T17:33:28.000Z | lib/coderjobs_web/views/page_view.ex | johndavedecano/CoderJobs | a185c3129f92430d8e72184f359c16f93f24c43b | [
"MIT"
] | 3 | 2017-12-21T03:29:39.000Z | 2019-08-11T02:56:08.000Z | defmodule CoderjobsWeb.PageView do
use CoderjobsWeb, :view
end
| 16.25 | 34 | 0.815385 |
7313a66eadbaa7a1a0f70c7f5102eb7113ced618 | 5,158 | ex | Elixir | lib/app_optex.ex | sashman/app_optex | cb7bcd04b8ce1e0b0a9364b49ce490b6f5489397 | [
"MIT"
] | 5 | 2019-04-02T20:30:01.000Z | 2019-04-26T08:30:19.000Z | lib/app_optex.ex | sashman/app_optex | cb7bcd04b8ce1e0b0a9364b49ce490b6f5489397 | [
"MIT"
] | 6 | 2019-04-05T15:14:01.000Z | 2019-05-13T14:07:14.000Z | lib/app_optex.ex | sashman/app_optex | cb7bcd04b8ce1e0b0a9364b49ce490b6f5489397 | [
"MIT"
] | 1 | 2019-05-13T13:58:15.000Z | 2019-05-13T13:58:15.000Z | defmodule AppOptex do
alias AppOptex.{Worker, Client}
@moduledoc """
Client library for sending and reading AppOptics API measurements. To authenticate with AppOptics, make sure to set the `APPOPTICS_TOKEN` environment variable. This can also be overridden in the Application config.
"""
@doc """
Send one measurement with tags. The measurements are sent to AppOptics asynchronously.
* `name` - Name of the measurement
* `value` - Value of the measurement
* `tags` - A map of tags to send with the measurement. Cannot be empty.
## Examples
iex> AppOptex.measurement("my.metric", 10, %{my_tag: "value"})
:ok
"""
def measurement(name, value, tags) do
GenServer.cast(Worker, {:measurements, [%{name: name, value: value}], tags})
end
@doc """
Send one measurement with tags. The measurements are sent to AppOptics asynchronously.
* `measurement` - Map of the measurement data
* `tags` - A map of tags to send with the measurement. Cannot be empty.
## Examples
iex> AppOptex.measurement(%{name: "my.metric", value: 10}, %{my_tag: "value"})
:ok
"""
def measurement(measurement = %{name: _, value: _}, tags) when is_map(measurement) do
GenServer.cast(Worker, {:measurements, [measurement], tags})
end
@doc """
Send multiple measurements with tags. The measurements are sent to AppOptics asynchronously.
* `measurements` - a batch of metrics to send as a list of maps.
* `tags` - A map of tags to send with the measurement. Cannot be empty.
## Examples
iex> AppOptex.measurements([%{name: "my.metric", value: 1}, %{name: "my.other_metric", value: 5}], %{my_tag: "value"})
:ok
"""
def measurements(measurements, tags) do
GenServer.cast(Worker, {:measurements, measurements, tags})
end
@doc """
Receive multiple measurements with tags. The measurements are read from AppOptics synchronously.
- `metric_name` - the name of the metric you want measurements on.
- `resolution` - the resolution of the measurements in seconds.
- `params` - A map of parameters to restrict the result. Possible values include:
- `start_time` - Unix Time of where to start the time search from. This parameter is optional if duration is specified.
- `end_time` - Unix Time of where to end the search. This parameter is optional and defaults to current wall time.
- `duration` - How far back to look in time, measured in seconds. This parameter can be used in combination with endtime to set a starttime N seconds back in time. It is an error to set starttime, endtime and duration.
## Examples
iex> AppOptex.read_measurements("my.other_metric", 60, %{duration: 999999})
%{
"attributes" => %{"created_by_ua" => "hackney/1.15.1"},
"links" => [],
"name" => "my.other_metric",
"resolution" => 60,
"series" => [
%{
"measurements" => [%{"time" => 1554720060, "value" => 10.0}],
"tags" => %{"my_tag" => "value"}
}
]
}
"""
def read_measurements(metric_name, resolution, params) do
appoptics_url = Application.get_env(:app_optex, :appoptics_url)
token =
Application.get_env(:app_optex, :appoptics_token)
|> case do
{:system, env_var} -> System.get_env(env_var)
token -> token
end
Client.read_measurements(appoptics_url, token, metric_name, resolution, params)
end
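# Illustrative configuration sketch (an assumption, not taken from this file): the
# :appoptics_url and :appoptics_token keys below mirror what read_measurements/3
# looks up via Application.get_env/2, and the URL value is only a placeholder.
#
#     config :app_optex,
#       appoptics_url: "https://api.appoptics.com/v1/measurements",
#       appoptics_token: {:system, "APPOPTICS_TOKEN"}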
@doc """
Set the global tags that will be applied to all measurements. These can be overridden by tags provided in measurement/3 and measurements/2.
* `tags` - maps of tags to set.
## Examples
iex> AppOptex.put_global_tags(%{my_tag: "value"})
:ok
"""
def put_global_tags(tags) when is_map(tags),
do: GenServer.cast(Worker, {:put_global_tags, tags})
@doc """
Get the global tags that will be applied to all measurements.
## Examples
iex> AppOptex.get_global_tags()
%{my_tag: "value"}
"""
def get_global_tags(),
do: GenServer.call(Worker, {:get_global_tags})
@doc """
Asynchronously add to the queue of measurements to be sent to AppOptics later.
## Examples
iex> AppOptex.push_to_queue([%{name: "my.metric", value: 1}], %{test: true})
:ok
"""
def push_to_queue(measurements, tags),
do: GenServer.cast(Worker, {:push_to_queue, measurements, tags})
@doc """
Return the current contents of the measurements queue. The queue format is a list of tuples, where each tuple contains a measurements list and a tags map.
## Examples
iex> AppOptex.read_queue
[{[%{name: "my.metric", value: 1}], %{test: true}}]
"""
def read_queue(),
do: GenServer.call(Worker, {:read_queue})
@doc """
Asynchronously send the contents of the queue to AppOptics and clear it.
## Examples
iex> AppOptex.flush_queue()
:ok
"""
def flush_queue(),
do: GenServer.cast(Worker, {:flush_queue})
end
| 32.037267 | 222 | 0.667507 |
7313aa28fff305db8e969bac35890fafeb560323 | 1,493 | ex | Elixir | clients/analytics/lib/google_api/analytics/v3/model/ga_data_data_table_rows.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/analytics/lib/google_api/analytics/v3/model/ga_data_data_table_rows.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/analytics/lib/google_api/analytics/v3/model/ga_data_data_table_rows.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Analytics.V3.Model.GaDataDataTableRows do
@moduledoc """
## Attributes
* `c` (*type:* `list(GoogleApi.Analytics.V3.Model.GaDataDataTableRowsC.t)`, *default:* `nil`) -
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:c => list(GoogleApi.Analytics.V3.Model.GaDataDataTableRowsC.t())
}
field(:c, as: GoogleApi.Analytics.V3.Model.GaDataDataTableRowsC, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Analytics.V3.Model.GaDataDataTableRows do
def decode(value, options) do
GoogleApi.Analytics.V3.Model.GaDataDataTableRows.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Analytics.V3.Model.GaDataDataTableRows do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 31.765957 | 100 | 0.744139 |
7313c7adf35b1555cd3d0e519ca377fd75bf157d | 373 | ex | Elixir | lib/fake_server/server/cowboy.ex | TakteS/fake_server | c982457977acb1aee491a5d5102e68eed84873f2 | [
"Apache-2.0"
] | null | null | null | lib/fake_server/server/cowboy.ex | TakteS/fake_server | c982457977acb1aee491a5d5102e68eed84873f2 | [
"Apache-2.0"
] | null | null | null | lib/fake_server/server/cowboy.ex | TakteS/fake_server | c982457977acb1aee491a5d5102e68eed84873f2 | [
"Apache-2.0"
] | null | null | null | defmodule FakeServer.Cowboy do
@moduledoc false
alias FakeServer.Instance
def start_listen(%Instance{} = server) do
:cowboy.start_clear(
server.server_name,
[port: server.port],
%{env: %{dispatch: server.router}, http10_keepalive: false}
)
end
def stop(%Instance{} = server) do
:cowboy.stop_listener(server.server_name)
end
end
| 20.722222 | 65 | 0.683646 |
7314000ffadd1e1050d0ef9bf2fd90c47a9553a1 | 14 | ex | Elixir | testData/org/elixir_lang/parser_definition/no_parentheses_no_arguments_call_parsing_test_case/EmptyBlockDotIdentifier.ex | ArtemGordinsky/intellij-elixir | e2d9b4dfc65651b293d499043edeaad606cf5652 | [
"Apache-2.0"
] | null | null | null | testData/org/elixir_lang/parser_definition/no_parentheses_no_arguments_call_parsing_test_case/EmptyBlockDotIdentifier.ex | ArtemGordinsky/intellij-elixir | e2d9b4dfc65651b293d499043edeaad606cf5652 | [
"Apache-2.0"
] | null | null | null | testData/org/elixir_lang/parser_definition/no_parentheses_no_arguments_call_parsing_test_case/EmptyBlockDotIdentifier.ex | ArtemGordinsky/intellij-elixir | e2d9b4dfc65651b293d499043edeaad606cf5652 | [
"Apache-2.0"
] | null | null | null | (;).identifier | 14 | 14 | 0.714286 |
731400c63f8fe9bc478e78ff3791b7f68d95f5ae | 6,098 | exs | Elixir | test/failure_examples.exs | Kr00lIX/assertions | 84b0ef5844f06589773a898fd8bd4fdf04d5a338 | [
"MIT"
] | 121 | 2018-11-30T16:08:43.000Z | 2022-03-10T04:10:04.000Z | test/failure_examples.exs | Kr00lIX/assertions | 84b0ef5844f06589773a898fd8bd4fdf04d5a338 | [
"MIT"
] | 21 | 2019-02-14T08:31:06.000Z | 2022-02-22T16:36:43.000Z | test/failure_examples.exs | Kr00lIX/assertions | 84b0ef5844f06589773a898fd8bd4fdf04d5a338 | [
"MIT"
] | 17 | 2018-12-14T13:04:13.000Z | 2021-12-10T22:44:59.000Z | defmodule Assertions.FailureExamples do
@moduledoc """
This module is not run when running `mix test` because the file name doesn't
follow the `*_test.exs` pattern. This is intentional. All of these examples
fail, and this is to show how the diff is generated when using the
`ExUnit.Console` formatter.
"""
@path Path.expand("../tmp/file.txt", __DIR__)
use Assertions.Case
setup do
on_exit(fn ->
File.rm_rf(Path.dirname(@path))
end)
end
describe "assert!/1" do
test "fails" do
assert!("A string")
end
test "fails when using nil" do
assert!(nil > 0)
end
end
describe "refute!/1" do
test "fails" do
refute!(nil)
end
test "fails when using nil" do
refute!(nil < 0)
end
end
describe "assert_lists_equal/2" do
test "fails" do
assert_lists_equal([1, 2, 3], [1, 4, 2])
end
end
describe "assert_lists_equal/3" do
test "fails when the third argument is a custom message" do
assert_lists_equal([1, 2, 3], [1, 4, 2], "Didn't match!")
end
test "fails when the third argument is a custom function" do
assert_lists_equal(["cat"], ["lion"], &(String.length(&1) == String.length(&2)))
end
test "fails nicely with a list of maps" do
left = [
%{first: :first, second: :third},
%{four: 4, five: "five"},
%{"six" => :six, "seven" => 7}
]
right = [
%{first: :first, second: :third},
%{four: "four", five: "five"},
%{"six" => :six, "seven" => 7}
]
assert_lists_equal(left, right, &assert_maps_equal(&1, &2, Map.keys(&2)))
end
end
describe "assert_map_in_list/3" do
test "fails with atom keys" do
map = %{first: :first, second: :second, not: :used, keys: :are, always: :pruned}
list = [%{first: :first, second: :third, third: :fourth, a: :b, d: :e}]
keys = [:first, :second]
assert_map_in_list(map, list, keys)
end
test "fails with string keys" do
map = %{"first" => :first, "second" => :second}
list = [%{"first" => :first, "second" => :third}]
keys = ["first", "second"]
assert_map_in_list(map, list, keys)
end
test "fails with list keys" do
map = %{["first"] => :first, ["second"] => :second}
list = [%{["first"] => :first, ["second"] => :third}]
keys = [["first"], ["second"]]
assert_map_in_list(map, list, keys)
end
end
describe "assert_maps_equal/3" do
test "fails" do
assert_maps_equal(
%{first: :first, second: :second},
%{first: :second, third: :third},
[:first]
)
end
end
describe "assert_struct_in_list/3" do
test "fails with struct/keys/list" do
assert_struct_in_list(DateTime.utc_now(), [:year, :month], [Date.utc_today()])
end
test "fails with map/module/list" do
map = Map.take(DateTime.utc_now(), [:year, :month])
assert_struct_in_list(map, DateTime, [Date.utc_today()])
end
end
describe "assert_structs_equal/3" do
test "fails" do
assert_structs_equal(
DateTime.utc_now(),
DateTime.utc_now(),
[:year, :month, :millisecond, :microsecond]
)
end
end
describe "assert_all_have_value/3" do
test "fails" do
list = [
%{key: :value, other: :pair},
%{key: :pair, other: :value},
[key: :list, other: :keyword]
]
assert_all_have_value(list, :key, :value)
end
end
describe "assert_changes_file/3" do
test "fails when the file doesn't exist" do
assert_changes_file @path, "hi" do
File.write(@path, "hi")
end
end
test "fails when the file matches before the expression is executed" do
File.mkdir_p!(Path.dirname(@path))
File.write(@path, "hi there, I'm pre-existing.")
assert_changes_file @path, "hi" do
File.write(@path, "hi")
end
end
test "fails when the file doesn't exist after the expression is executed" do
assert_changes_file @path, "hi" do
File.mkdir_p!(Path.dirname(@path))
end
end
test "fails when the file doesn't match the comparison" do
assert_changes_file @path, "guten Tag" do
File.mkdir_p!(Path.dirname(@path))
File.write(@path, "hi")
end
end
end
describe "assert_creates_file/2" do
test "fails when the file exists before the function" do
File.mkdir_p!(Path.dirname(@path))
File.write(@path, "hi")
assert_creates_file @path do
File.write(@path, "hi")
end
end
test "fails when the file doesn't exist after the function" do
assert_creates_file @path do
File.mkdir_p!(Path.dirname(@path))
end
end
end
describe "assert_deletes_file/2" do
test "fails when the file doesn't exist before the function" do
assert_deletes_file @path do
File.mkdir_p!(Path.dirname(@path))
end
end
test "fails when the file exists after the function" do
File.mkdir_p!(Path.dirname(@path))
File.write(@path, "hi there")
assert_deletes_file @path do
File.write(@path, "I'm pre-existing.")
end
end
end
describe "assert_receive_only/2" do
test "fails if it receives no messages" do
assert_receive_only(:hello, 1)
end
test "fails if it receives the wrong message first" do
send(self(), :hello_again)
send(self(), [:hello])
assert_receive_only([_])
end
test "fails if the messages are sent after the assert call" do
Process.send_after(self(), :hello, 50)
Process.send_after(self(), :hello_again, 20)
assert_receive_only(:hello, 100)
end
test "fails if it receives an unexpected message after the expected pattern" do
send(self(), :hello)
send(self(), :hello_again)
assert_receive_only(:hello)
end
end
describe "assert_raise/1" do
test "fails if the expression does not raise" do
assert_raise(fn ->
first = 1
second = 2
first / second
end)
end
end
end
| 25.948936 | 86 | 0.602493 |
73140d4957469b169181ece8f7fbc6fba19c1e1a | 22,596 | ex | Elixir | lib/ecto/query/builder.ex | mschae/ecto | 00f85444c4f61080617179232c0d528381de5ec3 | [
"Apache-2.0"
] | null | null | null | lib/ecto/query/builder.ex | mschae/ecto | 00f85444c4f61080617179232c0d528381de5ec3 | [
"Apache-2.0"
] | null | null | null | lib/ecto/query/builder.ex | mschae/ecto | 00f85444c4f61080617179232c0d528381de5ec3 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Query.Builder do
@moduledoc false
@distinct ~w(count)a
alias Ecto.Query
@typedoc """
Quoted types store primitive types and types in the format
{source, quoted}. The latter are handled directly in the planner,
never forwarded to Ecto.Type.
The Ecto.Type module concerns itself only with runtime types,
which include all primitive types and custom user types. Also
note custom user types do not show up during compilation time.
"""
@type quoted_type :: Ecto.Type.primitive | {non_neg_integer, atom | Macro.t}
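# Illustrative values (an assumption, not part of the original typedoc): :integer and
# {:array, :string} are primitive quoted types, while {0, :title} pairs a binding
# index with a field whose type is only resolved later by the planner.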
@doc """
Smart escapes a query expression and extracts interpolated values in
a map.
Everything that is a query expression will be escaped, interpolated
expressions (`^foo`) will be moved to a map unescaped and replaced
with `^index` in the query where index is a number indexing into the
map.
"""
@spec escape(Macro.t, quoted_type, map(), Keyword.t, Macro.Env.t) :: {Macro.t, %{}}
def escape(expr, type, params, vars, env)
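# Illustrative example (an assumption, not part of the original docs): escaping a
# pinned value registers it in the params map and replaces it with its index:
#
#     escape(quote(do: ^123), :any, %{}, [], __ENV__)
#     #=> {{:{}, [], [:^, [], [0]]}, %{0 => {123, :any}}}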
# var.x - where var is bound
def escape({{:., _, [{var, _, context}, field]}, _, []}, _type, params, vars, _env)
when is_atom(var) and is_atom(context) and is_atom(field) do
{escape_field(var, field, vars), params}
end
# field macro
def escape({:field, _, [{var, _, context}, field]}, _type, params, vars, _env)
when is_atom(var) and is_atom(context) do
{escape_field(var, field, vars), params}
end
# param interpolation
def escape({:^, _, [arg]}, type, params, _vars, _env) do
index = Map.size(params)
params = Map.put(params, index, {arg, type})
expr = {:{}, [], [:^, [], [index]]}
{expr, params}
end
# tagged types
def escape({:type, _, [{:^, _, [arg]}, type]}, _type, params, vars, _env) do
{type, escaped} = validate_type!(type, vars)
index = Map.size(params)
params = Map.put(params, index, {arg, type})
expr = {:{}, [], [:type, [], [{:{}, [], [:^, [], [index]]}, escaped]]}
{expr, params}
end
# fragments
def escape({:fragment, _, [query]}, _type, params, vars, env) when is_list(query) do
{escaped, params} = Enum.map_reduce(query, params, &escape_fragment(&1, :any, &2, vars, env))
{{:{}, [], [:fragment, [], [escaped]]}, params}
end
def escape({:fragment, _, [{:^, _, _} = expr]}, _type, params, vars, env) do
{escaped, params} = escape(expr, :any, params, vars, env)
{{:{}, [], [:fragment, [], [escaped]]}, params}
end
def escape({:fragment, _, [query|frags]}, _type, params, vars, env) when is_binary(query) do
pieces = split_binary(query)
if length(pieces) != length(frags) + 1 do
error! "fragment(...) expects extra arguments in the same amount of question marks in string"
end
{frags, params} = Enum.map_reduce(frags, params, &escape(&1, :any, &2, vars, env))
{{:{}, [], [:fragment, [], merge_fragments(pieces, frags)]}, params}
end
def escape({:fragment, _, [query | _]}, _type, _params, _vars, _env) do
error! "fragment(...) expects the first argument to be a string for SQL fragments, " <>
"a keyword list, or an interpolated value, got: `#{Macro.to_string(query)}`"
end
# interval
def escape({:datetime_add, _, [datetime, count, interval]} = expr, type, params, vars, env) do
assert_type!(expr, type, :datetime)
{datetime, params} = escape(datetime, :datetime, params, vars, env)
{count, interval, params} = escape_interval(count, interval, params, vars, env)
{{:{}, [], [:datetime_add, [], [datetime, count, interval]]}, params}
end
def escape({:date_add, _, [date, count, interval]} = expr, type, params, vars, env) do
assert_type!(expr, type, :date)
{date, params} = escape(date, :date, params, vars, env)
{count, interval, params} = escape_interval(count, interval, params, vars, env)
{{:{}, [], [:date_add, [], [date, count, interval]]}, params}
end
# sigils
def escape({name, _, [_, []]} = sigil, type, params, vars, _env)
when name in ~w(sigil_s sigil_S sigil_w sigil_W)a do
{literal(sigil, type, vars), params}
end
# lists
def escape(list, {:array, type}, params, vars, env) when is_list(list),
do: Enum.map_reduce(list, params, &escape(&1, type, &2, vars, env))
def escape(list, _type, params, vars, env) when is_list(list),
do: Enum.map_reduce(list, params, &escape(&1, :any, &2, vars, env))
# literals
def escape({:<<>>, _, args} = expr, type, params, vars, _env) do
valid? = Enum.all?(args, fn
{:::, _, [left, _]} -> is_integer(left) or is_binary(left)
left -> is_integer(left) or is_binary(left)
end)
unless valid? do
error! "`#{Macro.to_string(expr)}` is not a valid query expression. " <>
"Only literal binaries and strings are allowed, " <>
"dynamic values need to be explicitly interpolated in queries with ^"
end
{literal(expr, type, vars), params}
end
def escape({:-, _, [number]}, type, params, vars, _env) when is_number(number),
do: {literal(-number, type, vars), params}
def escape(number, type, params, vars, _env) when is_number(number),
do: {literal(number, type, vars), params}
def escape(binary, type, params, vars, _env) when is_binary(binary),
do: {literal(binary, type, vars), params}
def escape(boolean, type, params, vars, _env) when is_boolean(boolean),
do: {literal(boolean, type, vars), params}
def escape(nil, _type, params, _vars, _env),
do: {nil, params}
# comparison operators
def escape({comp_op, _, [left, right]} = expr, type, params, vars, env) when comp_op in ~w(== != < > <= >=)a do
assert_type!(expr, type, :boolean)
if is_nil(left) or is_nil(right) do
error! "comparison with nil is forbidden as it always evaluates to false. " <>
"If you want to check if a value is (not) nil, use is_nil/1 instead"
end
ltype = quoted_type(right, vars)
rtype = quoted_type(left, vars)
{left, params} = escape(left, ltype, params, vars, env)
{right, params} = escape(right, rtype, params, vars, env)
{{:{}, [], [comp_op, [], [left, right]]}, params}
end
# in operator
def escape({:in, _, [left, right]} = expr, type, params, vars, env)
when is_list(right)
when is_tuple(right) and elem(right, 0) in ~w(sigil_w sigil_W)a do
assert_type!(expr, type, :boolean)
{:array, ltype} = quoted_type(right, vars)
rtype = {:array, quoted_type(left, vars)}
{left, params} = escape(left, ltype, params, vars, env)
{right, params} = escape(right, rtype, params, vars, env)
{{:{}, [], [:in, [], [left, right]]}, params}
end
def escape({:in, _, [left, {:^, _, _} = right]} = expr, type, params, vars, env) do
assert_type!(expr, type, :boolean)
# The rtype in will be unwrapped in the query planner
ltype = :any
rtype = {:in_spread, quoted_type(left, vars)}
{left, params} = escape(left, ltype, params, vars, env)
{right, params} = escape(right, rtype, params, vars, env)
{{:{}, [], [:in, [], [left, right]]}, params}
end
def escape({:in, _, [left, right]} = expr, type, params, vars, env) do
assert_type!(expr, type, :boolean)
ltype = quoted_type(right, vars)
rtype = {:array, quoted_type(left, vars)}
# The ltype in will be unwrapped in the query planner
{left, params} = escape(left, {:in_array, ltype}, params, vars, env)
{right, params} = escape(right, rtype, params, vars, env)
{{:{}, [], [:in, [], [left, right]]}, params}
end
# Other functions - no type casting
def escape({name, _, args} = expr, type, params, vars, env) when is_atom(name) and is_list(args) do
case call_type(name, length(args)) do
{in_type, out_type} ->
assert_type!(expr, type, out_type)
escape_call(expr, in_type, params, vars, env)
nil ->
try_expansion(expr, type, params, vars, env)
end
end
# Vars are not allowed
def escape({name, _, context} = var, _type, _params, _vars, _env) when is_atom(name) and is_atom(context) do
error! "variable `#{Macro.to_string(var)}` is not a valid query expression. " <>
"Variables need to be explicitly interpolated in queries with ^"
end
# Everything else is not allowed
def escape(other, _type, _params, _vars, _env) do
error! "`#{Macro.to_string(other)}` is not a valid query expression"
end
defp split_binary(query), do: split_binary(query, "")
defp split_binary(<<>>, consumed), do: [consumed]
defp split_binary(<<??, rest :: binary >>, consumed), do: [consumed | split_binary(rest, "")]
defp split_binary(<<?\\, ??, rest :: binary >>, consumed), do: split_binary(rest, consumed <> <<??>>)
defp split_binary(<<first :: utf8, rest :: binary>>, consumed), do: split_binary(rest, consumed <> <<first>>)
defp escape_call({name, _, [arg, :distinct]}, type, params, vars, env) when name in @distinct do
{arg, params} = escape(arg, type, params, vars, env)
expr = {:{}, [], [name, [], [arg, :distinct]]}
{expr, params}
end
defp escape_call({name, _, args}, type, params, vars, env) do
{args, params} = Enum.map_reduce(args, params, &escape(&1, type, &2, vars, env))
expr = {:{}, [], [name, [], args]}
{expr, params}
end
defp escape_field(var, field, vars) do
var = escape_var(var, vars)
field = quoted_field!(field)
dot = {:{}, [], [:., [], [var, field]]}
{:{}, [], [dot, [], []]}
end
defp escape_interval(count, interval, params, vars, env) do
type =
cond do
is_float(count) -> :float
is_integer(count) -> :integer
true -> :decimal
end
{count, params} = escape(count, type, params, vars, env)
{count, quoted_interval!(interval), params}
end
defp escape_fragment({key, [{_, _}|_] = exprs}, type, params, vars, env) when is_atom(key) do
{escaped, params} = Enum.map_reduce(exprs, params, &escape_fragment(&1, type, &2, vars, env))
{{key, escaped}, params}
end
defp escape_fragment({key, expr}, type, params, vars, env) when is_atom(key) do
{escaped, params} = escape(expr, type, params, vars, env)
{{key, escaped}, params}
end
defp escape_fragment({key, _expr}, _type, _params, _vars, _env) do
error! "fragment(...) with keywords accepts only atoms as keys, got `#{Macro.to_string(key)}`"
end
defp merge_fragments([h1|t1], [h2|t2]),
do: [{:raw, h1}, {:expr, h2}|merge_fragments(t1, t2)]
defp merge_fragments([h1], []),
do: [{:raw, h1}]
defp call_type(agg, 1) when agg in ~w(max count sum min avg)a, do: {:any, :any}
defp call_type(agg, 2) when agg in @distinct, do: {:any, :any}
defp call_type(comp, 2) when comp in ~w(== != < > <= >=)a, do: {:any, :boolean}
defp call_type(like, 2) when like in ~w(like ilike)a, do: {:string, :boolean}
defp call_type(bool, 2) when bool in ~w(and or)a, do: {:boolean, :boolean}
defp call_type(:not, 1), do: {:boolean, :boolean}
defp call_type(:is_nil, 1), do: {:any, :boolean}
defp call_type(_, _), do: nil
defp assert_type!(_expr, {int, _field}, _actual) when is_integer(int) do
:ok
end
defp assert_type!(expr, type, actual) do
if Ecto.Type.match?(type, actual) do
:ok
else
error! "expression `#{Macro.to_string(expr)}` does not type check. " <>
"It returns a value of type #{inspect actual} but a value of " <>
"type #{inspect type} is expected"
end
end
defp validate_type!({:array, {:__aliases__, _, _}} = type, _vars), do: {type, type}
defp validate_type!({:array, atom} = type, _vars) when is_atom(atom), do: {type, type}
defp validate_type!({:__aliases__, _, _} = type, _vars), do: {type, type}
defp validate_type!(type, _vars) when is_atom(type), do: {type, type}
defp validate_type!({{:., _, [{var, _, context}, field]}, _, []}, vars)
when is_atom(var) and is_atom(context) and is_atom(field),
do: {{find_var!(var, vars), field}, escape_field(var, field, vars)}
defp validate_type!({:field, _, [{var, _, context}, field]}, vars)
when is_atom(var) and is_atom(context) and is_atom(field),
do: {{find_var!(var, vars), field}, escape_field(var, field, vars)}
defp validate_type!(type, _vars) do
error! "type/2 expects an alias, atom or source.field as second argument, got: `#{Macro.to_string(type)}"
end
@always_tagged [:binary]
defp literal(value, expected, vars),
do: do_literal(value, expected, quoted_type(value, vars))
defp do_literal(value, :any, current) when current in @always_tagged,
do: {:%, [], [Ecto.Query.Tagged, {:%{}, [], [value: value, type: current]}]}
defp do_literal(value, :any, _current),
do: value
defp do_literal(value, expected, expected),
do: value
defp do_literal(value, expected, _current),
do: {:%, [], [Ecto.Query.Tagged, {:%{}, [], [value: value, type: expected]}]}
@doc """
Escape the params entries map.
"""
@spec escape_params(map()) :: Macro.t
def escape_params(map) do
Map.values(map)
end
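# Illustrative example (an assumption): the indexed params map built by escape/5
# becomes a positional list:
#
#     escape_params(%{0 => {123, :integer}, 1 => {"abc", :string}})
#     #=> [{123, :integer}, {"abc", :string}]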
@doc """
Escapes a variable according to the given binds.
An escaped variable is represented internally as
`&0`, `&1` and so on.
"""
@spec escape_var(atom, Keyword.t) :: Macro.t | no_return
def escape_var(var, vars) do
{:{}, [], [:&, [], [find_var!(var, vars)]]}
end
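# Illustrative example (an assumption): the second binding escapes to &1:
#
#     escape_var(:y, [x: 0, y: 1])
#     #=> {:{}, [], [:&, [], [1]]}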
@doc """
Escapes a list of bindings as a list of atoms.
Only variables or `{:atom, value}` tuples are allowed in the `bindings` list,
otherwise an `Ecto.Query.CompileError` is raised.
## Examples
iex> escape_binding(quote do: [x, y, z])
[x: 0, y: 1, z: 2]
iex> escape_binding(quote do: [x: 0, z: 2])
[x: 0, z: 2]
iex> escape_binding(quote do: [x, y, x])
** (Ecto.Query.CompileError) variable `x` is bound twice
iex> escape_binding(quote do: [a, b, :foo])
** (Ecto.Query.CompileError) binding list should contain only variables, got: :foo
"""
@spec escape_binding(list) :: Keyword.t
def escape_binding(binding) when is_list(binding) do
vars = binding |> Enum.with_index |> Enum.map(&escape_bind(&1))
bound_vars = vars |> Keyword.keys |> Enum.filter(&(&1 != :_))
dup_vars = bound_vars -- Enum.uniq(bound_vars)
unless dup_vars == [] do
error! "variable `#{hd dup_vars}` is bound twice"
end
vars
end
def escape_binding(bind) do
error! "binding should be list of variables, got: #{Macro.to_string(bind)}"
end
defp escape_bind({{var, _} = tuple, _}) when is_atom(var),
do: tuple
defp escape_bind({{var, _, context}, ix}) when is_atom(var) and is_atom(context),
do: {var, ix}
defp escape_bind({bind, _ix}),
do: error!("binding list should contain only variables, got: #{Macro.to_string(bind)}")
defp try_expansion(expr, type, params, vars, env) do
case Macro.expand(expr, env) do
^expr ->
error! """
`#{Macro.to_string(expr)}` is not a valid query expression.
* If you intended to call a database function, please check the documentation
for Ecto.Query to see the supported database expressions
* If you intended to call an Elixir function or introduce a value,
you need to explicitly interpolate it with ^
"""
expanded ->
escape(expanded, type, params, vars, env)
end
end
@doc """
Finds the index value for the given var in vars or raises.
"""
def find_var!(var, vars) do
vars[var] || error! "unbound variable `#{var}` in query"
end
@doc """
Checks if the field is an atom at compilation time or
delegates the check to runtime for interpolation.
"""
def quoted_field!({:^, _, [expr]}),
do: quote(do: Ecto.Query.Builder.field!(unquote(expr)))
def quoted_field!(atom) when is_atom(atom),
do: atom
def quoted_field!(other),
do: error!("expected literal atom or interpolated value in field/2, got: `#{inspect other}`")
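# Illustrative behaviour (an assumption based on the clauses above; `some_field`
# is a hypothetical variable):
#
#     quoted_field!(:title)
#     #=> :title
#
#     quoted_field!(quote(do: ^some_field))
#     #=> AST for Ecto.Query.Builder.field!(some_field), so the check runs at runtime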
@doc """
Called by escaper at runtime to verify that value is an atom.
"""
def field!(atom) when is_atom(atom),
do: atom
def field!(other),
do: error!("expected atom in field/2, got: `#{inspect other}`")
@doc """
Checks if the field is a valid interval at compilation time or
delegates the check to runtime for interpolation.
"""
def quoted_interval!({:^, _, [expr]}),
do: quote(do: Ecto.Query.Builder.interval!(unquote(expr)))
def quoted_interval!(other),
do: interval!(other)
@doc """
Called by escaper at runtime to verify that value is a valid interval.
"""
@interval ~w(year month week day hour minute second millisecond microsecond)
def interval!(interval) when interval in @interval,
do: interval
def interval!(other),
do: error!("invalid interval: `#{inspect other}` (expected one of #{Enum.join(@interval, ", ")})")
@doc """
Returns the type of an expression at build time.
"""
@spec quoted_type(Macro.t, Keyword.t) :: quoted_type
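# Illustrative examples (an assumption, derived from the clauses below):
#
#     quoted_type(quote(do: x.title), [x: 0]) #=> {0, :title}
#     quoted_type(123, [])                    #=> :integer
#     quoted_type(quote(do: 1 > 2), [])       #=> :boolean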
# Fields
def quoted_type({{:., _, [{var, _, context}, field]}, _, []}, vars)
when is_atom(var) and is_atom(context) and is_atom(field),
do: {find_var!(var, vars), field}
def quoted_type({:field, _, [{var, _, context}, field]}, vars)
when is_atom(var) and is_atom(context) and is_atom(field),
do: {find_var!(var, vars), field}
# Unquoting code here means the second argument of field will
# always be unquoted twice, one by the type checking and another
# in the query itself. We are assuming this is not an issue
# as the solution is somewhat complicated.
def quoted_type({:field, _, [{var, _, context}, {:^, _, [code]}]}, vars)
when is_atom(var) and is_atom(context),
do: {find_var!(var, vars), code}
# Interval
def quoted_type({:datetime_add, _, [_, _, __]}, _vars), do: :datetime
def quoted_type({:date_add, _, [_, _, __]}, _vars), do: :date
# Tagged
def quoted_type({:<<>>, _, _}, _vars), do: :binary
def quoted_type({:type, _, [_, type]}, _vars), do: type
# Sigils
def quoted_type({sigil, _, [_, []]}, _vars) when sigil in ~w(sigil_s sigil_S)a, do: :string
def quoted_type({sigil, _, [_, []]}, _vars) when sigil in ~w(sigil_w sigil_W)a, do: {:array, :string}
# Lists
def quoted_type(list, vars) when is_list(list) do
case Enum.uniq(Enum.map(list, "ed_type(&1, vars))) do
[type] -> {:array, type}
_ -> {:array, :any}
end
end
# Negative numbers
def quoted_type({:-, _, [number]}, _vars) when is_integer(number), do: :integer
def quoted_type({:-, _, [number]}, _vars) when is_float(number), do: :float
# Literals
def quoted_type(literal, _vars) when is_float(literal), do: :float
def quoted_type(literal, _vars) when is_binary(literal), do: :string
def quoted_type(literal, _vars) when is_boolean(literal), do: :boolean
def quoted_type(literal, _vars) when is_integer(literal), do: :integer
def quoted_type({name, _, args}, _vars) when is_atom(name) and is_list(args) do
case call_type(name, length(args)) do
{_in, out} -> out
nil -> :any
end
end
def quoted_type(_, _vars), do: :any
@doc """
Raises a query building error.
"""
def error!(message) when is_binary(message) do
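# Drop Ecto.Query.Builder.* and Enum frames from the stacktrace below so the
# raised CompileError points at the user's query expression, not builder internals.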
{:current_stacktrace, [_|t]} = Process.info(self, :current_stacktrace)
t = Enum.drop_while t, fn
{mod, _, _, _} ->
String.starts_with?(Atom.to_string(mod), ["Elixir.Ecto.Query.", "Elixir.Enum"])
_ ->
false
end
reraise Ecto.Query.CompileError, [message: message], t
end
@doc """
Counts the bindings in a query expression.
## Examples
iex> count_binds(%Ecto.Query{joins: [1,2,3]})
3
iex> count_binds(%Ecto.Query{from: 0, joins: [1,2]})
3
"""
@spec count_binds(Ecto.Query.t) :: non_neg_integer
def count_binds(%Query{from: from, joins: joins}) do
count = if from, do: 1, else: 0
count + length(joins)
end
@doc """
Applies a query at compilation time or at runtime.
This function is responsible for checking if a given query is an
`Ecto.Query` struct at compile time. If it is not, it will act
accordingly.
If a query is available, it invokes the `apply` function in the
given `module`, otherwise, it delegates the call to runtime.
It is important to keep in mind the complexities introduced
by this function. In particular, a %Query{} is a mixture of escaped
and unescaped expressions which makes it impossible for this
function to properly escape or unescape it at compile/runtime.
For this reason, the apply function should be ready to handle
arguments in both escaped and unescaped form.
For example, take into account the `Builder.Select`:
select = %Ecto.Query.QueryExpr{expr: expr, file: env.file, line: env.line}
Builder.apply_query(query, __MODULE__, [select], env)
`expr` is already an escaped expression and we must not escape
it again. However, it is wrapped in an Ecto.Query.QueryExpr,
which must be escaped! Furthermore, the `apply/2` function
in `Builder.Select` very likely will inject the QueryExpr inside
Query, which again, is a mixture of escaped and unescaped expressions.
That said, you need to obey the following rules:
1. In order to call this function, the arguments must be escapable
values supported by the `escape/1` function below;
2. The apply function may not manipulate the given arguments,
with the exception of the query.
In particular, when invoked at compilation time, all arguments
(except the query) will be escaped, so they can be injected into
the query properly, but they will be in their runtime form
when invoked at runtime.
"""
def apply_query(query, module, args, env) do
query = Macro.expand(query, env)
args = for i <- args, do: escape_query(i)
case unescape_query(query) do
%Query{} = unescaped ->
apply(module, :apply, [unescaped|args]) |> escape_query
_ ->
quote do: unquote(module).apply(unquote_splicing([query|args]))
end
end
# Unescapes an `Ecto.Query` struct.
defp unescape_query({:%, _, [Query, {:%{}, _, list}]}) do
struct(Query, list)
end
defp unescape_query({:%{}, _, list} = ast) do
if List.keyfind(list, :__struct__, 0) == {:__struct__, Query} do
Enum.into(list, %{})
else
ast
end
end
defp unescape_query(other) do
other
end
# Escapes an `Ecto.Query` and associated structs.
defp escape_query(%Query{} = query),
do: {:%{}, [], Map.to_list(query)}
defp escape_query(other),
do: other
end
| 36.1536 | 113 | 0.634183 |
7314104ff0ad0846635af0f06a3bfdc233a4db22 | 903 | exs | Elixir | test/mix/tasks/git_hooks/install_test.exs | jwilson-ts/elixir_git_hooks | c8ef3f286accc91cb6d5e61694b8ee7dbdbe8ec7 | [
"MIT"
] | null | null | null | test/mix/tasks/git_hooks/install_test.exs | jwilson-ts/elixir_git_hooks | c8ef3f286accc91cb6d5e61694b8ee7dbdbe8ec7 | [
"MIT"
] | null | null | null | test/mix/tasks/git_hooks/install_test.exs | jwilson-ts/elixir_git_hooks | c8ef3f286accc91cb6d5e61694b8ee7dbdbe8ec7 | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.InstallTest do
@moduledoc false
use ExUnit.Case, async: false
use GitHooks.TestSupport.ConfigCase
alias Mix.Tasks.GitHooks.Install
alias GitHooks.Git.Path, as: GitPath
@tag capture_log: true
describe "run/1" do
test "replaces the hook template with config values" do
put_git_hook_config(
[:pre_commit, :pre_push],
tasks: {:cmd, "check"}
)
hooks_file = Install.run(["--dry-run", "--quiet"])
assert hooks_file == [
pre_commit: expect_hook_template("pre_commit"),
pre_push: expect_hook_template("pre_push")
]
end
end
#
# Private functions
#
defp expect_hook_template(git_hook) do
app_path = GitPath.resolve_app_path()
~s(#!/bin/sh
[ "#{app_path}" != "" ] && cd "#{app_path}"
mix git_hooks.run #{git_hook} "$@"
[ $? -ne 0 ] && exit 1
exit 0
)
end
end
| 20.066667 | 62 | 0.619048 |
731415808454195be68db62d0bf7e4f6eb67411d | 21,769 | ex | Elixir | lib/blockchain/transaction.ex | m0ar/blockchain | 6c3bf50edc673433f61a679a35050f1e72fe2274 | [
"MIT"
] | 93 | 2017-08-08T11:58:16.000Z | 2021-12-12T15:31:42.000Z | lib/blockchain/transaction.ex | m0ar/blockchain | 6c3bf50edc673433f61a679a35050f1e72fe2274 | [
"MIT"
] | 17 | 2017-08-07T08:08:05.000Z | 2018-08-20T19:27:20.000Z | lib/blockchain/transaction.ex | m0ar/blockchain | 6c3bf50edc673433f61a679a35050f1e72fe2274 | [
"MIT"
] | 14 | 2017-10-21T18:02:31.000Z | 2021-07-09T01:02:07.000Z | defmodule Blockchain.Transaction do
@moduledoc """
This module encodes the transaction object, defined in Section 4.3
of the Yellow Paper (http://gavwood.com/Paper.pdf). We are focused
on implementing 𝛶, as defined in Eq.(1).
"""
alias Blockchain.Account
alias Block.Header
defstruct nonce: 0,      # Tn
gas_price: 0,            # Tp
gas_limit: 0,            # Tg
to: <<>>,                # Tt
value: 0,                # Tv
v: nil,                  # Tw
r: nil,                  # Tr
s: nil,                  # Ts
init: <<>>,              # Ti
data: <<>>               # Td
@type t :: %__MODULE__{
nonce: EVM.val(),
gas_price: EVM.val(),
gas_limit: EVM.val(),
to: EVM.address() | <<_::0>>,
value: EVM.val(),
v: Blockchain.Transaction.Signature.hash_v(),
r: Blockchain.Transaction.Signature.hash_r(),
s: Blockchain.Transaction.Signature.hash_s(),
init: EVM.MachineCode.t(),
data: binary()
}
@doc """
Encodes a transaction such that it can be RLP-encoded.
This is defined at L_T Eq.(14) in the Yellow Paper.
## Examples
iex> Blockchain.Transaction.serialize(%Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<1::160>>, value: 8, v: 27, r: 9, s: 10, data: "hi"})
[<<5>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]
iex> Blockchain.Transaction.serialize(%Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<>>, value: 8, v: 27, r: 9, s: 10, init: <<1, 2, 3>>})
[<<5>>, <<6>>, <<7>>, <<>>, <<8>>, <<1, 2, 3>>, <<27>>, <<9>>, <<10>>]
iex> Blockchain.Transaction.serialize(%Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<>>, value: 8, v: 27, r: 9, s: 10, init: <<1, 2, 3>>}, false)
[<<5>>, <<6>>, <<7>>, <<>>, <<8>>, <<1, 2, 3>>]
iex> Blockchain.Transaction.serialize(%Blockchain.Transaction{ data: "", gas_limit: 21000, gas_price: 20000000000, init: "", nonce: 9, r: 0, s: 0, to: "55555555555555555555", v: 1, value: 1000000000000000000 })
["\t", <<4, 168, 23, 200, 0>>, "R\b", "55555555555555555555", <<13, 224, 182, 179, 167, 100, 0, 0>>, "", <<1>>, "", ""]
"""
@spec serialize(t) :: ExRLP.t()
def serialize(trx, include_vrs \\ true) do
base = [
trx.nonce |> BitHelper.encode_unsigned(),
trx.gas_price |> BitHelper.encode_unsigned(),
trx.gas_limit |> BitHelper.encode_unsigned(),
trx.to,
trx.value |> BitHelper.encode_unsigned(),
if(trx.to == <<>>, do: trx.init, else: trx.data)
]
if include_vrs do
base ++
[
trx.v |> BitHelper.encode_unsigned(),
trx.r |> BitHelper.encode_unsigned(),
trx.s |> BitHelper.encode_unsigned()
]
else
base
end
end
@doc """
Decodes a transaction that was previously encoded
using `Transaction.serialize/1`. Note, this is the
inverse of L_T Eq.(14) defined in the Yellow Paper.
## Examples
iex> Blockchain.Transaction.deserialize([<<5>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>])
%Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<1::160>>, value: 8, v: 27, r: 9, s: 10, data: "hi"}
iex> Blockchain.Transaction.deserialize([<<5>>, <<6>>, <<7>>, <<>>, <<8>>, <<1, 2, 3>>, <<27>>, <<9>>, <<10>>])
%Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<>>, value: 8, v: 27, r: 9, s: 10, init: <<1, 2, 3>>}
iex> Blockchain.Transaction.deserialize(["\t", <<4, 168, 23, 200, 0>>, "R\b", "55555555555555555555", <<13, 224, 182, 179, 167, 100, 0, 0>>, "", <<1>>, "", ""])
%Blockchain.Transaction{
data: "",
gas_limit: 21000,
gas_price: 20000000000,
init: "",
nonce: 9,
r: 0,
s: 0,
to: "55555555555555555555",
v: 1,
value: 1000000000000000000
}
"""
@spec deserialize(ExRLP.t()) :: t
def deserialize(rlp) do
[
nonce,
gas_price,
gas_limit,
to,
value,
init_or_data,
v,
r,
s
] = rlp
{init, data} = if to == <<>>, do: {init_or_data, <<>>}, else: {<<>>, init_or_data}
%__MODULE__{
nonce: :binary.decode_unsigned(nonce),
gas_price: :binary.decode_unsigned(gas_price),
gas_limit: :binary.decode_unsigned(gas_limit),
to: to,
value: :binary.decode_unsigned(value),
init: init,
data: data,
v: :binary.decode_unsigned(v),
r: :binary.decode_unsigned(r),
s: :binary.decode_unsigned(s)
}
end
@doc """
Validates a transaction, checking the conditions that must hold
before we're willing to execute it. This is
specified in Section 6.2 of the Yellow Paper, Eq.(65) and Eq.(66).
TODO: Consider returning a set of reasons, instead of a singular reason.
## Examples
# Sender address is nil
iex> trx = %Blockchain.Transaction{data: <<>>, gas_limit: 1_000, gas_price: 1, init: <<1>>, nonce: 5, to: <<>>, value: 5, r: 1, s: 2, v: 3}
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Transaction.is_valid?(trx, %Block.Header{})
{:invalid, :invalid_sender}
# Sender account is nil
iex> private_key = <<1::256>>
iex> trx =
...> %Blockchain.Transaction{data: <<>>, gas_limit: 1_000, gas_price: 1, init: <<1>>, nonce: 5, to: <<>>, value: 5}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Transaction.is_valid?(trx, %Block.Header{})
{:invalid, :missing_account}
# Has sender account, but nonce mismatch
iex> private_key = <<1::256>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> trx =
...> %Blockchain.Transaction{data: <<>>, gas_limit: 1_000, gas_price: 1, init: <<1>>, nonce: 4, to: <<>>, value: 5}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 1000, nonce: 5})
...> |> Blockchain.Transaction.is_valid?(trx, %Block.Header{})
{:invalid, :nonce_mismatch}
# Insufficient starting gas
iex> private_key = <<1::256>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> trx =
...> %Blockchain.Transaction{data: <<>>, gas_limit: 1_000, gas_price: 1, init: <<1>>, nonce: 5, to: <<>>, value: 5}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 1000, nonce: 5})
...> |> Blockchain.Transaction.is_valid?(trx, %Block.Header{})
{:invalid, :insufficient_intrinsic_gas}
# Insufficient endowment
iex> private_key = <<1::256>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> trx =
...> %Blockchain.Transaction{data: <<>>, gas_limit: 100_000, gas_price: 1, init: <<1>>, nonce: 5, to: <<>>, value: 5}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 1000, nonce: 5})
...> |> Blockchain.Transaction.is_valid?(trx, %Block.Header{})
{:invalid, :insufficient_balance}
iex> private_key = <<1::256>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> trx =
...> %Blockchain.Transaction{data: <<>>, gas_limit: 100_000, gas_price: 1, init: <<1>>, nonce: 5, to: <<>>, value: 5}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 100_001, nonce: 5})
...> |> Blockchain.Transaction.is_valid?(trx, %Block.Header{})
{:invalid, :insufficient_balance}
iex> private_key = <<1::256>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> trx =
...> %Blockchain.Transaction{data: <<>>, gas_limit: 100_000, gas_price: 1, init: <<1>>, nonce: 5, to: <<>>, value: 5}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 100_006, nonce: 5})
...> |> Blockchain.Transaction.is_valid?(trx, %Block.Header{gas_limit: 50_000, gas_used: 49_999})
{:invalid, :over_gas_limit}
iex> private_key = <<1::256>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> trx =
...> %Blockchain.Transaction{data: <<>>, gas_limit: 100_000, gas_price: 1, init: <<1>>, nonce: 5, to: <<>>, value: 5}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 100_006, nonce: 5})
...> |> Blockchain.Transaction.is_valid?(trx, %Block.Header{gas_limit: 500_000, gas_used: 49_999})
:valid
"""
@spec is_valid?(EVM.state(), t, Header.t()) :: :valid | {:invalid, atom()}
def is_valid?(state, trx, block_header) do
g_0 = intrinsic_gas_cost(trx, block_header)
v_0 = trx.gas_limit * trx.gas_price + trx.value
case Blockchain.Transaction.Signature.sender(trx) do
{:error, _reason} ->
{:invalid, :invalid_sender}
{:ok, sender_address} ->
sender_account = Account.get_account(state, sender_address)
if sender_account do
cond do
sender_account.nonce != trx.nonce -> {:invalid, :nonce_mismatch}
g_0 > trx.gas_limit -> {:invalid, :insufficient_intrinsic_gas}
v_0 > sender_account.balance -> {:invalid, :insufficient_balance}
trx.gas_limit > Header.available_gas(block_header) -> {:invalid, :over_gas_limit}
true -> :valid
end
else
{:invalid, :missing_account}
end
end
end
@doc """
Performs transaction execution, as defined in Section 6
of the Yellow Paper, defined there as 𝛶, Eq.(1) and Eq.(59),
Eq.(70), Eq.(79) and Eq.(80).
From the Yellow Paper, T_o is the original transactor, which can differ from the
sender in the case of a message call or contract creation
not directly triggered by a transaction but coming from
the execution of EVM-code.
# TODO: Add rich examples in `transaction_test.exs`
## Examples
# Create contract
iex> beneficiary = <<0x05::160>>
iex> private_key = <<1::256>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> contract_address = Blockchain.Contract.new_contract_address(sender, 6)
iex> machine_code = EVM.MachineCode.compile([:push1, 3, :push1, 5, :add, :push1, 0x00, :mstore, :push1, 32, :push1, 0, :return])
iex> trx = %Blockchain.Transaction{nonce: 5, gas_price: 3, gas_limit: 100_000, to: <<>>, value: 5, init: machine_code}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> {state, gas, logs} = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 400_000, nonce: 5})
...> |> Blockchain.Transaction.execute_transaction(trx, %Block.Header{beneficiary: beneficiary})
iex> {gas, logs}
{53780, <<>>}
iex> Blockchain.Account.get_accounts(state, [sender, beneficiary, contract_address])
[%Blockchain.Account{balance: 238655, nonce: 6}, %Blockchain.Account{balance: 161340}, %Blockchain.Account{balance: 5, code_hash: <<243, 247, 169, 254, 54, 79, 170, 185, 59, 33, 109, 165, 10, 50, 20, 21, 79, 34, 160, 162, 180, 21, 178, 58, 132, 200, 22, 158, 139, 99, 110, 227>>}]
# Message call
iex> beneficiary = <<0x05::160>>
iex> private_key = <<1::256>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> contract_address = Blockchain.Contract.new_contract_address(sender, 6)
iex> machine_code = EVM.MachineCode.compile([:push1, 3, :push1, 5, :add, :push1, 0x00, :mstore, :push1, 0, :push1, 32, :return])
iex> trx = %Blockchain.Transaction{nonce: 5, gas_price: 3, gas_limit: 100_000, to: contract_address, value: 5, init: machine_code}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> {state, gas, logs} = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 400_000, nonce: 5})
...> |> Blockchain.Account.put_code(contract_address, machine_code)
...> |> Blockchain.Transaction.execute_transaction(trx, %Block.Header{beneficiary: beneficiary})
iex> {gas, logs}
{21780, <<>>}
iex> Blockchain.Account.get_accounts(state, [sender, beneficiary, contract_address])
[%Blockchain.Account{balance: 334655, nonce: 6}, %Blockchain.Account{balance: 65340}, %Blockchain.Account{balance: 5, code_hash: <<216, 114, 80, 103, 17, 50, 164, 75, 162, 123, 123, 99, 162, 105, 226, 15, 215, 200, 136, 216, 29, 106, 193, 119, 1, 173, 138, 37, 219, 39, 23, 231>>}]
"""
@spec execute_transaction(EVM.state(), t, Header.t()) ::
{EVM.state(), EVM.Gas.t(), EVM.SubState.logs()}
def execute_transaction(state, trx, block_header) do
# TODO: Check transaction validity.
{:ok, sender} = Blockchain.Transaction.Signature.sender(trx)
state_0 = begin_transaction(state, sender, trx)
# sender and originator are the same for transaction execution
originator = sender
# stack depth starts at zero for transaction execution
stack_depth = 0
# apparent value is the full value for transaction execution
apparent_value = trx.value
# gas is equal to what was just subtracted from sender account less intrinsic gas cost
gas = trx.gas_limit - intrinsic_gas_cost(trx, block_header)
# TODO: Sender versus originator?
{state_p, remaining_gas, sub_state} =
case trx.to do
# Λ
<<>> ->
Blockchain.Contract.create_contract(
state_0,
sender,
originator,
gas,
trx.gas_price,
trx.value,
trx.init,
stack_depth,
block_header
)
recipient ->
# Note, we only want to take the first 3 items from the tuples, as designated Θ_3 in the literature
# Θ_3
{state, remaining_gas_, sub_state_, _output} =
Blockchain.Contract.message_call(
state_0,
sender,
originator,
recipient,
recipient,
gas,
trx.gas_price,
trx.value,
apparent_value,
trx.data,
stack_depth,
block_header
)
{state, remaining_gas_, sub_state_}
end
refund = calculate_total_refund(trx, remaining_gas, sub_state.refund)
state_after_gas = finalize_transaction_gas(state_p, sender, trx, refund, block_header)
state_after_suicides =
Enum.reduce(sub_state.suicide_list, state_after_gas, fn address, state ->
Account.del_account(state, address)
end)
expended_gas = trx.gas_limit - remaining_gas
# { σ', Υ^g, Υ^l }, as defined in Eq.(79) and Eq.(80)
{state_after_suicides, expended_gas, sub_state.logs}
end
@doc """
Performs the first step of a transaction, which adjusts the sender's
balance and nonce, as defined in Eq.(67), Eq.(68) and Eq.(69)
of the Yellow Paper.
Note: we pass in sender here so we do not need to compute it
several times (since we'll use it elsewhere).
TODO: we execute this as two separate updates; we may want to
combine a series of updates before we update our state.
## Examples
iex> state = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(<<0x01::160>>, %Blockchain.Account{balance: 1000, nonce: 7})
iex> state = Blockchain.Transaction.begin_transaction(state, <<0x01::160>>, %Blockchain.Transaction{gas_price: 3, gas_limit: 100})
iex> Blockchain.Account.get_account(state, <<0x01::160>>)
%Blockchain.Account{balance: 700, nonce: 8}
"""
@spec begin_transaction(EVM.state(), EVM.address(), t) :: EVM.state()
def begin_transaction(state, sender, trx) do
state
|> Account.dec_wei(sender, trx.gas_limit * trx.gas_price)
|> Account.increment_nonce(sender)
end
@doc """
Finalizes the gas payout, repaying the sender for excess or refunded gas
and paying the miner his due. This is defined according to Eq.(73), Eq.(74),
Eq.(75) and Eq.(76) of the Yellow Paper.
Again, we take a sender so that we don't have to re-compute the sender
address several times.
## Examples
iex> trx = %Blockchain.Transaction{gas_price: 10, gas_limit: 30}
iex> state = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(<<0x01::160>>, %Blockchain.Account{balance: 11})
...> |> Blockchain.Account.put_account(<<0x02::160>>, %Blockchain.Account{balance: 22})
iex> Blockchain.Transaction.finalize_transaction_gas(state, <<0x01::160>>, trx, 5, %Block.Header{beneficiary: <<0x02::160>>})
...> |> Blockchain.Account.get_accounts([<<0x01::160>>, <<0x02::160>>])
[
%Blockchain.Account{balance: 61},
%Blockchain.Account{balance: 272},
]
"""
@spec finalize_transaction_gas(EVM.state(), EVM.address(), t, EVM.Gas.t(), Block.Header.t()) ::
EVM.state()
def finalize_transaction_gas(state, sender, trx, total_refund, block_header) do
state
# Eq.(74)
|> Account.add_wei(sender, total_refund * trx.gas_price)
# Eq.(75)
|> Account.add_wei(block_header.beneficiary, (trx.gas_limit - total_refund) * trx.gas_price)
end
@doc """
Calculates the amount which should be refunded based on the current transaction's
final usage. This includes the remaining gas plus refunds from clearing storage.
The spec calls for capping the refund at half of the total amount of gas used.
This function is defined as `g*` in Eq.(72) in the Yellow Paper.
## Examples
iex> Blockchain.Transaction.calculate_total_refund(%Blockchain.Transaction{gas_limit: 100}, 10, 5)
15
iex> Blockchain.Transaction.calculate_total_refund(%Blockchain.Transaction{gas_limit: 100}, 10, 99)
55
iex> Blockchain.Transaction.calculate_total_refund(%Blockchain.Transaction{gas_limit: 100}, 10, 0)
10
iex> Blockchain.Transaction.calculate_total_refund(%Blockchain.Transaction{gas_limit: 100}, 11, 99)
55
"""
@spec calculate_total_refund(t, EVM.Gas.t(), EVM.SubState.refund()) :: EVM.Gas.t()
def calculate_total_refund(trx, remaining_gas, refund) do
# TODO: Add a math helper, finally
max_refund = round(:math.floor((trx.gas_limit - remaining_gas) / 2))
remaining_gas + min(max_refund, refund)
end
@doc """
Defines the "intrinsic gas cost," that is the amount of gas
this transaction requires to be paid prior to execution. This
is defined as g_0 in Eq.(62), Eq.(63) and Eq.(64) of the
Yellow Paper.
## Examples
iex> Blockchain.Transaction.intrinsic_gas_cost(%Blockchain.Transaction{to: <<1::160>>, init: <<>>, data: <<1, 2, 0, 3>>}, %Block.Header{number: 5})
3 * 68 + 4 + 21000
iex> Blockchain.Transaction.intrinsic_gas_cost(%Blockchain.Transaction{to: <<1::160>>, init: <<>>, data: <<1, 2, 0, 3>>}, %Block.Header{number: 5_000_000})
3 * 68 + 4 + 21000
iex> Blockchain.Transaction.intrinsic_gas_cost(%Blockchain.Transaction{to: <<1::160>>, init: <<>>, data: <<>>}, %Block.Header{number: 5_000_000})
21000
iex> Blockchain.Transaction.intrinsic_gas_cost(%Blockchain.Transaction{to: <<>>, init: <<1, 2, 0, 3>>, data: <<>>}, %Block.Header{number: 5})
3 * 68 + 4 + 21000
iex> Blockchain.Transaction.intrinsic_gas_cost(%Blockchain.Transaction{to: <<>>, init: <<1, 2, 0, 3>>, data: <<>>}, %Block.Header{number: 5_000_000})
3 * 68 + 4 + 32000 + 21000
"""
@spec intrinsic_gas_cost(t, Header.t()) :: EVM.Gas.t()
def intrinsic_gas_cost(trx, block_header) do
EVM.Gas.g_txdata(trx.init) + EVM.Gas.g_txdata(trx.data) +
if(
trx.to == <<>> and Header.is_after_homestead?(block_header),
do: EVM.Gas.g_txcreate(),
else: 0
) + EVM.Gas.g_transaction()
end
end
| 43.977778 | 287 | 0.620148 |
7314194f362b5b3a56b5e758aba131b80cc6a04a | 108 | ex | Elixir | test/support/mocks.ex | the-mikedavis/snipe | 3aae23bd1216aa2006ee1964e1482328731e2589 | [
"MIT"
] | null | null | null | test/support/mocks.ex | the-mikedavis/snipe | 3aae23bd1216aa2006ee1964e1482328731e2589 | [
"MIT"
] | null | null | null | test/support/mocks.ex | the-mikedavis/snipe | 3aae23bd1216aa2006ee1964e1482328731e2589 | [
"MIT"
] | null | null | null | Mox.defmock(Snipe.SshMock, for: Snipe.Ssh.Behaviour)
Mox.defmock(Snipe.SftpMock, for: Snipe.Sftp.Behaviour)
| 36 | 54 | 0.796296 |
731423fc3d21e1ab34673c5b8d1e59e9e35ad160 | 5,405 | exs | Elixir | test/guard_clauses_test.exs | snow-jallen/thinkingelixir | 973f33d618fd8b9c1b8d44cebfec4f1c07e192b2 | [
"MIT"
] | null | null | null | test/guard_clauses_test.exs | snow-jallen/thinkingelixir | 973f33d618fd8b9c1b8d44cebfec4f1c07e192b2 | [
"MIT"
] | null | null | null | test/guard_clauses_test.exs | snow-jallen/thinkingelixir | 973f33d618fd8b9c1b8d44cebfec4f1c07e192b2 | [
"MIT"
] | null | null | null | defmodule PatternMatching.GuardClausesTest do
@moduledoc """
Fix or complete the code to make the tests pass.
"""
use ExUnit.Case
alias PatternMatching.User
alias PatternMatching.GuardClauses
describe "return_numbers/1" do
test "returns value or error" do
assert 1.5 == GuardClauses.return_numbers(1.5)
assert 100.99 == GuardClauses.return_numbers(100.99)
assert 10 == GuardClauses.return_numbers(10)
assert -500 == GuardClauses.return_numbers(-500)
assert :error == GuardClauses.return_numbers("1.5")
assert :error == GuardClauses.return_numbers(:an_atom)
assert :error == GuardClauses.return_numbers(%{name: "Tammy"})
end
end
describe "return_lists/1" do
test "returns value or error" do
assert [10, 1.5] == GuardClauses.return_lists([10, 1.5])
assert [] == GuardClauses.return_lists([])
assert [1, 2, 3] == GuardClauses.return_lists([1, 2, 3])
assert :error == GuardClauses.return_lists("1.5")
assert :error == GuardClauses.return_lists(10)
assert :error == GuardClauses.return_lists(10.75)
assert :error == GuardClauses.return_lists(:ok)
end
end
describe "return_any_size_tuples/1" do
test "returns value or error" do
assert {:ok, 1} == GuardClauses.return_any_size_tuples({:ok, 1})
assert {1, 2, 3} == GuardClauses.return_any_size_tuples({1, 2, 3})
assert {"a"} == GuardClauses.return_any_size_tuples({"a"})
assert {:ok, 2018, 12, 25} == GuardClauses.return_any_size_tuples({:ok, 2018, 12, 25})
assert :error == GuardClauses.return_any_size_tuples("1.5")
assert :error == GuardClauses.return_any_size_tuples(1.4)
assert :error == GuardClauses.return_any_size_tuples(:ok)
assert :error == GuardClauses.return_any_size_tuples([1, 2, 3])
end
end
describe "return_maps/1" do
test "returns value or error" do
assert %{name: "Jim"} == GuardClauses.return_maps(%{name: "Jim"})
assert %{name: "Tammy"} == GuardClauses.return_maps(%{name: "Tammy"})
assert %{email: "[email protected]"} == GuardClauses.return_maps(%{email: "[email protected]"})
assert %{} == GuardClauses.return_maps(%{})
assert :error == GuardClauses.return_maps({:ok, "valid"})
assert :error == GuardClauses.return_maps("1.5")
assert :error == GuardClauses.return_maps(1.4)
assert :error == GuardClauses.return_maps(:ok)
assert :error == GuardClauses.return_maps([1, 2, 3])
end
end
describe "run_function/1" do
test "returns the result of the function or error" do
fun = fn -> :ok end
assert :ok == GuardClauses.run_function(fun)
assert "I ran!!" == GuardClauses.run_function(fn -> "I ran!!" end)
assert :error == GuardClauses.run_function("ok?")
assert :error == GuardClauses.run_function([1, 2, 3])
end
end
describe "classify_user/1" do
test "idenitifies minors" do
assert {:ok, :minor} == GuardClauses.classify_user(%User{name: "Jill", age: 10})
assert {:ok, :minor} == GuardClauses.classify_user(%User{name: "Bill", age: 17})
assert {:ok, :minor} == GuardClauses.classify_user(%User{name: "Annie", age: 7})
end
test "identifies legal US adults" do
assert {:ok, :adult} == GuardClauses.classify_user(%User{name: "Tom", age: 50})
assert {:ok, :adult} == GuardClauses.classify_user(%User{name: "Mary", age: 35})
assert {:ok, :adult} == GuardClauses.classify_user(%User{name: "Lizzie", age: 18})
end
test "returns error when not a user" do
assert {:error, "Not a user"} == GuardClauses.classify_user(10)
assert {:error, "Not a user"} == GuardClauses.classify_user("I'm a user")
assert {:error, "Not a user"} == GuardClauses.classify_user({:ok, "Let me in"})
end
test "returns error when age not provided" do
assert {:error, "Age missing"} == GuardClauses.classify_user(%User{name: "Adam"})
assert {:error, "Age missing"} == GuardClauses.classify_user(%User{name: "Dave", age: nil})
end
test "returns error when age is negative" do
assert {:error, "Age cannot be negative"} == GuardClauses.classify_user(%User{name: "Steve", age: -1})
assert {:error, "Age cannot be negative"} == GuardClauses.classify_user(%User{name: "Sally", age: -50})
end
end
describe "award_child_points/3" do
setup _ do
timmy = %User{name: "Little Timmy", age: 11, points: 100, gender: :male}
grace = %User{name: "Grace", age: 13, points: 200, gender: :female}
[timmy: timmy, grace: grace]
end
test "when user's age is <= age cutoff, increase user's points by x amount", %{timmy: timmy, grace: grace} do
# test at the boundary condition
expected_points = timmy.points + 100
assert %User{points: ^expected_points} = GuardClauses.award_child_points(timmy, 11, 100)
# test below the boundary
expected_points = grace.points + 100
assert %User{points: ^expected_points} = GuardClauses.award_child_points(grace, 15, 100)
end
test "when user's age is older than the cutoff, return user unmodified", %{grace: grace} do
assert grace.age == 13
assert grace == GuardClauses.award_child_points(grace, 10, 2000)
assert grace == GuardClauses.award_child_points(grace, 11, 2000)
assert grace == GuardClauses.award_child_points(grace, 12, 2000)
end
end
end
| 41.576923 | 113 | 0.661055 |
73144623d636357e645971a1e15cfd9df6c8fb6d | 558 | ex | Elixir | apps/mishka_file/lib/mishka_file/application.ex | mojtaba-naserei/mishka-cms | 1f31f61347bab1aae6ba0d47c5515a61815db6c9 | [
"Apache-2.0"
] | 35 | 2021-06-26T09:05:50.000Z | 2022-03-30T15:41:22.000Z | apps/mishka_file/lib/mishka_file/application.ex | iArazar/mishka-cms | 8b579101d607d91e80834527c1508fe5f4ceefef | [
"Apache-2.0"
] | 101 | 2021-01-01T09:54:07.000Z | 2022-03-28T10:02:24.000Z | apps/mishka_file/lib/mishka_file/application.ex | iArazar/mishka-cms | 8b579101d607d91e80834527c1508fe5f4ceefef | [
"Apache-2.0"
] | 8 | 2021-01-17T17:08:07.000Z | 2022-03-11T16:12:06.000Z | defmodule MishkaFile.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
@impl true
def start(_type, _args) do
children = [
# Starts a worker by calling: MishkaFile.Worker.start_link(arg)
# {MishkaFile.Worker, arg}
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: MishkaFile.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 26.571429 | 69 | 0.713262 |
73144766da8d0c81a97e3605dfd43d262afe8c70 | 568 | exs | Elixir | test/views/error_view_test.exs | smpallen99/ucx_chat | 0dd98d0eb5e0537521844520ea2ba63a08fd3f19 | [
"MIT"
] | 60 | 2017-05-09T19:08:26.000Z | 2021-01-20T11:09:42.000Z | test/views/error_view_test.exs | smpallen99/ucx_chat | 0dd98d0eb5e0537521844520ea2ba63a08fd3f19 | [
"MIT"
] | 6 | 2017-05-10T15:43:16.000Z | 2020-07-15T07:14:41.000Z | test/views/error_view_test.exs | smpallen99/ucx_chat | 0dd98d0eb5e0537521844520ea2ba63a08fd3f19 | [
"MIT"
] | 10 | 2017-05-10T04:13:54.000Z | 2020-12-28T10:30:27.000Z | defmodule UcxChat.ErrorViewTest do
use UcxChat.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(UcxChat.ErrorView, "404.html", []) ==
"Page not found"
end
test "render 500.html" do
assert render_to_string(UcxChat.ErrorView, "500.html", []) ==
"Internal server error"
end
test "render any other" do
assert render_to_string(UcxChat.ErrorView, "505.html", []) ==
"Internal server error"
end
end
| 25.818182 | 66 | 0.676056 |
73146bf4be7365f070c2ab40c39e238e99556cca | 2,023 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/text_annotation.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/vision/lib/google_api/vision/v1/model/text_annotation.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/vision/lib/google_api/vision/v1/model/text_annotation.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Vision.V1.Model.TextAnnotation do
@moduledoc """
TextAnnotation contains a structured representation of OCR extracted text. The hierarchy of an OCR extracted text structure is like this: TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol Each structural component, starting from Page, may further have their own properties. Properties describe detected languages, breaks etc.. Please refer to the TextAnnotation.TextProperty message definition below for more detail.
## Attributes
* `pages` (*type:* `list(GoogleApi.Vision.V1.Model.Page.t)`, *default:* `nil`) - List of pages detected by OCR.
* `text` (*type:* `String.t`, *default:* `nil`) - UTF-8 text detected on the pages.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:pages => list(GoogleApi.Vision.V1.Model.Page.t()) | nil,
:text => String.t() | nil
}
field(:pages, as: GoogleApi.Vision.V1.Model.Page, type: :list)
field(:text)
end
defimpl Poison.Decoder, for: GoogleApi.Vision.V1.Model.TextAnnotation do
def decode(value, options) do
GoogleApi.Vision.V1.Model.TextAnnotation.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Vision.V1.Model.TextAnnotation do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 40.46 | 431 | 0.734058 |
7314883bc39f673a24077d0f63cac49f0f74bd1b | 123 | exs | Elixir | fw/test/fw_test.exs | ejc123/meeting_indicator | 7557971f1a8fece55b01c25dc633be1bb1b9cc2c | [
"Apache-2.0"
] | 31 | 2018-12-25T19:52:35.000Z | 2022-03-20T01:06:46.000Z | fw/test/fw_test.exs | QuantumProductions/scenic_font_test | ff8d0df6ade399039b9d9e816e398cb1ad80a7db | [
"BSD-3-Clause"
] | 8 | 2020-11-16T09:59:12.000Z | 2020-11-16T10:13:07.000Z | fw/test/fw_test.exs | QuantumProductions/scenic_font_test | ff8d0df6ade399039b9d9e816e398cb1ad80a7db | [
"BSD-3-Clause"
] | 2 | 2019-04-09T18:35:51.000Z | 2020-12-22T15:19:18.000Z | defmodule FwTest do
use ExUnit.Case
doctest Fw
test "greets the world" do
assert Fw.hello() == :world
end
end
| 13.666667 | 31 | 0.674797 |
7314c26782136ae68add2160a2538ec1f6884d0f | 5,542 | ex | Elixir | apps/language_server/lib/language_server/json_rpc.ex | E14/elixir-ls | bfa468117d853a30e36e2d3009b7b755e2fca6bf | [
"Apache-2.0"
] | 1 | 2020-06-01T11:25:33.000Z | 2020-06-01T11:25:33.000Z | apps/language_server/lib/language_server/json_rpc.ex | E14/elixir-ls | bfa468117d853a30e36e2d3009b7b755e2fca6bf | [
"Apache-2.0"
] | null | null | null | apps/language_server/lib/language_server/json_rpc.ex | E14/elixir-ls | bfa468117d853a30e36e2d3009b7b755e2fca6bf | [
"Apache-2.0"
] | 1 | 2021-06-15T21:08:17.000Z | 2021-06-15T21:08:17.000Z | defmodule ElixirLS.LanguageServer.JsonRpc do
@moduledoc """
Macros and functions for JSON RPC
Contains macros for creating or pattern-matching against packets and helper functions for sending
responses and notifications
"""
use GenServer
alias ElixirLS.Utils.WireProtocol
defstruct language_server: ElixirLS.LanguageServer.Server,
next_id: 1,
outgoing_requests: %{}
## Macros
defmacro notification(method, params) do
quote do
%{"method" => unquote(method), "params" => unquote(params), "jsonrpc" => "2.0"}
end
end
defmacro notification(method) do
quote do
%{"method" => unquote(method), "jsonrpc" => "2.0"}
end
end
defmacro request(id, method) do
quote do
%{
"id" => unquote(id),
"method" => unquote(method),
"jsonrpc" => "2.0"
}
end
end
defmacro request(id, method, params) do
quote do
%{
"id" => unquote(id),
"method" => unquote(method),
"params" => unquote(params),
"jsonrpc" => "2.0"
}
end
end
defmacro response(id, result) do
quote do
%{"result" => unquote(result), "id" => unquote(id), "jsonrpc" => "2.0"}
end
end
defmacro error_response(id, code, message) do
quote do
%{
"error" => %{"code" => unquote(code), "message" => unquote(message)},
"id" => unquote(id),
"jsonrpc" => "2.0"
}
end
end
## Utils
def notify(method, params) do
WireProtocol.send(notification(method, params))
end
def respond(id, result) do
WireProtocol.send(response(id, result))
end
def respond_with_error(id, type, message) do
{code, default_message} = error_code_and_message(type)
WireProtocol.send(error_response(id, code, message || default_message))
end
def show_message(type, message) do
notify("window/showMessage", %{type: message_type_code(type), message: to_string(message)})
end
def log_message(type, message) do
notify("window/logMessage", %{type: message_type_code(type), message: to_string(message)})
end
def register_capability_request(server \\ __MODULE__, method, options) do
send_request(server, "client/registerCapability", %{
"registrations" => [
%{
"id" => :crypto.hash(:sha, method) |> Base.encode16(),
"method" => method,
"registerOptions" => options
}
]
})
end
def show_message_request(server \\ __MODULE__, type, message, actions) do
send_request(server, "window/showMessageRequest", %{
"type" => message_type_code(type),
"message" => message,
"actions" => actions
})
end
# Used to intercept :user/:standard_io output
def print(str) do
log_message(:log, String.replace_suffix(str, "\n", ""))
end
# Used to intercept :standard_error output
def print_err(str) do
log_message(:warning, String.replace_suffix(str, "\n", ""))
end
## Client API
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, Keyword.delete(opts, :name), name: opts[:name])
end
def receive_packet(server \\ __MODULE__, packet) do
GenServer.call(server, {:packet, packet})
end
def send_request(server \\ __MODULE__, method, params) do
GenServer.call(server, {:request, method, params}, :infinity)
end
## Server callbacks
@impl GenServer
def init(opts) do
state =
if language_server = opts[:language_server] do
%__MODULE__{language_server: language_server}
else
%__MODULE__{}
end
{:ok, state}
end
@impl GenServer
def handle_call({:packet, notification(_) = packet}, _from, state) do
ElixirLS.LanguageServer.Server.receive_packet(packet)
{:reply, :ok, state}
end
@impl GenServer
def handle_call({:packet, request(_, _, _) = packet}, _from, state) do
ElixirLS.LanguageServer.Server.receive_packet(packet)
{:reply, :ok, state}
end
@impl GenServer
def handle_call({:packet, response(id, result)}, _from, state) do
%{^id => from} = state.outgoing_requests
GenServer.reply(from, {:ok, result})
state = update_in(state.outgoing_requests, &Map.delete(&1, id))
{:reply, :ok, state}
end
@impl GenServer
def handle_call({:packet, error_response(id, code, message)}, _from, state) do
%{^id => from} = state.outgoing_requests
GenServer.reply(from, {:error, code, message})
state = update_in(state.outgoing_requests, &Map.delete(&1, id))
{:reply, :ok, state}
end
@impl GenServer
def handle_call({:request, method, params}, from, state) do
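# Park the caller under the next request id; the reply is sent later, when the
# matching response or error packet comes back (see the :packet clauses above).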
WireProtocol.send(request(state.next_id, method, params))
state = update_in(state.outgoing_requests, &Map.put(&1, state.next_id, from))
state = %__MODULE__{state | next_id: state.next_id + 1}
{:noreply, state}
end
## Helpers
defp message_type_code(type) do
case type do
:error -> 1
:warning -> 2
:info -> 3
:log -> 4
end
end
defp error_code_and_message(:parse_error), do: {-32700, "Parse error"}
defp error_code_and_message(:invalid_request), do: {-32600, "Invalid Request"}
defp error_code_and_message(:method_not_found), do: {-32601, "Method not found"}
defp error_code_and_message(:invalid_params), do: {-32602, "Invalid params"}
defp error_code_and_message(:internal_error), do: {-32603, "Internal error"}
defp error_code_and_message(:server_error), do: {-32000, "Server error"}
defp error_code_and_message(:request_cancelled), do: {-32800, "Request cancelled"}
end
| 27.300493 | 99 | 0.648863 |
7314fd31be9c661a09aed3685b0a93ad843103d0 | 1,545 | ex | Elixir | sequence_supervisor/lib/sequence/server.ex | karlosmid/book_programming_elixir_12 | 53769b35728a82eddde3a21d4cbd45c1c21596a4 | [
"MIT"
] | null | null | null | sequence_supervisor/lib/sequence/server.ex | karlosmid/book_programming_elixir_12 | 53769b35728a82eddde3a21d4cbd45c1c21596a4 | [
"MIT"
] | null | null | null | sequence_supervisor/lib/sequence/server.ex | karlosmid/book_programming_elixir_12 | 53769b35728a82eddde3a21d4cbd45c1c21596a4 | [
"MIT"
] | null | null | null | defmodule Sequence.Server do
use GenServer
require Logger
@vsn "1"
#api interface
defmodule State do
defstruct current_number: 0, stash_pid: nil, delta: 1
end
def start_link(stash_pid) do
{:ok, _pid} = GenServer.start_link(__MODULE__, stash_pid, name: __MODULE__)
end
def next_number do
with number = GenServer.call(__MODULE__, :next_number),
do: "The next number is #{number}"
end
def increment_number(delta) do
GenServer.cast __MODULE__, {:increment_number, delta}
end
#api implementation
def init(stash_pid) do
current_number = Sequence.Stash.get_value stash_pid
{:ok, %State{current_number: current_number, stash_pid: stash_pid}}
end
def handle_call(:next_number, _from, state) do
{:reply, state.current_number, %{state | current_number: state.current_number + state.delta}}
end
def handle_cast({:increment_number, delta}, state) do
{:noreply, %{state | current_number: state.current_number + delta, delta: delta}}
end
def terminate(_reason, state) do
Sequence.Stash.save_value state.stash_pid, state.current_number
end
def format_status(_reason, [_pdict, state]) do
[data: [{'State', "My current state is '#{inspect state}', and I'm happy"}]]
end
def code_change("0", old_state = {current_number, stash_pid}, _extra) do
new_state = %State{current_number: current_number, stash_pid: stash_pid, delta: 1}
Logger.info "Changing code from 0 to 1"
Logger.info inspect(old_state)
Logger.info inspect(new_state)
{:ok, new_state}
end
end
| 32.87234 | 97 | 0.721683 |
73153940aff562e799a6e6041abcf4196c2e41a5 | 1,462 | ex | Elixir | lib/voxpop/grammar.ex | zovafit/voxpop | 0af5903c010eb1164cabfd468719468cc909fcf0 | [
"MIT"
] | 10 | 2016-03-06T01:57:44.000Z | 2021-02-05T02:01:10.000Z | lib/voxpop/grammar.ex | zovafit/voxpop | 0af5903c010eb1164cabfd468719468cc909fcf0 | [
"MIT"
] | 5 | 2016-03-06T00:59:21.000Z | 2016-03-07T05:00:26.000Z | lib/voxpop/grammar.ex | zovafit/voxpop | 0af5903c010eb1164cabfd468719468cc909fcf0 | [
"MIT"
] | 3 | 2016-03-06T22:56:49.000Z | 2017-10-10T05:37:29.000Z | defmodule Voxpop.Grammar.Generator do
alias Voxpop.Registry
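# generate/2 either parses the definition into a fresh registry or evaluates a
# registry that was already built at compile time (see Voxpop.Grammar below).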
def generate(definition, _context \\ %{}) do
case definition.registry do
nil -> Registry.parse(definition) |> Registry.evaluate
%Registry{} -> Registry.evaluate(definition.registry)
end
end
end
defmodule Voxpop.Grammar do
alias Voxpop.Grammar.Generator
defmacro __using__(_opts) do
quote do
import Voxpop.Grammar
@registry %Voxpop.Registry{}
@before_compile unquote(__MODULE__)
end
end
defmacro rule(key, rule) do
quote do
@registry Voxpop.Registry.add_rule(@registry, unquote(key), unquote(rule))
end
end
defmacro start(rule) do
quote do
@registry Voxpop.Registry.add_rule(@registry, :start, unquote(rule))
end
end
defmacro __before_compile__(_env) do
quote do
def generate do
Generator.generate %Voxpop.Grammar.Definition{registry: @registry}
end
def generate(start) when is_binary(start) do
registry = Voxpop.Registry.add_rule(@registry, :start, start)
Generator.generate %Voxpop.Grammar.Definition{registry: registry}
end
def generate(context) when is_list(context) do
registry = Voxpop.Registry.add_rules(@registry, context)
Generator.generate %Voxpop.Grammar.Definition{registry: registry}
end
end
end
end
defmodule Voxpop.Grammar.Definition do
defstruct rules: %{}, start: "", registry: nil
end
| 23.206349 | 80 | 0.692202 |
73153d9d66f50bd0c97be3ce42fb42ac45ad3053 | 2,617 | ex | Elixir | clients/testing/lib/google_api/testing/v1/model/ios_model.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/testing/lib/google_api/testing/v1/model/ios_model.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/testing/lib/google_api/testing/v1/model/ios_model.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Testing.V1.Model.IosModel do
@moduledoc """
A description of an iOS device tests may be run on.
## Attributes
* `deviceCapabilities` (*type:* `list(String.t)`, *default:* `nil`) - Device capabilities.
Copied from
https://developer.apple.com/library/archive/documentation/DeviceInformation/Reference/iOSDeviceCompatibility/DeviceCompatibilityMatrix/DeviceCompatibilityMatrix.html
* `formFactor` (*type:* `String.t`, *default:* `nil`) - Whether this device is a phone, tablet, wearable, etc.
* `id` (*type:* `String.t`, *default:* `nil`) - The unique opaque id for this model.
Use this for invoking the TestExecutionService.
* `name` (*type:* `String.t`, *default:* `nil`) - The human-readable name for this device model.
Examples: "iPhone 4s", "iPad Mini 2".
* `supportedVersionIds` (*type:* `list(String.t)`, *default:* `nil`) - The set of iOS major software versions this device supports.
* `tags` (*type:* `list(String.t)`, *default:* `nil`) - Tags for this dimension.
Examples: "default", "preview", "deprecated".
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:deviceCapabilities => list(String.t()),
:formFactor => String.t(),
:id => String.t(),
:name => String.t(),
:supportedVersionIds => list(String.t()),
:tags => list(String.t())
}
field(:deviceCapabilities, type: :list)
field(:formFactor)
field(:id)
field(:name)
field(:supportedVersionIds, type: :list)
field(:tags, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Testing.V1.Model.IosModel do
def decode(value, options) do
GoogleApi.Testing.V1.Model.IosModel.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Testing.V1.Model.IosModel do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.059701 | 171 | 0.692396 |
731554ab8df127123fe93748e50df6a8d9394590 | 1,474 | ex | Elixir | lib/livebook_web/live/settings_live/file_systems_component.ex | kianmeng/livebook | 8fe8d27d3d46b64d22126d1b97157330b87e611c | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/live/settings_live/file_systems_component.ex | kianmeng/livebook | 8fe8d27d3d46b64d22126d1b97157330b87e611c | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/live/settings_live/file_systems_component.ex | kianmeng/livebook | 8fe8d27d3d46b64d22126d1b97157330b87e611c | [
"Apache-2.0"
] | 1 | 2021-12-18T03:42:04.000Z | 2021-12-18T03:42:04.000Z | defmodule LivebookWeb.SettingsLive.FileSystemsComponent do
use LivebookWeb, :live_component
alias Livebook.FileSystem
@impl true
def render(assigns) do
~H"""
<div class="flex flex-col space-y-4">
<div class="flex flex-col space-y-4">
<%= for {file_system, index} <- Enum.with_index(@file_systems) do %>
<div class="flex items-center justify-between border border-gray-200 rounded-lg p-4">
<div class="flex items-center space-x-12">
<.file_system_info file_system={file_system} />
</div>
<%= unless is_struct(file_system, FileSystem.Local) do %>
<%= live_patch "Detach",
to: Routes.settings_path(@socket, :detach_file_system, index),
class: "button button-outlined-red" %>
<% end %>
</div>
<% end %>
</div>
<div class="flex">
<%= live_patch "Add file system",
to: Routes.settings_path(@socket, :add_file_system),
class: "button button-blue" %>
</div>
</div>
"""
end
defp file_system_info(%{file_system: %FileSystem.Local{}} = assigns) do
~H"""
<.labeled_text label="Type" text="Local disk" />
"""
end
defp file_system_info(%{file_system: %FileSystem.S3{}} = assigns) do
~H"""
<.labeled_text label="Type" text="S3" />
<.labeled_text label="Bucket URL" text={@file_system.bucket_url} />
"""
end
end
| 32.043478 | 95 | 0.580733 |
73155e889898bf938d70d503ab3e84e97114d9d7 | 4,584 | ex | Elixir | lib/musiccast/network.ex | almightycouch/musiccast | eba8107d4e0829f988693625d59d72090133a78a | [
"MIT"
] | 6 | 2017-03-18T21:04:52.000Z | 2020-09-27T00:43:02.000Z | lib/musiccast/network.ex | almightycouch/musiccast | eba8107d4e0829f988693625d59d72090133a78a | [
"MIT"
] | null | null | null | lib/musiccast/network.ex | almightycouch/musiccast | eba8107d4e0829f988693625d59d72090133a78a | [
"MIT"
] | 1 | 2021-01-11T17:23:22.000Z | 2021-01-11T17:23:22.000Z | defmodule MusicCast.Network do
@moduledoc """
A module for supervising a network of MusicCast™ devices.
The network is the heart of this MusicCast application. It is responsible for discovering
devices on the local network (see `MusicCast.UPnP.SSDPClient`) and keeping their state synchronized.
## Registry
The network act as a global registry for running `MusicCast.Network.Entity` processes. You can find a device on the registry
with `whereis/1`. To get a list of registered devices, see `which_devices/1`.
## Pub/Sub
You have the possibility to subscribe to network topoligy changes (for example, when a new device is discovered
or when a device goes offline). Additionally, you can subscribe to a device's changefeed directly.
See `subscribe/1` and `unsubscribe/1` for more details.
"""
use Supervisor
alias MusicCast.Network.Entity
@type device_id :: String.t
@doc """
Starts a network supervisor as part of a supervision tree.
"""
@spec start_link(Keyword.t) :: Supervisor.on_start
def start_link(options \\ []) do
options = Keyword.put(options, :name, __MODULE__)
Supervisor.start_link(__MODULE__, [], options)
end
@doc """
Stops the network supervisor.
"""
@spec stop(pid, term, timeout) :: :ok
def stop(pid, reason \\ :normal, timeout \\ :infinity) do
Supervisor.stop(pid, reason, timeout)
end
@doc """
Adds a new device entity to the network.
"""
@spec add_device(MusicCast.Network.Entity.ip_address, MusicCast.UPnP.Service.t) :: Supervisor.on_start_child
def add_device(addr, upnp_desc) do
Supervisor.start_child(__MODULE__, [addr, upnp_desc])
end
@doc """
Returns the device state value(s) for the given lookup key(s).
See `MusicCast.Network.Entity.__lookup__/2` for a list of allowed lookup keys.
"""
@spec lookup(device_id, MusicCast.Network.Entity.lookup_query) :: any
def lookup(device_id, keys \\ :all)
def lookup(pid, keys) when is_pid(pid), do: Entity.__lookup__(pid, keys)
def lookup(device_id, keys) do
if pid = whereis(device_id), do: lookup(pid, keys)
end
@doc """
Subscribes the current process to notifications from the given entity.
You can subscribe to network topology changes:
iex> MusicCast.subscribe(:network)
{:ok, #PID<0.80.0>}
iex> flush()
{:musiccast, :online, %MusicCast.Network.Entity{...}}
Or subscribe to status notifications from a specific device:
iex> MusicCast.subscribe("00A0DEDCF73E")
{:ok, #PID<0.200.0>}
iex> flush()
{:musiccast, :update, "00A0DEDCF73E", %{...}}
"""
@spec subscribe(:network | device_id) :: {:ok, pid}
def subscribe(entity)
def subscribe(:network), do: Registry.register(MusicCast.PubSub, "network", nil)
def subscribe(device_id), do: Registry.register(MusicCast.PubSub, device_id, nil)
@doc """
Unsubscribes the current process from notification from the given entity.
"""
@spec unsubscribe(:network | device_id) :: :ok
def unsubscribe(entity)
def unsubscribe(:network), do: Registry.unregister(MusicCast.PubSub, "network")
def unsubscribe(device_id), do: Registry.unregister(MusicCast.PubSub, device_id)
@doc """
Returns the PID for the registered device id or `nil` if the given `device_id` is not available.
"""
@spec whereis(device_id) :: pid | nil
def whereis(device_id) do
case Registry.lookup(MusicCast.Registry, device_id) do
[{pid, _host}] -> pid
[] -> nil
end
end
@doc """
Returns a list of all registered devices.
If you pass `:lazy` to this function, it will return a list of `{pid, device_id}` tuples:
iex> MusicCast.which_devices(:lazy)
[{#PID<0.200.0>, "00A0DEDCF73E"}]
Otherwise, you can specify a list of keys to lookup for:
iex> MusicCast.which_devices([:network_name, :host])
[{#PID<0.200.0>, ["Schlafzimmer", "192.168.0.63"]}]
See `lookup/2` for more informations about available lookup options.
"""
@spec which_devices(:lazy | MusicCast.Network.Entity.lookup_query) :: [tuple]
def which_devices(keys \\ :lazy)
def which_devices(:lazy), do: Enum.map(fetch_devices(), &{&1, List.first(Registry.keys(MusicCast.Registry, &1))})
def which_devices(keys), do: Enum.map(fetch_devices(), &{&1, Entity.__lookup__(&1, keys)})
#
# Callbacks
#
def init([]) do
children = [
worker(Entity, [], restart: :transient)
]
supervise(children, strategy: :simple_one_for_one)
end
#
# Helpers
#
defp fetch_devices do
Enum.map(Supervisor.which_children(__MODULE__), &elem(&1, 1))
end
end
| 32.28169 | 126 | 0.692845 |
731565343f3939fd29cd4c2aa4de8a9858740e17 | 526 | ex | Elixir | lib/edgedb/protocol/datatypes/int64.ex | nsidnev/edgedb-elixir | bade2b9daba2e83bfaa5915b2addb74f41610968 | [
"MIT"
] | 30 | 2021-05-19T08:54:44.000Z | 2022-03-11T22:52:25.000Z | lib/edgedb/protocol/datatypes/int64.ex | nsidnev/edgedb-elixir | bade2b9daba2e83bfaa5915b2addb74f41610968 | [
"MIT"
] | 3 | 2021-11-17T21:26:01.000Z | 2022-03-12T09:49:25.000Z | lib/edgedb/protocol/datatypes/int64.ex | nsidnev/edgedb-elixir | bade2b9daba2e83bfaa5915b2addb74f41610968 | [
"MIT"
] | 3 | 2021-08-29T14:55:41.000Z | 2022-03-12T01:30:35.000Z | defmodule EdgeDB.Protocol.Datatypes.Int64 do
use EdgeDB.Protocol.Datatype
@int64_max 0x7FFFFFFFFFFFFFFF
@int64_min -0x8000000000000000
defguard is_int64(number)
when is_integer(number) and @int64_min <= number and number <= @int64_max
defdatatype(type: integer())
@impl EdgeDB.Protocol.Datatype
def encode_datatype(number) when is_int64(number) do
<<number::int64>>
end
@impl EdgeDB.Protocol.Datatype
def decode_datatype(<<number::int64, rest::binary>>) do
{number, rest}
end
end
| 23.909091 | 84 | 0.731939 |
73156edd4f4de7ba5d93e52063e1058e48c922ef | 375 | ex | Elixir | ex_mon/lib/player.ex | herminiotorres/curso_elixir | 19519a2ac27b7c3e4973cdb2053c32dd61647cb5 | [
"MIT"
] | 1 | 2021-08-08T22:50:44.000Z | 2021-08-08T22:50:44.000Z | ex_mon/lib/player.ex | herminiotorres/curso-elixir | 19519a2ac27b7c3e4973cdb2053c32dd61647cb5 | [
"MIT"
] | null | null | null | ex_mon/lib/player.ex | herminiotorres/curso-elixir | 19519a2ac27b7c3e4973cdb2053c32dd61647cb5 | [
"MIT"
] | null | null | null | defmodule ExMon.Player do
@required_keys ~w(life moves name)a
@max_life 100
@enforce_keys @required_keys
defstruct @required_keys
def build(name, move_avg, move_rnd, move_heal) do
%__MODULE__{
life: @max_life,
moves: %{
move_avg: move_avg,
move_heal: move_heal,
move_rnd: move_rnd
},
name: name
}
end
end
| 18.75 | 51 | 0.634667 |
731577ec53ee54656519e039b5b7e1fdf7ef6e6c | 113 | ex | Elixir | lib/blue_jet/app/notification/trigger/proxy.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 44 | 2018-05-09T01:08:57.000Z | 2021-01-19T07:25:26.000Z | lib/blue_jet/app/notification/trigger/proxy.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 36 | 2018-05-08T23:59:54.000Z | 2018-09-28T13:50:30.000Z | lib/blue_jet/app/notification/trigger/proxy.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 9 | 2018-05-09T14:09:19.000Z | 2021-03-21T21:04:04.000Z | defmodule BlueJet.Notification.Trigger.Proxy do
use BlueJet, :proxy
def put(trigger, _, _), do: trigger
end
| 18.833333 | 47 | 0.743363 |
7315870e9f5c73fcc0857932468300e00598f9f3 | 3,651 | ex | Elixir | kousa/lib/beef/access/users.ex | isaboo/dogehouse | 51cb149fcd8c287919749c5f8d22b396c38011ee | [
"MIT"
] | null | null | null | kousa/lib/beef/access/users.ex | isaboo/dogehouse | 51cb149fcd8c287919749c5f8d22b396c38011ee | [
"MIT"
] | null | null | null | kousa/lib/beef/access/users.ex | isaboo/dogehouse | 51cb149fcd8c287919749c5f8d22b396c38011ee | [
"MIT"
] | null | null | null | defmodule Beef.Access.Users do
import Ecto.Query, warn: false
alias Beef.Queries.Users, as: Query
alias Beef.Repo
alias Beef.Schemas.User
alias Beef.Schemas.Room
alias Beef.Rooms
def find_by_github_ids(ids) do
Query.start()
|> Query.filter_by_github_ids(ids)
|> Query.select_id()
|> Repo.all()
end
def search_username(<<first_letter>> <> rest) when first_letter == ?@ do
search_username(rest)
end
def search_username(start_of_username) do
search_str = start_of_username <> "%"
Query.start()
|> where([u], ilike(u.username, ^search_str))
|> limit([], 15)
|> Repo.all()
end
@spec get_by_id_with_follow_info(any, any) :: any
def get_by_id_with_follow_info(me_id, them_id) do
Query.start()
|> Query.filter_by_id(them_id)
|> Query.follow_info(me_id)
|> Query.limit_one()
|> Repo.one()
end
def get_by_id(user_id) do
Repo.get(User, user_id)
end
def get_by_id_with_room_permissions(user_id) do
from(u in User,
where: u.id == ^user_id,
left_join: rp in Beef.Schemas.RoomPermission,
on: rp.userId == u.id and rp.roomId == u.currentRoomId,
select: %{u | roomPermissions: rp},
limit: 1
)
|> Repo.one()
end
def get_by_username(username) do
Query.start()
|> Query.filter_by_username(username)
|> Repo.one()
end
def get_by_username_with_follow_info(user_id, username) do
Query.start()
|> Query.filter_by_username(username)
|> Query.follow_info(user_id)
|> Query.limit_one()
|> Repo.one()
end
@fetch_limit 16
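# Fetches @fetch_limit (16) rows but returns at most 15 of them: the extra row
# only tells us whether another page exists, in which case the second tuple
# element is the offset to request next; otherwise it is nil.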
def search(query, offset) do
query_with_percent = "%" <> query <> "%"
items =
from(u in User,
where:
ilike(u.username, ^query_with_percent) or
ilike(u.displayName, ^query_with_percent),
left_join: cr in Room,
on: u.currentRoomId == cr.id and cr.isPrivate == false,
select: %{u | currentRoom: cr},
limit: @fetch_limit,
offset: ^offset
)
|> Repo.all()
{Enum.slice(items, 0, -1 + @fetch_limit),
if(length(items) == @fetch_limit, do: -1 + offset + @fetch_limit, else: nil)}
end
def get_users_in_current_room(user_id) do
case tuple_get_current_room_id(user_id) do
{:ok, nil} ->
{nil, []}
{:ok, current_room_id} ->
{current_room_id,
from(u in User,
where: u.currentRoomId == ^current_room_id,
left_join: rp in Beef.Schemas.RoomPermission,
on: rp.userId == u.id and rp.roomId == u.currentRoomId,
select: %{u | roomPermissions: rp}
)
|> Repo.all()}
_ ->
{nil, []}
end
end
# NB: Anything that touches Gen will have to be refactored away
# out of the database layer, but we are keeping it here for now
# to keep the transition smooth.
def tuple_get_current_room_id(user_id) do
case Onion.UserSession.get_current_room_id(user_id) do
{:ok, nil} ->
{nil, nil}
x ->
{:ok, x}
end
end
@spec get_by_id_with_current_room(any) :: any
def get_by_id_with_current_room(user_id) do
from(u in User,
left_join: a0 in assoc(u, :currentRoom),
where: u.id == ^user_id,
limit: 1,
preload: [
currentRoom: a0
]
)
|> Repo.one()
end
def get_current_room(user_id) do
room_id = get_current_room_id(user_id)
case room_id do
nil -> nil
id -> Rooms.get_room_by_id(id)
end
end
def get_current_room_id(user_id) do
try do
Onion.UserSession.get_current_room_id(user_id)
catch
_, _ -> nil
end
end
end
| 24.019737 | 82 | 0.617639 |
73159a6e42532100ad3a403534dc694f8f9a2997 | 10,422 | ex | Elixir | lib/web/conn.ex | wses-yoshida/antikythera | e108e59d2339edd0b0fad31ad4f41f56df45be55 | [
"Apache-2.0"
] | null | null | null | lib/web/conn.ex | wses-yoshida/antikythera | e108e59d2339edd0b0fad31ad4f41f56df45be55 | [
"Apache-2.0"
] | null | null | null | lib/web/conn.ex | wses-yoshida/antikythera | e108e59d2339edd0b0fad31ad4f41f56df45be55 | [
"Apache-2.0"
] | null | null | null | # Copyright(c) 2015-2019 ACCESS CO., LTD. All rights reserved.
use Croma
alias Antikythera.Http
defmodule Antikythera.Request do
@moduledoc """
Definition of `Antikythera.Request` struct.
"""
defmodule PathMatches do
use Croma.SubtypeOfMap, key_module: Croma.Atom, value_module: Croma.String
end
defmodule Sender do
alias Antikythera.GearName
@type sender_ip :: String.t
@type t :: {:web, sender_ip} | {:gear, GearName.t}
defun valid?(v :: term) :: boolean do
{:web, s} when is_binary(s) -> true
{:gear, n} -> GearName.valid?(n)
_ -> false
end
end
use Croma.Struct, recursive_new?: true, fields: [
method: Http.Method,
path_info: Antikythera.PathInfo,
path_matches: PathMatches,
query_params: Http.QueryParams,
headers: Http.Headers,
cookies: Http.ReqCookiesMap,
raw_body: Http.RawBody, # can be used to e.g. check HMAC of body
body: Http.Body,
sender: Sender,
]
end
defmodule Antikythera.Conn do
@moduledoc """
Definition of `Antikythera.Conn` struct, which represents a client-server connection.
This module also defines many functions to work with `Antikythera.Conn`.
"""
alias Antikythera.{Request, Context}
alias Antikythera.Session
alias Antikythera.FastJasonEncoder
defmodule BeforeSend do
use Croma.SubtypeOfList, elem_module: Croma.Function, default: []
end
defmodule Assigns do
use Croma.SubtypeOfMap, key_module: Croma.Atom, value_module: Croma.Any, default: %{}
end
use Croma.Struct, recursive_new?: true, fields: [
request: Request,
context: Context,
status: Croma.TypeGen.nilable(Http.Status.Int),
resp_headers: Http.Headers,
resp_cookies: Http.SetCookiesMap,
resp_body: Http.RawBody,
before_send: BeforeSend,
assigns: Assigns,
]
#
# Lower-level interfaces to manipulate `Conn.t`.
#
defun get_req_header(%__MODULE__{request: request}, key :: v[String.t]) :: nil | String.t do
request.headers[key]
end
defun get_req_query(%__MODULE__{request: request}, key :: v[String.t]) :: nil | String.t do
request.query_params[key]
end
defun put_status(conn :: v[t], status :: v[Http.Status.t]) :: t do
%__MODULE__{conn | status: Http.Status.code(status)}
end
defun put_resp_header(%__MODULE__{resp_headers: resp_headers} = conn, key :: v[String.t], value :: v[String.t]) :: t do
%__MODULE__{conn | resp_headers: Map.put(resp_headers, key, value)}
end
defun put_resp_headers(%__MODULE__{resp_headers: resp_headers} = conn, headers :: v[%{String.t => String.t}]) :: t do
%__MODULE__{conn | resp_headers: Map.merge(resp_headers, headers)}
end
defun put_resp_body(conn :: v[t], body :: v[String.t]) :: t do
%__MODULE__{conn | resp_body: body}
end
@doc """
Returns all request cookies.
"""
defun get_req_cookies(%__MODULE__{request: %Request{cookies: cookies}}) :: Http.ReqCookiesMap.t do
cookies
end
@doc """
Returns a request cookie specified by `name`.
"""
defun get_req_cookie(conn :: v[t], name :: v[String.t]) :: nil | String.t do
get_req_cookies(conn)[name]
end
@default_cookie_opts (if Antikythera.Env.compiling_for_cloud?(), do: %{path: "/", secure: true}, else: %{path: "/"})
@doc """
Adds a `set-cookie` response header to the given `Antikythera.Conn.t`.
`path` directive of `set-cookie` header is automatically filled with `"/"` if not explicitly given.
Also `secure` directive is filled by default in the cloud environments (assuming that it's serving with HTTPS).
Note that response cookies are stored separately from the other response headers,
as cookies require special treatment according to the HTTP specs.
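For example, to set a cookie that lives for one day (illustrative name and value):
    put_resp_cookie(conn, "locale", "en", %{max_age: 86_400})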
"""
defun put_resp_cookie(%__MODULE__{resp_cookies: resp_cookies} = conn,
name :: v[String.t],
value :: v[String.t],
opts0 :: Http.SetCookie.options_t \\ %{}) :: t do
opts = Map.merge(@default_cookie_opts, opts0)
set_cookie = %Http.SetCookie{value: value} |> Http.SetCookie.update!(opts)
%__MODULE__{conn | resp_cookies: Map.put(resp_cookies, name, set_cookie)}
end
@doc """
Tells the client to delete an existing cookie specified by `name`.
This is a wrapper around `put_resp_cookie/4` that sets an immediately expiring cookie (whose value is an empty string).
"""
defun put_resp_cookie_to_revoke(conn :: v[t], name :: v[String.t]) :: t do
put_resp_cookie(conn, name, "", %{max_age: 0})
end
defun register_before_send(%__MODULE__{before_send: before_send} = conn, callback :: (t -> t)) :: t do
%__MODULE__{conn | before_send: [callback | before_send]}
end
# These session-related functions assume that the `conn` is processed by `Antikythera.Plug.Session`
# and thus it contains `:session` field in `:assigns`.
defun get_session(%__MODULE__{assigns: %{session: session}}, key :: v[String.t]) :: any do
Session.get(session, key)
end
defun put_session(%__MODULE__{assigns: %{session: session}} = conn, key :: v[String.t], value :: any) :: t do
assign(conn, :session, Session.put(session, key, value))
end
defun delete_session(%__MODULE__{assigns: %{session: session}} = conn, key :: v[String.t]) :: t do
assign(conn, :session, Session.delete(session, key))
end
defun clear_session(%__MODULE__{assigns: %{session: session}} = conn) :: t do
assign(conn, :session, Session.clear(session))
end
defun renew_session(%__MODULE__{assigns: %{session: session}} = conn) :: t do
assign(conn, :session, Session.renew(session))
end
defun destroy_session(%__MODULE__{assigns: %{session: session}} = conn) :: t do
assign(conn, :session, Session.destroy(session))
end
defun assign(%__MODULE__{assigns: assigns} = conn, key :: v[atom], value :: any) :: t do
%__MODULE__{conn | assigns: Map.put(assigns, key, value)}
end
#
# Higher-level interfaces: Conveniences for common operations on `Conn.t`,
# (implemented using the lower-level interfaces defined above).
#
@doc """
Put `cache-control` response header for responses that must not be cached.
The actual header value to be set is: `"private, no-cache, no-store, max-age=0"`.
"""
defun no_cache(conn :: v[t]) :: t do
put_resp_header(conn, "cache-control", "private, no-cache, no-store, max-age=0")
end
@doc """
Returns an HTTP response that makes the client redirect to the specified `url`.
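For example (illustrative path):
    redirect(conn, "/login")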
"""
defun redirect(conn :: v[t], url :: v[String.t], status :: v[Http.Status.t] \\ 302) :: t do
conn
|> put_resp_header("location", url)
|> put_status(status)
end
@doc """
Returns a JSON response.
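For example (illustrative payload):
    json(conn, 200, %{message: "ok"})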
"""
defun json(%__MODULE__{resp_headers: resp_headers} = conn, status :: v[Http.Status.t], body :: v[%{(atom | String.t) => any} | [any]]) :: t do
{:ok, json} = FastJasonEncoder.encode(body)
%__MODULE__{conn |
status: Http.Status.code(status),
resp_headers: Map.put(resp_headers, "content-type", "application/json"),
resp_body: json,
}
end
@doc """
Renders a HAML template file and returns the dynamic content as an HTML response.
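For example, assuming the gear has a HAML template registered under the name `"index"`:
    render(conn, 200, "index", title: "Home")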
"""
defun render(%__MODULE__{context: context, resp_headers: resp_headers, assigns: assigns} = conn,
status :: v[Http.Status.t],
template_name :: v[String.t],
render_params :: Keyword.t(any),
opts :: Keyword.t(atom) \\ [layout: :application]) :: t do
flash = Map.get(assigns, :flash, %{})
template_module = AntikytheraCore.GearModule.template_module_from_context(context)
%__MODULE__{conn |
status: Http.Status.code(status),
resp_headers: Map.put(resp_headers, "content-type", "text/html; charset=utf-8"),
resp_body: html_content(template_module, template_name, [flash: flash] ++ render_params, opts[:layout]),
}
end
defunp html_content(template_module :: v[module],
template_name :: v[String.t],
render_params :: Keyword.t(any),
layout_name :: v[nil | atom]) :: String.t do
content = template_module.content_for(template_name, render_params)
{:safe, str} =
case layout_name do
nil -> content
layout ->
params_with_internal_content = [yield: content] ++ render_params
template_module.content_for("layout/#{layout}", params_with_internal_content)
end
str
end
@doc """
Sends a file which resides in `priv/` directory as a response.
`path` must be a file path relative to the `priv/` directory.
content-type header is inferred from the file's extension.
Don't use this function for sending large files; you should use CDN for large files (see `Antikythera.Asset`).
Also, if all you need to do is just to return a file (i.e. you don't need any authentication),
you should not use this function; just placing the file under `priv/static/` directory should suffice.
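For example, assuming `reports/summary.pdf` exists under the gear's `priv/` directory:
    send_priv_file(conn, 200, "reports/summary.pdf")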
"""
defun send_priv_file(%__MODULE__{context: context, resp_headers: resp_headers} = conn, status :: v[Http.Status.t], path :: Path.t) :: t do
# Protect from directory traversal attack
if String.contains?(path, "..") do
raise "path must not contain `..`"
end
%__MODULE__{conn |
status: Http.Status.code(status),
resp_headers: Map.put(resp_headers, "content-type", mimetype(path)),
resp_body: File.read!(filepath(context, path)),
}
end
defunp mimetype(path :: Path.t) :: String.t do
{top, sub, _} = :cow_mimetypes.all(path)
"#{top}/#{sub}"
end
defunp filepath(%Context{gear_entry_point: {mod, _}}, path :: Path.t) :: Path.t do
gear_name = Module.split(mod) |> hd() |> Macro.underscore() |> String.to_existing_atom()
Path.join(:code.priv_dir(gear_name), path)
end
@doc """
Gets a flash message stored in the given `t:Antikythera.Conn.t/0`.
"""
defun get_flash(%__MODULE__{assigns: assigns}, key :: v[String.t]) :: nil | String.t do
assigns.flash[key]
end
@doc """
Stores the flash message into the current `t:Antikythera.Conn.t/0`.
"""
defun put_flash(%__MODULE__{assigns: assigns} = conn, key :: v[String.t], value :: v[String.t]) :: t do
assign(conn, :flash, Map.put(assigns.flash, key, value))
end
end
| 36.440559 | 144 | 0.657743 |
73159bfade950ec147c5b0a26584125b2bcc228f | 544 | ex | Elixir | test/factories/factory.ex | ZiHawkEye/cadet | f7f9143699054d12bf08ef94e6e20a8ac58aea50 | [
"Apache-2.0"
] | null | null | null | test/factories/factory.ex | ZiHawkEye/cadet | f7f9143699054d12bf08ef94e6e20a8ac58aea50 | [
"Apache-2.0"
] | null | null | null | test/factories/factory.ex | ZiHawkEye/cadet | f7f9143699054d12bf08ef94e6e20a8ac58aea50 | [
"Apache-2.0"
] | null | null | null | defmodule Cadet.Factory do
@moduledoc """
Factory for testing
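For example, tests can build the upload fixture defined by `upload_factory/0` below via ExMachina:
    build(:upload)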
"""
use ExMachina.Ecto, repo: Cadet.Repo
use Cadet.Accounts.{AuthorizationFactory, UserFactory}
use Cadet.Assessments.{
AnswerFactory,
AssessmentFactory,
LibraryFactory,
QuestionFactory,
SubmissionFactory
}
use Cadet.Course.{GroupFactory, MaterialFactory}
def upload_factory do
%Plug.Upload{
content_type: "text/plain",
filename: sequence(:upload, &"upload#{&1}.txt"),
path: "test/fixtures/upload.txt"
}
end
end
| 20.148148 | 56 | 0.6875 |
7315e8f2a01170b9dcc4c24898b04fd5bd971c8a | 434 | ex | Elixir | flatten-array/lib/flatten_array.ex | tarcisiooliveira/Exercise-form-Exercism-in-Elixir | 2c9f14c264345d8aae09c701dc56128072b7f3c1 | [
"MIT"
] | null | null | null | flatten-array/lib/flatten_array.ex | tarcisiooliveira/Exercise-form-Exercism-in-Elixir | 2c9f14c264345d8aae09c701dc56128072b7f3c1 | [
"MIT"
] | null | null | null | flatten-array/lib/flatten_array.ex | tarcisiooliveira/Exercise-form-Exercism-in-Elixir | 2c9f14c264345d8aae09c701dc56128072b7f3c1 | [
"MIT"
] | null | null | null | defmodule FlattenArray do
@doc """
Accept a list and return the list flattened without nil values.
## Examples
iex> FlattenArray.flatten([1, [2], 3, nil])
[1,2,3]
iex> FlattenArray.flatten([nil, nil])
[]
"""
@spec flatten(list) :: list
def flatten([]), do: []
def flatten([head | tail]), do: flatten(head) ++ flatten(tail)
def flatten(nil), do: []
def flatten(head ), do: [head]
end
| 20.666667 | 67 | 0.589862 |
73160a306234fa506706b26882a8c5315af8ebec | 21,277 | exs | Elixir | lib/elixir/test/elixir/inspect_test.exs | jwarwick/elixir | de103c0f4e3240aa38967298ccb5f483a9e40c16 | [
"Apache-2.0"
] | 243 | 2020-02-03T03:48:51.000Z | 2021-11-08T12:56:25.000Z | lib/elixir/test/elixir/inspect_test.exs | jwarwick/elixir | de103c0f4e3240aa38967298ccb5f483a9e40c16 | [
"Apache-2.0"
] | 6 | 2021-03-19T12:33:21.000Z | 2021-04-02T17:52:45.000Z | lib/elixir/test/elixir/inspect_test.exs | jwarwick/elixir | de103c0f4e3240aa38967298ccb5f483a9e40c16 | [
"Apache-2.0"
] | null | null | null | Code.require_file("test_helper.exs", __DIR__)
defmodule Inspect.AtomTest do
use ExUnit.Case, async: true
doctest Inspect
test "basic" do
assert inspect(:foo) == ":foo"
end
test "empty" do
assert inspect(:"") == ":\"\""
end
test "true, false, nil" do
assert inspect(false) == "false"
assert inspect(true) == "true"
assert inspect(nil) == "nil"
end
test "with uppercase letters" do
assert inspect(:fOO) == ":fOO"
assert inspect(:FOO) == ":FOO"
end
test "aliases" do
assert inspect(Foo) == "Foo"
assert inspect(Foo.Bar) == "Foo.Bar"
assert inspect(Elixir) == "Elixir"
assert inspect(Elixir.Foo) == "Foo"
assert inspect(Elixir.Elixir) == "Elixir.Elixir"
assert inspect(Elixir.Elixir.Foo) == "Elixir.Elixir.Foo"
end
test "with integers" do
assert inspect(User1) == "User1"
assert inspect(:user1) == ":user1"
end
test "with trailing ? or !" do
assert inspect(:foo?) == ":foo?"
assert inspect(:bar!) == ":bar!"
assert inspect(:Foo?) == ":Foo?"
end
test "operators" do
assert inspect(:+) == ":+"
assert inspect(:<~) == ":<~"
assert inspect(:~>) == ":~>"
assert inspect(:&&&) == ":&&&"
assert inspect(:~~~) == ":~~~"
assert inspect(:<<~) == ":<<~"
assert inspect(:~>>) == ":~>>"
assert inspect(:<~>) == ":<~>"
assert inspect(:<|>) == ":<|>"
assert inspect(:+++) == ":+++"
assert inspect(:---) == ":---"
end
test "::" do
assert inspect(:"::") == ~s[:"::"]
end
test "with @" do
assert inspect(:@) == ":@"
assert inspect(:foo@bar) == ":foo@bar"
assert inspect(:foo@bar@) == ":foo@bar@"
assert inspect(:foo@bar@baz) == ":foo@bar@baz"
end
test "others" do
assert inspect(:...) == ":..."
assert inspect(:<<>>) == ":<<>>"
assert inspect(:{}) == ":{}"
assert inspect(:%{}) == ":%{}"
assert inspect(:%) == ":%"
assert inspect(:->) == ":->"
end
test "escaping" do
assert inspect(:"hy-phen") == ~s(:"hy-phen")
assert inspect(:"@hello") == ~s(:"@hello")
assert inspect(:"Wat!?") == ~s(:"Wat!?")
assert inspect(:"'quotes' and \"double quotes\"") == ~S(:"'quotes' and \"double quotes\"")
end
test "colors" do
opts = [syntax_colors: [atom: :red]]
assert inspect(:hello, opts) == "\e[31m:hello\e[0m"
opts = [syntax_colors: [reset: :cyan]]
assert inspect(:hello, opts) == ":hello"
end
test "Unicode" do
assert inspect(:olá) == ":olá"
assert inspect(:Olá) == ":Olá"
assert inspect(:Ólá) == ":Ólá"
assert inspect(:こんにちは世界) == ":こんにちは世界"
nfd = :unicode.characters_to_nfd_binary("olá")
assert inspect(String.to_atom(nfd)) == ":\"#{nfd}\""
end
end
defmodule Inspect.BitStringTest do
use ExUnit.Case, async: true
test "bitstring" do
assert inspect(<<1::12-integer-signed>>) == "<<0, 1::size(4)>>"
assert inspect(<<1, 2, 3, 4, 5>>, pretty: true, width: 10) == "<<1, 2, 3,\n 4, 5>>"
end
test "binary" do
assert inspect("foo") == "\"foo\""
assert inspect(<<?a, ?b, ?c>>) == "\"abc\""
end
test "escaping" do
assert inspect("f\no") == "\"f\\no\""
assert inspect("f\\o") == "\"f\\\\o\""
assert inspect("f\ao") == "\"f\\ao\""
assert inspect("\a\b\d\e\f\n\r\s\t\v") == "\"\\a\\b\\d\\e\\f\\n\\r \\t\\v\""
end
test "UTF-8" do
assert inspect(" ゆんゆん") == "\" ゆんゆん\""
# BOM
assert inspect("\uFEFFhello world") == "\"\\uFEFFhello world\""
end
test "infer" do
assert inspect(<<"john", 193, "doe">>, binaries: :infer) ==
~s(<<106, 111, 104, 110, 193, 100, 111, 101>>)
assert inspect(<<"john">>, binaries: :infer) == ~s("john")
assert inspect(<<193>>, binaries: :infer) == ~s(<<193>>)
end
test "as strings" do
assert inspect(<<"john", 193, "doe">>, binaries: :as_strings) == ~s("john\\xC1doe")
assert inspect(<<"john">>, binaries: :as_strings) == ~s("john")
assert inspect(<<193>>, binaries: :as_strings) == ~s("\\xC1")
end
test "as binaries" do
assert inspect(<<"john", 193, "doe">>, binaries: :as_binaries) ==
"<<106, 111, 104, 110, 193, 100, 111, 101>>"
assert inspect(<<"john">>, binaries: :as_binaries) == "<<106, 111, 104, 110>>"
assert inspect(<<193>>, binaries: :as_binaries) == "<<193>>"
# Any base other than :decimal implies "binaries: :as_binaries"
assert inspect("abc", base: :hex) == "<<0x61, 0x62, 0x63>>"
assert inspect("abc", base: :octal) == "<<0o141, 0o142, 0o143>>"
# Size is still represented as decimal
assert inspect(<<10, 11, 12::4>>, base: :hex) == "<<0xA, 0xB, 0xC::size(4)>>"
end
test "unprintable with limit" do
assert inspect(<<193, 193, 193, 193>>, limit: 3) == "<<193, 193, 193, ...>>"
end
test "printable limit" do
assert inspect("hello world", printable_limit: 4) == ~s("hell" <> ...)
# Non-printable characters after the limit don't matter
assert inspect("hello world" <> <<0>>, printable_limit: 4) == ~s("hell" <> ...)
# Non printable strings aren't affected by printable limit
assert inspect(<<0, 1, 2, 3, 4>>, printable_limit: 3) == ~s(<<0, 1, 2, 3, 4>>)
end
end
defmodule Inspect.NumberTest do
use ExUnit.Case, async: true
test "integer" do
assert inspect(100) == "100"
end
test "decimal" do
assert inspect(100, base: :decimal) == "100"
end
test "hex" do
assert inspect(100, base: :hex) == "0x64"
assert inspect(-100, base: :hex) == "-0x64"
end
test "octal" do
assert inspect(100, base: :octal) == "0o144"
assert inspect(-100, base: :octal) == "-0o144"
end
test "binary" do
assert inspect(86, base: :binary) == "0b1010110"
assert inspect(-86, base: :binary) == "-0b1010110"
end
test "float" do
assert inspect(1.0) == "1.0"
assert inspect(1.0e10) == "1.0e10"
assert inspect(1.0e10) == "1.0e10"
assert inspect(1.0e-10) == "1.0e-10"
end
test "integer colors" do
opts = [syntax_colors: [number: :red]]
assert inspect(123, opts) == "\e[31m123\e[0m"
opts = [syntax_colors: [reset: :cyan]]
assert inspect(123, opts) == "123"
end
test "float colors" do
opts = [syntax_colors: [number: :red]]
assert inspect(1.3, opts) == "\e[31m1.3\e[0m"
opts = [syntax_colors: [reset: :cyan]]
assert inspect(1.3, opts) == "1.3"
end
end
defmodule Inspect.TupleTest do
use ExUnit.Case
test "basic" do
assert inspect({1, "b", 3}) == "{1, \"b\", 3}"
assert inspect({1, "b", 3}, pretty: true, width: 1) == "{1,\n \"b\",\n 3}"
assert inspect({1, "b", 3}, pretty: true, width: 10) == "{1, \"b\",\n 3}"
end
test "empty" do
assert inspect({}) == "{}"
end
test "with limit" do
assert inspect({1, 2, 3, 4}, limit: 3) == "{1, 2, 3, ...}"
end
test "colors" do
opts = [syntax_colors: []]
assert inspect({}, opts) == "{}"
opts = [syntax_colors: [reset: :cyan]]
assert inspect({}, opts) == "{}"
assert inspect({:x, :y}, opts) == "{:x, :y}"
opts = [syntax_colors: [reset: :cyan, atom: :red]]
assert inspect({}, opts) == "{}"
assert inspect({:x, :y}, opts) == "{\e[31m:x\e[36m, \e[31m:y\e[36m}"
opts = [syntax_colors: [tuple: :green, reset: :cyan, atom: :red]]
assert inspect({}, opts) == "\e[32m{\e[36m\e[32m}\e[36m"
assert inspect({:x, :y}, opts) ==
"\e[32m{\e[36m\e[31m:x\e[36m\e[32m,\e[36m \e[31m:y\e[36m\e[32m}\e[36m"
end
end
defmodule Inspect.ListTest do
use ExUnit.Case, async: true
test "basic" do
assert inspect([1, "b", 3]) == "[1, \"b\", 3]"
assert inspect([1, "b", 3], pretty: true, width: 1) == "[1,\n \"b\",\n 3]"
end
test "printable" do
assert inspect('abc') == "'abc'"
end
test "printable limit" do
assert inspect('hello world', printable_limit: 4) == ~s('hell' ++ ...)
# Non printable characters after the limit don't matter
assert inspect('hello world' ++ [0], printable_limit: 4) == ~s('hell' ++ ...)
# Non printable strings aren't affected by printable limit
assert inspect([0, 1, 2, 3, 4], printable_limit: 3) == ~s([0, 1, 2, 3, 4])
end
test "keyword" do
assert inspect(a: 1) == "[a: 1]"
assert inspect(a: 1, b: 2) == "[a: 1, b: 2]"
assert inspect(a: 1, a: 2, b: 2) == "[a: 1, a: 2, b: 2]"
assert inspect("123": 1) == ~s(["123": 1])
assert inspect([foo: [1, 2, 3], baz: [4, 5, 6]], pretty: true, width: 20) ==
"[\n foo: [1, 2, 3],\n baz: [4, 5, 6]\n]"
end
test "keyword operators" do
assert inspect("::": 1, +: 2) == ~s(["::": 1, +: 2])
end
test "opt infer" do
assert inspect('john' ++ [0] ++ 'doe', charlists: :infer) ==
"[106, 111, 104, 110, 0, 100, 111, 101]"
assert inspect('john', charlists: :infer) == "'john'"
assert inspect([0], charlists: :infer) == "[0]"
end
test "opt as strings" do
assert inspect('john' ++ [0] ++ 'doe', charlists: :as_charlists) == "'john\\0doe'"
assert inspect('john', charlists: :as_charlists) == "'john'"
assert inspect([0], charlists: :as_charlists) == "'\\0'"
end
test "opt as lists" do
assert inspect('john' ++ [0] ++ 'doe', charlists: :as_lists) ==
"[106, 111, 104, 110, 0, 100, 111, 101]"
assert inspect('john', charlists: :as_lists) == "[106, 111, 104, 110]"
assert inspect([0], charlists: :as_lists) == "[0]"
end
test "non printable" do
assert inspect([{:b, 1}, {:a, 1}]) == "[b: 1, a: 1]"
end
test "improper" do
assert inspect([:foo | :bar]) == "[:foo | :bar]"
assert inspect([1, 2, 3, 4, 5 | 42], pretty: true, width: 1) ==
"[1,\n 2,\n 3,\n 4,\n 5 |\n 42]"
end
test "nested" do
assert inspect(Enum.reduce(1..100, [0], &[&2, Integer.to_string(&1)]), limit: 5) ==
"[[[[[[...], ...], \"97\"], \"98\"], \"99\"], \"100\"]"
assert inspect(Enum.reduce(1..100, [0], &[&2 | Integer.to_string(&1)]), limit: 5) ==
"[[[[[[...] | \"96\"] | \"97\"] | \"98\"] | \"99\"] | \"100\"]"
end
test "codepoints" do
assert inspect('é') == "[233]"
end
test "empty" do
assert inspect([]) == "[]"
end
test "with limit" do
assert inspect([1, 2, 3, 4], limit: 3) == "[1, 2, 3, ...]"
end
test "colors" do
opts = [syntax_colors: []]
assert inspect([], opts) == "[]"
opts = [syntax_colors: [reset: :cyan]]
assert inspect([], opts) == "[]"
assert inspect([:x, :y], opts) == "[:x, :y]"
opts = [syntax_colors: [reset: :cyan, atom: :red]]
assert inspect([], opts) == "[]"
assert inspect([:x, :y], opts) == "[\e[31m:x\e[36m, \e[31m:y\e[36m]"
opts = [syntax_colors: [reset: :cyan, atom: :red, list: :green]]
assert inspect([], opts) == "\e[32m[]\e[36m"
assert inspect([:x, :y], opts) ==
"\e[32m[\e[36m\e[31m:x\e[36m\e[32m,\e[36m \e[31m:y\e[36m\e[32m]\e[36m"
end
test "keyword with colors" do
opts = [syntax_colors: [reset: :cyan, list: :green, number: :blue]]
assert inspect([], opts) == "\e[32m[]\e[36m"
assert inspect([a: 9999], opts) == "\e[32m[\e[36ma: \e[34m9999\e[36m\e[32m]\e[36m"
opts = [syntax_colors: [reset: :cyan, atom: :red, list: :green, number: :blue]]
assert inspect([], opts) == "\e[32m[]\e[36m"
assert inspect([a: 9999], opts) == "\e[32m[\e[36m\e[31ma:\e[36m \e[34m9999\e[36m\e[32m]\e[36m"
end
test "limit with colors" do
opts = [limit: 1, syntax_colors: [reset: :cyan, list: :green, atom: :red]]
assert inspect([], opts) == "\e[32m[]\e[36m"
assert inspect([:x, :y], opts) == "\e[32m[\e[36m\e[31m:x\e[36m\e[32m,\e[36m ...\e[32m]\e[36m"
end
end
defmodule Inspect.MapTest do
use ExUnit.Case
test "basic" do
assert inspect(%{1 => "b"}) == "%{1 => \"b\"}"
assert inspect(%{1 => "b", 2 => "c"}, pretty: true, width: 1) ==
"%{\n 1 => \"b\",\n 2 => \"c\"\n}"
end
test "keyword" do
assert inspect(%{a: 1}) == "%{a: 1}"
assert inspect(%{a: 1, b: 2}) == "%{a: 1, b: 2}"
assert inspect(%{a: 1, b: 2, c: 3}) == "%{a: 1, b: 2, c: 3}"
end
test "with limit" do
assert inspect(%{1 => 1, 2 => 2, 3 => 3, 4 => 4}, limit: 3) ==
"%{1 => 1, 2 => 2, 3 => 3, ...}"
end
defmodule Public do
defstruct key: 0
end
defmodule Private do
end
test "public struct" do
assert inspect(%Public{key: 1}) == "%Inspect.MapTest.Public{key: 1}"
end
test "public modified struct" do
public = %Public{key: 1}
assert inspect(Map.put(public, :foo, :bar)) ==
"%{__struct__: Inspect.MapTest.Public, foo: :bar, key: 1}"
end
test "private struct" do
assert inspect(%{__struct__: Private, key: 1}) ==
"%{__struct__: Inspect.MapTest.Private, key: 1}"
end
defmodule Failing do
defstruct key: 0
defimpl Inspect do
def inspect(struct, _) do
struct.unknown
end
end
end
test "bad implementation unsafe" do
msg =
"got KeyError with message \"key :unknown not found in: " <>
"%{__struct__: Inspect.MapTest.Failing, key: 0}\" while " <>
"inspecting %{__struct__: Inspect.MapTest.Failing, key: 0}"
try do
inspect(%Failing{}, safe: false)
rescue
e in Inspect.Error ->
assert Exception.message(e) =~ msg
assert [{Inspect.Inspect.MapTest.Failing, :inspect, 2, _} | _] = __STACKTRACE__
else
_ -> flunk("expected failure")
end
end
test "bad implementation safe" do
msg =
"got KeyError with message \"key :unknown not found in: " <>
"%{__struct__: Inspect.MapTest.Failing, key: 0}\" while " <>
"inspecting %{__struct__: Inspect.MapTest.Failing, key: 0}"
assert inspect(%Failing{}) == inspect(%Inspect.Error{message: "#{msg}"})
end
test "bad implementation safe disables colors" do
msg =
"got KeyError with message \\\"key :unknown not found in: " <>
"%{__struct__: Inspect.MapTest.Failing, key: 0}\\\" while " <>
"inspecting %{__struct__: Inspect.MapTest.Failing, key: 0}"
assert inspect(%Failing{}, syntax_colors: [atom: [:green]]) =~ msg
end
test "exception" do
assert inspect(%RuntimeError{message: "runtime error"}) ==
"%RuntimeError{message: \"runtime error\"}"
end
test "colors" do
opts = [syntax_colors: [reset: :cyan, atom: :red, number: :magenta]]
assert inspect(%{1 => 2}, opts) == "%{\e[35m1\e[36m => \e[35m2\e[36m}"
assert inspect(%{a: 1}, opts) == "%{\e[31ma:\e[36m \e[35m1\e[36m}"
assert inspect(%Public{key: 1}, opts) ==
"%Inspect.MapTest.Public{\e[31mkey:\e[36m \e[35m1\e[36m}"
opts = [syntax_colors: [reset: :cyan, atom: :red, map: :green, number: :blue]]
assert inspect(%{a: 9999}, opts) ==
"\e[32m%{\e[36m" <> "\e[31ma:\e[36m " <> "\e[34m9999\e[36m" <> "\e[32m}\e[36m"
end
defmodule StructWithoutOptions do
@derive Inspect
defstruct [:a, :b, :c, :d]
end
test "struct without options" do
struct = %StructWithoutOptions{a: 1, b: 2, c: 3, d: 4}
assert inspect(struct) == "%Inspect.MapTest.StructWithoutOptions{a: 1, b: 2, c: 3, d: 4}"
assert inspect(struct, pretty: true, width: 1) ==
"%Inspect.MapTest.StructWithoutOptions{\n a: 1,\n b: 2,\n c: 3,\n d: 4\n}"
end
defmodule StructWithOnlyOption do
@derive {Inspect, only: [:b, :c]}
defstruct [:a, :b, :c, :d]
end
test "struct with :only option" do
struct = %StructWithOnlyOption{a: 1, b: 2, c: 3, d: 4}
assert inspect(struct) == "#Inspect.MapTest.StructWithOnlyOption<b: 2, c: 3, ...>"
assert inspect(struct, pretty: true, width: 1) ==
"#Inspect.MapTest.StructWithOnlyOption<\n b: 2,\n c: 3,\n ...\n>"
struct = %{struct | c: [1, 2, 3, 4]}
assert inspect(struct) == "#Inspect.MapTest.StructWithOnlyOption<b: 2, c: [1, 2, 3, 4], ...>"
end
defmodule StructWithEmptyOnlyOption do
@derive {Inspect, only: []}
defstruct [:a, :b, :c, :d]
end
test "struct with empty :only option" do
struct = %StructWithEmptyOnlyOption{a: 1, b: 2, c: 3, d: 4}
assert inspect(struct) == "#Inspect.MapTest.StructWithEmptyOnlyOption<...>"
end
defmodule StructWithAllFieldsInOnlyOption do
@derive {Inspect, only: [:a, :b]}
defstruct [:a, :b]
end
test "struct with all fields in the :only option" do
struct = %StructWithAllFieldsInOnlyOption{a: 1, b: 2}
assert inspect(struct) == "%Inspect.MapTest.StructWithAllFieldsInOnlyOption{a: 1, b: 2}"
assert inspect(struct, pretty: true, width: 1) ==
"%Inspect.MapTest.StructWithAllFieldsInOnlyOption{\n a: 1,\n b: 2\n}"
end
defmodule StructWithExceptOption do
@derive {Inspect, except: [:b, :c]}
defstruct [:a, :b, :c, :d]
end
test "struct with :except option" do
struct = %StructWithExceptOption{a: 1, b: 2, c: 3, d: 4}
assert inspect(struct) == "#Inspect.MapTest.StructWithExceptOption<a: 1, d: 4, ...>"
assert inspect(struct, pretty: true, width: 1) ==
"#Inspect.MapTest.StructWithExceptOption<\n a: 1,\n d: 4,\n ...\n>"
end
defmodule StructWithBothOnlyAndExceptOptions do
@derive {Inspect, only: [:a, :b], except: [:b, :c]}
defstruct [:a, :b, :c, :d]
end
test "struct with both :only and :except options" do
struct = %StructWithBothOnlyAndExceptOptions{a: 1, b: 2, c: 3, d: 4}
assert inspect(struct) == "#Inspect.MapTest.StructWithBothOnlyAndExceptOptions<a: 1, ...>"
assert inspect(struct, pretty: true, width: 1) ==
"#Inspect.MapTest.StructWithBothOnlyAndExceptOptions<\n a: 1,\n ...\n>"
end
end
defmodule Inspect.OthersTest do
use ExUnit.Case, async: true
def fun() do
fn -> :ok end
end
def unquote(:"weirdly named/fun-")() do
fn -> :ok end
end
test "external Elixir funs" do
bin = inspect(&Enum.map/2)
assert bin == "&Enum.map/2"
assert inspect(&__MODULE__."weirdly named/fun-"/0) ==
~s(&Inspect.OthersTest."weirdly named/fun-"/0)
end
test "external Erlang funs" do
bin = inspect(&:lists.map/2)
assert bin == "&:lists.map/2"
end
test "outdated functions" do
defmodule V do
def fun do
fn -> 1 end
end
end
Application.put_env(:elixir, :anony, V.fun())
Application.put_env(:elixir, :named, &V.fun/0)
:code.delete(V)
:code.purge(V)
anony = Application.get_env(:elixir, :anony)
named = Application.get_env(:elixir, :named)
assert inspect(anony) =~ ~r"#Function<0.\d+/0 in Inspect.OthersTest.V>"
assert inspect(named) =~ ~r"&Inspect.OthersTest.V.fun/0"
after
Application.delete_env(:elixir, :anony)
Application.delete_env(:elixir, :named)
end
test "other funs" do
assert "#Function<" <> _ = inspect(fn x -> x + 1 end)
assert "#Function<" <> _ = inspect(fun())
opts = [syntax_colors: []]
assert "#Function<" <> _ = inspect(fun(), opts)
opts = [syntax_colors: [reset: :red]]
assert "#Function<" <> rest = inspect(fun(), opts)
assert String.ends_with?(rest, ">")
inspected = inspect(__MODULE__."weirdly named/fun-"())
assert inspected =~ ~r(#Function<\d+\.\d+/0 in Inspect\.OthersTest\."weirdly named/fun-"/0>)
end
test "map set" do
assert "#MapSet<" <> _ = inspect(MapSet.new())
end
test "PIDs" do
assert "#PID<" <> _ = inspect(self())
opts = [syntax_colors: []]
assert "#PID<" <> _ = inspect(self(), opts)
opts = [syntax_colors: [reset: :cyan]]
assert "#PID<" <> rest = inspect(self(), opts)
assert String.ends_with?(rest, ">")
end
test "references" do
assert "#Reference<" <> _ = inspect(make_ref())
end
test "regex" do
assert inspect(~r(foo)m) == "~r/foo/m"
assert inspect(Regex.compile!("a\\/b")) == "~r/a\\/b/"
assert inspect(Regex.compile!("\a\b\d\e\f\n\r\s\t\v/")) ==
"~r/\\a\\x08\\x7F\\x1B\\f\\n\\r \\t\\v\\//"
assert inspect(~r<\a\b\d\e\f\n\r\s\t\v/>) == "~r/\\a\\b\\d\\e\\f\\n\\r\\s\\t\\v\\//"
assert inspect(~r" \\/ ") == "~r/ \\\\\\/ /"
assert inspect(~r/hi/, syntax_colors: [regex: :red]) == "\e[31m~r/hi/\e[0m"
end
test "inspect_fun" do
fun = fn
integer, _opts when is_integer(integer) ->
"<#{integer}>"
%URI{} = uri, _opts ->
"#URI<#{uri}>"
term, opts ->
Inspect.inspect(term, opts)
end
opts = [inspect_fun: fun]
assert inspect(1000, opts) == "<1000>"
assert inspect([1000], opts) == "[<1000>]"
uri = URI.parse("https://elixir-lang.org")
assert inspect(uri, opts) == "#URI<https://elixir-lang.org>"
assert inspect([uri], opts) == "[#URI<https://elixir-lang.org>]"
end
defmodule Nested do
defstruct nested: nil
defimpl Inspect do
import Inspect.Algebra
def inspect(%Nested{nested: nested}, opts) do
indent = Keyword.get(opts.custom_options, :indent, 2)
level = Keyword.get(opts.custom_options, :level, 1)
nested_str =
Kernel.inspect(nested, custom_options: [level: level + 1, indent: indent + 2])
concat(
nest(line("#Nested[##{level}/#{indent}]<", nested_str), indent),
nest(line("", ">"), indent - 2)
)
end
end
end
test "custom_options" do
assert inspect(%Nested{nested: %Nested{nested: 42}}) ==
"#Nested[#1/2]<\n #Nested[#2/4]<\n 42\n >\n>"
end
end
| 29.79972 | 98 | 0.562908 |
731616a727181b54ca420360a75c79348b8d8f59 | 418 | exs | Elixir | priv/repo/tenant_migrations/20170825154312_create_products.exs | dreamingechoes/multi_tenancex | cfe3feb6b7eb25559f9abaa4da89e4aafc9ad2ec | [
"MIT"
] | 30 | 2018-06-27T17:51:53.000Z | 2021-04-24T03:17:55.000Z | priv/repo/tenant_migrations/20170825154312_create_products.exs | dreamingechoes/multi_tenancex | cfe3feb6b7eb25559f9abaa4da89e4aafc9ad2ec | [
"MIT"
] | null | null | null | priv/repo/tenant_migrations/20170825154312_create_products.exs | dreamingechoes/multi_tenancex | cfe3feb6b7eb25559f9abaa4da89e4aafc9ad2ec | [
"MIT"
] | 7 | 2018-07-24T17:56:14.000Z | 2019-12-31T02:10:13.000Z | defmodule MultiTenancex.Repo.Migrations.CreateProducts do
use Ecto.Migration
def change do
create table(:products) do
add(:name, :string)
add(:description, :string)
add(:image, :string)
add(:price, :float)
add(:units, :integer)
add(:company_id, references(:companies, on_delete: :nothing))
timestamps()
end
create(index(:products, [:company_id]))
end
end
| 22 | 67 | 0.648325 |
7316203921aebc33a5cddb36ae8724d4eb487426 | 5,958 | ex | Elixir | lib/glimesh_web/controllers/user_auth.ex | Heiwa1580/glimesh.tv | c5e1ed4d1011b4e2a54c173d142e7eb857457477 | [
"MIT"
] | 1 | 2020-08-02T00:12:28.000Z | 2020-08-02T00:12:28.000Z | lib/glimesh_web/controllers/user_auth.ex | Heiwa1580/glimesh.tv | c5e1ed4d1011b4e2a54c173d142e7eb857457477 | [
"MIT"
] | null | null | null | lib/glimesh_web/controllers/user_auth.ex | Heiwa1580/glimesh.tv | c5e1ed4d1011b4e2a54c173d142e7eb857457477 | [
"MIT"
] | null | null | null | defmodule GlimeshWeb.UserAuth do
import Plug.Conn
import Phoenix.Controller
alias Glimesh.Accounts
alias GlimeshWeb.Endpoint
alias GlimeshWeb.Router.Helpers, as: Routes
# Make the remember me cookie valid for 60 days.
# If you want bump or reduce this value, also change
# the token expiry itself in UserToken.
@max_age 60 * 60 * 24 * 60
@remember_me_cookie "user_remember_me"
@remember_me_options [sign: true, max_age: @max_age]
@doc """
Logs the user in.
It renews the session ID and clears the whole session
to avoid fixation attacks. See the renew_session
function to customize this behaviour.
It also sets a `:live_socket_id` key in the session,
so LiveView sessions are identified and automatically
disconnected on log out. The line can be safely removed
if you are not using LiveView.
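Typically called from a session controller once the user's credentials have been
verified, for example:
    UserAuth.log_in_user(conn, user, %{"remember_me" => "true"})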
"""
def log_in_user(conn, user, params \\ %{}) do
token = Accounts.generate_user_session_token(user)
user_return_to = get_session(conn, :user_return_to)
conn
|> renew_session()
|> put_session(:user_token, token)
|> put_session(:live_socket_id, "users_sessions:#{Base.url_encode64(token)}")
|> put_session(:locale, user.locale)
|> maybe_write_remember_me_cookie(token, params)
|> redirect(to: user_return_to || signed_in_path(conn))
end
defp maybe_write_remember_me_cookie(conn, token, %{"remember_me" => "true"}) do
put_resp_cookie(conn, @remember_me_cookie, token, @remember_me_options)
end
defp maybe_write_remember_me_cookie(conn, _token, _params) do
conn
end
# This function renews the session ID and erases the whole
# session to avoid fixation attacks. If there is any data
# in the session you may want to preserve after log in/log out,
# you must explicitly fetch the session data before clearing
# and then immediately set it after clearing, for example:
#
# defp renew_session(conn) do
# preferred_locale = get_session(conn, :preferred_locale)
#
# conn
# |> configure_session(renew: true)
# |> clear_session()
# |> put_session(:preferred_locale, preferred_locale)
# end
#
defp renew_session(conn) do
conn
|> configure_session(renew: true)
|> clear_session()
end
@doc """
Logs the user out.
It clears all session data for safety. See renew_session.
"""
def log_out_user(conn) do
user_token = get_session(conn, :user_token)
user_token && Accounts.delete_session_token(user_token)
if live_socket_id = get_session(conn, :live_socket_id) do
Endpoint.broadcast(live_socket_id, "disconnect", %{})
end
conn
|> renew_session()
|> delete_resp_cookie(@remember_me_cookie)
|> redirect(to: "/")
end
def ban_user(conn) do
user_token = get_session(conn, :user_token)
user_token && Accounts.delete_session_token(user_token)
if live_socket_id = get_session(conn, :live_socket_id) do
Endpoint.broadcast(live_socket_id, "disconnect", %{})
end
conn
|> renew_session()
|> delete_resp_cookie(@remember_me_cookie)
end
@doc """
Authenticates the user by looking into the session
and remember me token.
"""
def fetch_current_user(conn, _opts) do
{user_token, conn} = ensure_user_token(conn)
user = user_token && Accounts.get_user_by_session_token(user_token)
assign(conn, :current_user, user)
end
defp ensure_user_token(conn) do
if user_token = get_session(conn, :user_token) do
{user_token, conn}
else
conn = fetch_cookies(conn, signed: [@remember_me_cookie])
if user_token = conn.cookies[@remember_me_cookie] do
{user_token, put_session(conn, :user_token, user_token)}
else
{nil, conn}
end
end
end
@doc """
Used for routes that require the user to not be authenticated.
"""
def redirect_if_user_is_authenticated(conn, _opts) do
if conn.assigns[:current_user] do
conn
|> redirect(to: signed_in_path(conn))
|> halt()
else
conn
end
end
@doc """
Used for routes that require the user to be authenticated.
If you want to enforce that the user's e-mail is confirmed before
they use the application at all, here would be a good place.
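In a Phoenix router this is typically used as a plug (assuming the router imports
this module):
    pipeline :protected do
      plug :require_authenticated_user
    end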
"""
def require_authenticated_user(conn, _opts) do
if conn.assigns[:current_user] do
conn
else
conn
|> put_flash(:error, "You must log in to access this page.")
|> maybe_store_return_to()
|> redirect(to: Routes.user_session_path(conn, :new))
|> halt()
end
end
@doc """
Used for routes that require the user to have a channel.
"""
def require_user_has_channel(conn, _opts) do
if Map.has_key?(conn.assigns, :current_user) and
Glimesh.Streams.get_channel_for_user(conn.assigns[:current_user]) do
conn
else
conn
|> put_flash(:error, "You must have a channel to access this page.")
|> redirect(to: Routes.user_settings_path(conn, :stream))
|> halt()
end
end
@doc """
Used for routes that require the user to be an administrator.
"""
def require_admin_user(conn, _opts) do
if conn.assigns[:current_user] && conn.assigns[:current_user].is_admin do
conn
else
conn
|> put_flash(:error, "You must be an administrator to access this page.")
|> maybe_store_return_to()
|> redirect(to: Routes.user_session_path(conn, :new))
|> halt()
end
end
@doc """
Used for api routes that require authentication.
"""
def require_authenticated_user_api(conn, _opts) do
if conn.assigns[:current_user] do
conn
else
conn
|> json(%{error: "You must be logged in to access the api"})
|> halt()
end
end
defp maybe_store_return_to(%{method: "GET", request_path: request_path} = conn) do
put_session(conn, :user_return_to, request_path)
end
defp maybe_store_return_to(conn), do: conn
defp signed_in_path(_conn), do: "/"
end
| 28.644231 | 84 | 0.684794 |
73162c5df0e65da06483553dc192089ba19ed234 | 569 | ex | Elixir | apps/ewallet/lib/ewallet/policies/transaction_request_policy.ex | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/ewallet/lib/ewallet/policies/transaction_request_policy.ex | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/ewallet/lib/ewallet/policies/transaction_request_policy.ex | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | defmodule EWallet.TransactionRequestPolicy do
@moduledoc """
The authorization policy for accounts.
"""
@behaviour Bodyguard.Policy
alias EWallet.WalletPolicy
alias EWalletDB.Wallet
def authorize(:all, _admin_user_or_key, nil), do: true
def authorize(:get, _params, _request) do
true
end
# Check with the passed attributes if the current accessor can
# create a request for the account
def authorize(:create, params, %Wallet{} = wallet) do
WalletPolicy.authorize(:admin, params, wallet)
end
def authorize(_, _, _), do: false
end
| 24.73913 | 64 | 0.72935 |
731647e3c6f38d17136bf97ebd2ff831daede3b4 | 1,800 | ex | Elixir | lib/miss/list.ex | zekus/miss-elixir | 675107573b30b9f1843930be980c72f755503678 | [
"Apache-2.0"
] | 40 | 2020-10-05T13:02:56.000Z | 2021-09-17T05:45:11.000Z | lib/miss/list.ex | zekus/miss-elixir | 675107573b30b9f1843930be980c72f755503678 | [
"Apache-2.0"
] | 3 | 2020-10-05T08:15:16.000Z | 2022-01-25T14:10:42.000Z | lib/miss/list.ex | zekus/miss-elixir | 675107573b30b9f1843930be980c72f755503678 | [
"Apache-2.0"
] | 4 | 2020-10-04T04:45:47.000Z | 2022-03-16T17:36:33.000Z | defmodule Miss.List do
@moduledoc """
Functions to extend the Elixir `List` module.
"""
@doc """
Returns a list containing only the elements that `list1` and `list2` have in common.
`Miss.List.intersection/2` uses the [list subtraction operator](https://hexdocs.pm/elixir/Kernel.html#--/2),
which before Erlang/OTP 22 could be very slow if both lists to intersect are long. In such
cases, consider converting each list to a `MapSet`, using `MapSet.intersection/2`, and
converting back to a list.
As of Erlang/OTP 22, this list subtraction operation is significantly faster even if both lists
are very long, that means `Miss.List.intersection/2` is usually faster and uses less memory than
using the MapSet-based alternative mentioned above.
That is also mentioned in the [Erlang Efficiency Guide](https://erlang.org/doc/efficiency_guide/retired_myths.html#myth--list-subtraction-------operator--is-slow):
> List subtraction used to have a run-time complexity proportional to the product of the length
> of its operands, so it was extremely slow when both lists were long.
>
> As of OTP 22 the run-time complexity is "n log n" and the operation will complete quickly even
> when both lists are very long. In fact, it is faster and uses less memory than the commonly
> used workaround to convert both lists to ordered sets before subtracting them with
>`ordsets:subtract/2`.
## Examples
iex> Miss.List.intersection([1, 2, 3, 4, 5], [3, 4, 5, 6, 7])
[3, 4, 5]
iex> Miss.List.intersection([4, 2, 5, 3, 1], [12, 1, 9, 5, 0])
[5, 1]
iex> Miss.List.intersection([1, 2, 3, 4, 5], [6, 7, 8, 9, 0])
[]
"""
@spec intersection(list(), list()) :: list()
def intersection(list1, list2), do: list1 -- list1 -- list2
end
| 41.860465 | 165 | 0.696667 |
73165b667fb845fb5b5d1ca74e8a91bb80ceec4c | 211 | exs | Elixir | priv/repo/migrations/20210503080631_add_collect_folder_to_article_collect_record.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 240 | 2018-11-06T09:36:54.000Z | 2022-02-20T07:12:36.000Z | priv/repo/migrations/20210503080631_add_collect_folder_to_article_collect_record.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 363 | 2018-07-11T03:38:14.000Z | 2021-12-14T01:42:40.000Z | priv/repo/migrations/20210503080631_add_collect_folder_to_article_collect_record.exs | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | [
"Apache-2.0"
] | 22 | 2019-01-27T11:47:56.000Z | 2021-02-28T13:17:52.000Z | defmodule GroupherServer.Repo.Migrations.AddCollectFolderToArticleCollectRecord do
use Ecto.Migration
def change do
alter table(:article_collects) do
add(:collect_folders, :map)
end
end
end
| 21.1 | 82 | 0.767773 |
7316b811ace49a583c07a6787e349312cf4bba12 | 1,255 | ex | Elixir | lib/web_driver_client/json_wire_protocol_client/commands/delete_cookies.ex | fimassuda/web_driver_client | 09d373c9a8a923c5e2860f107f84b16565e338f7 | [
"MIT"
] | 8 | 2019-11-24T18:33:12.000Z | 2020-12-09T10:20:09.000Z | lib/web_driver_client/json_wire_protocol_client/commands/delete_cookies.ex | fimassuda/web_driver_client | 09d373c9a8a923c5e2860f107f84b16565e338f7 | [
"MIT"
] | 67 | 2019-12-20T16:33:30.000Z | 2021-09-14T03:50:10.000Z | lib/web_driver_client/json_wire_protocol_client/commands/delete_cookies.ex | fimassuda/web_driver_client | 09d373c9a8a923c5e2860f107f84b16565e338f7 | [
"MIT"
] | 10 | 2020-06-19T16:15:03.000Z | 2021-09-13T17:56:25.000Z | defmodule WebDriverClient.JSONWireProtocolClient.Commands.DeleteCookies do
@moduledoc false
alias WebDriverClient.Config
alias WebDriverClient.ConnectionError
alias WebDriverClient.HTTPResponse
alias WebDriverClient.JSONWireProtocolClient.ResponseParser
alias WebDriverClient.JSONWireProtocolClient.TeslaClientBuilder
alias WebDriverClient.JSONWireProtocolClient.UnexpectedResponseError
alias WebDriverClient.JSONWireProtocolClient.WebDriverError
alias WebDriverClient.Session
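# Issues DELETE /session/:session_id/cookie to remove all cookies for the session.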
@spec send_request(Session.t()) :: {:ok, HTTPResponse.t()} | {:error, ConnectionError.t()}
def send_request(%Session{id: id, config: %Config{} = config}) do
client = TeslaClientBuilder.build_simple(config)
case Tesla.delete(client, "/session/#{id}/cookie") do
{:ok, env} ->
{:ok, HTTPResponse.build(env)}
{:error, reason} ->
{:error, reason}
end
end
@spec parse_response(HTTPResponse.t()) ::
:ok | {:error, UnexpectedResponseError.t() | WebDriverError.t()}
def parse_response(%HTTPResponse{} = http_response) do
with {:ok, jwp_response} <- ResponseParser.parse_response(http_response),
:ok <- ResponseParser.ensure_successful_jwp_status(jwp_response) do
:ok
end
end
end
| 35.857143 | 92 | 0.741036 |
7316d154b76f47f53025c1618d6f4b9ab350ab23 | 1,869 | exs | Elixir | clients/workflows/mix.exs | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/workflows/mix.exs | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/workflows/mix.exs | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Workflows.Mixfile do
use Mix.Project
@version "0.4.0"
def project() do
[
app: :google_api_workflows,
version: @version,
elixir: "~> 1.6",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/workflows"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:google_gax, "~> 0.4"},
{:ex_doc, "~> 0.16", only: :dev}
]
end
defp description() do
"""
Workflows API client library. Manage workflow definitions. To execute workflows and manage executions, see the Workflows Executions API.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching", "Daniel Azuma"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/workflows",
"Homepage" => "https://cloud.google.com/workflows"
}
]
end
end
| 27.895522 | 140 | 0.659176 |
7316df8a8f7956f0fdb8f6eddf0b2403a65877b4 | 288 | ex | Elixir | apps/ello_events/lib/ello_events/application.ex | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 16 | 2017-06-21T21:31:20.000Z | 2021-05-09T03:23:26.000Z | apps/ello_events/lib/ello_events/application.ex | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 25 | 2017-06-07T12:18:28.000Z | 2018-06-08T13:27:43.000Z | apps/ello_events/lib/ello_events/application.ex | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 3 | 2018-06-14T15:34:07.000Z | 2022-02-28T21:06:13.000Z | defmodule Ello.Events.Application do
@moduledoc false
use Application
def start(_type, _args) do
import Supervisor.Spec, warn: false
children = [
]
opts = [strategy: :one_for_one, name: Ello.Events.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 18 | 65 | 0.701389 |
7316fef6695eb90ec1747768bf49661681bba441 | 7,838 | ex | Elixir | clients/classroom/lib/google_api/classroom/v1/model/course_work.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/classroom/lib/google_api/classroom/v1/model/course_work.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/classroom/lib/google_api/classroom/v1/model/course_work.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Classroom.V1.Model.CourseWork do
@moduledoc """
Course work created by a teacher for students of the course.
## Attributes
* `alternateLink` (*type:* `String.t`, *default:* `nil`) - Absolute link to this course work in the Classroom web UI. This is only populated if `state` is `PUBLISHED`. Read-only.
* `assigneeMode` (*type:* `String.t`, *default:* `nil`) - Assignee mode of the coursework. If unspecified, the default value is `ALL_STUDENTS`.
* `assignment` (*type:* `GoogleApi.Classroom.V1.Model.Assignment.t`, *default:* `nil`) - Assignment details. This is populated only when `work_type` is `ASSIGNMENT`. Read-only.
* `associatedWithDeveloper` (*type:* `boolean()`, *default:* `nil`) - Whether this course work item is associated with the Developer Console project making the request. See CreateCourseWork for more details. Read-only.
* `courseId` (*type:* `String.t`, *default:* `nil`) - Identifier of the course. Read-only.
* `creationTime` (*type:* `DateTime.t`, *default:* `nil`) - Timestamp when this course work was created. Read-only.
* `creatorUserId` (*type:* `String.t`, *default:* `nil`) - Identifier for the user that created the coursework. Read-only.
* `description` (*type:* `String.t`, *default:* `nil`) - Optional description of this course work. If set, the description must be a valid UTF-8 string containing no more than 30,000 characters.
* `dueDate` (*type:* `GoogleApi.Classroom.V1.Model.Date.t`, *default:* `nil`) - Optional date, in UTC, that submissions for this course work are due. This must be specified if `due_time` is specified.
* `dueTime` (*type:* `GoogleApi.Classroom.V1.Model.TimeOfDay.t`, *default:* `nil`) - Optional time of day, in UTC, that submissions for this course work are due. This must be specified if `due_date` is specified.
* `gradeCategory` (*type:* `GoogleApi.Classroom.V1.Model.GradeCategory.t`, *default:* `nil`) - The category that this coursework's grade contributes to. Present only when a category has been chosen for the coursework. May be used in calculating the overall grade. Read-only.
* `id` (*type:* `String.t`, *default:* `nil`) - Classroom-assigned identifier of this course work, unique per course. Read-only.
* `individualStudentsOptions` (*type:* `GoogleApi.Classroom.V1.Model.IndividualStudentsOptions.t`, *default:* `nil`) - Identifiers of students with access to the coursework. This field is set only if `assigneeMode` is `INDIVIDUAL_STUDENTS`. If the `assigneeMode` is `INDIVIDUAL_STUDENTS`, then only students specified in this field are assigned the coursework.
* `materials` (*type:* `list(GoogleApi.Classroom.V1.Model.Material.t)`, *default:* `nil`) - Additional materials. CourseWork must have no more than 20 material items.
* `maxPoints` (*type:* `float()`, *default:* `nil`) - Maximum grade for this course work. If zero or unspecified, this assignment is considered ungraded. This must be a non-negative integer value.
* `multipleChoiceQuestion` (*type:* `GoogleApi.Classroom.V1.Model.MultipleChoiceQuestion.t`, *default:* `nil`) - Multiple choice question details. For read operations, this field is populated only when `work_type` is `MULTIPLE_CHOICE_QUESTION`. For write operations, this field must be specified when creating course work with a `work_type` of `MULTIPLE_CHOICE_QUESTION`, and it must not be set otherwise.
* `scheduledTime` (*type:* `DateTime.t`, *default:* `nil`) - Optional timestamp when this course work is scheduled to be published.
* `state` (*type:* `String.t`, *default:* `nil`) - Status of this course work. If unspecified, the default state is `DRAFT`.
* `submissionModificationMode` (*type:* `String.t`, *default:* `nil`) - Setting to determine when students are allowed to modify submissions. If unspecified, the default value is `MODIFIABLE_UNTIL_TURNED_IN`.
* `title` (*type:* `String.t`, *default:* `nil`) - Title of this course work. The title must be a valid UTF-8 string containing between 1 and 3000 characters.
* `topicId` (*type:* `String.t`, *default:* `nil`) - Identifier for the topic that this coursework is associated with. Must match an existing topic in the course.
* `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - Timestamp of the most recent change to this course work. Read-only.
* `workType` (*type:* `String.t`, *default:* `nil`) - Type of this course work. The type is set when the course work is created and cannot be changed.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:alternateLink => String.t() | nil,
:assigneeMode => String.t() | nil,
:assignment => GoogleApi.Classroom.V1.Model.Assignment.t() | nil,
:associatedWithDeveloper => boolean() | nil,
:courseId => String.t() | nil,
:creationTime => DateTime.t() | nil,
:creatorUserId => String.t() | nil,
:description => String.t() | nil,
:dueDate => GoogleApi.Classroom.V1.Model.Date.t() | nil,
:dueTime => GoogleApi.Classroom.V1.Model.TimeOfDay.t() | nil,
:gradeCategory => GoogleApi.Classroom.V1.Model.GradeCategory.t() | nil,
:id => String.t() | nil,
:individualStudentsOptions =>
GoogleApi.Classroom.V1.Model.IndividualStudentsOptions.t() | nil,
:materials => list(GoogleApi.Classroom.V1.Model.Material.t()) | nil,
:maxPoints => float() | nil,
:multipleChoiceQuestion =>
GoogleApi.Classroom.V1.Model.MultipleChoiceQuestion.t() | nil,
:scheduledTime => DateTime.t() | nil,
:state => String.t() | nil,
:submissionModificationMode => String.t() | nil,
:title => String.t() | nil,
:topicId => String.t() | nil,
:updateTime => DateTime.t() | nil,
:workType => String.t() | nil
}
field(:alternateLink)
field(:assigneeMode)
field(:assignment, as: GoogleApi.Classroom.V1.Model.Assignment)
field(:associatedWithDeveloper)
field(:courseId)
field(:creationTime, as: DateTime)
field(:creatorUserId)
field(:description)
field(:dueDate, as: GoogleApi.Classroom.V1.Model.Date)
field(:dueTime, as: GoogleApi.Classroom.V1.Model.TimeOfDay)
field(:gradeCategory, as: GoogleApi.Classroom.V1.Model.GradeCategory)
field(:id)
field(:individualStudentsOptions, as: GoogleApi.Classroom.V1.Model.IndividualStudentsOptions)
field(:materials, as: GoogleApi.Classroom.V1.Model.Material, type: :list)
field(:maxPoints)
field(:multipleChoiceQuestion, as: GoogleApi.Classroom.V1.Model.MultipleChoiceQuestion)
field(:scheduledTime, as: DateTime)
field(:state)
field(:submissionModificationMode)
field(:title)
field(:topicId)
field(:updateTime, as: DateTime)
field(:workType)
end
defimpl Poison.Decoder, for: GoogleApi.Classroom.V1.Model.CourseWork do
def decode(value, options) do
GoogleApi.Classroom.V1.Model.CourseWork.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Classroom.V1.Model.CourseWork do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 68.156522 | 409 | 0.703751 |
731700fdb879a42143069f5f33708cbddbf22cf3 | 3,923 | ex | Elixir | lib/new_relic/harvest/collector/agent_run.ex | TheRealReal/elixir_agent | db757b24543c3d213495435e71340897f9455d16 | [
"Apache-2.0"
] | null | null | null | lib/new_relic/harvest/collector/agent_run.ex | TheRealReal/elixir_agent | db757b24543c3d213495435e71340897f9455d16 | [
"Apache-2.0"
] | null | null | null | lib/new_relic/harvest/collector/agent_run.ex | TheRealReal/elixir_agent | db757b24543c3d213495435e71340897f9455d16 | [
"Apache-2.0"
] | null | null | null | defmodule NewRelic.Harvest.Collector.AgentRun do
use GenServer
# This GenServer is responsible for connecting to the collector
# and holding onto the Agent Run state
@moduledoc false
alias NewRelic.Harvest.Collector
def start_link do
GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
end
def init(:ok) do
NewRelic.sample_process()
:ets.new(__MODULE__, [:named_table, :public, :set])
if NewRelic.Config.enabled?() do
case Collector.Protocol.preconnect() do
{:error, _reason} ->
:ignore
%{"redirect_host" => redirect_host} ->
Application.put_env(:new_relic_agent, :collector_instance_host, redirect_host)
send(self(), :connect)
end
end
{:ok, :unknown}
end
def agent_run_id, do: lookup(:agent_run_id)
def trusted_account_key, do: lookup(:trusted_account_key)
def account_id, do: lookup(:account_id)
def primary_application_id, do: lookup(:primary_application_id)
def reconnect, do: send(__MODULE__, :connect)
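  # The values written by store_agent_run/1 further down are read back with
  # lookup/1; a sketch of how other parts of the agent would read them
  # (the keys are the ones stored below, the call sites are assumed):
  #
  #     NewRelic.Harvest.Collector.AgentRun.agent_run_id()
  #     NewRelic.Harvest.Collector.AgentRun.lookup(:data_report_period)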
def handle_info(:connect, _state) do
state =
connect_payload()
|> Collector.Protocol.connect()
|> parse_connect
store_agent_run(state)
{:noreply, state}
end
def handle_call(:connected, _from, state) do
{:reply, true, state}
end
def connect_payload do
[
%{
language: "elixir",
pid: NewRelic.Util.pid(),
host: NewRelic.Util.hostname(),
app_name: NewRelic.Config.app_name(),
utilization: NewRelic.Util.utilization(),
environment: NewRelic.Util.elixir_environment(),
agent_version: NewRelic.Config.agent_version()
}
]
end
defp store_agent_run(%{"agent_run_id" => _} = state) do
store(:agent_run_id, state["agent_run_id"])
store(:trusted_account_key, state["trusted_account_key"])
store(:account_id, state["account_id"])
store(:primary_application_id, state["primary_application_id"])
store(:sampling_target, state["sampling_target"])
store(:sampling_target_period, state["sampling_target_period_in_seconds"] * 1000)
transaction_event = state["data_methods"]["analytic_event_data"]
store(:transaction_event_reservoir_size, transaction_event["max_samples_stored"])
store(:transaction_event_harvest_cycle, transaction_event["report_period_in_seconds"] * 1000)
custom_event = state["data_methods"]["custom_event_data"]
store(:custom_event_reservoir_size, custom_event["max_samples_stored"])
store(:custom_event_harvest_cycle, custom_event["report_period_in_seconds"] * 1000)
error_event = state["data_methods"]["error_event_data"]
store(:error_event_reservoir_size, error_event["max_samples_stored"])
store(:error_event_harvest_cycle, error_event["report_period_in_seconds"] * 1000)
span_event = state["data_methods"]["span_event_data"]
store(:span_event_reservoir_size, span_event["max_samples_stored"])
store(:span_event_harvest_cycle, span_event["report_period_in_seconds"] * 1000)
store(:data_report_period, state["data_report_period"] * 1000)
end
defp store_agent_run(_error), do: :ignore
defp parse_connect(
%{"agent_run_id" => _, "messages" => [%{"message" => message}]} = connect_response
) do
NewRelic.log(:info, message)
connect_response
end
defp parse_connect(%{"error_type" => _, "message" => message}) do
NewRelic.log(:error, message)
:error
end
defp parse_connect({:error, reason}) do
NewRelic.log(:error, "Failed connect #{inspect(reason)}")
:error
end
defp parse_connect(503) do
NewRelic.log(:error, "Collector unavailable")
:error
end
def store(key, value) do
:ets.insert(__MODULE__, {key, value})
end
def lookup(key) do
Application.get_env(:new_relic_agent, key) ||
case :ets.lookup(__MODULE__, key) do
[{^key, value}] -> value
[] -> nil
end
end
end
| 29.496241 | 97 | 0.695131 |
731715c75d2c5c28ba65eb391f4f3117f39df7c4 | 464 | exs | Elixir | priv/repo/migrations/20170808125429_create_user.exs | deadmp/core | 4c244af3326f362ac6411e8996fb1cff0b5a29e6 | [
"MIT"
] | null | null | null | priv/repo/migrations/20170808125429_create_user.exs | deadmp/core | 4c244af3326f362ac6411e8996fb1cff0b5a29e6 | [
"MIT"
] | null | null | null | priv/repo/migrations/20170808125429_create_user.exs | deadmp/core | 4c244af3326f362ac6411e8996fb1cff0b5a29e6 | [
"MIT"
] | null | null | null | defmodule Core.Repo.Migrations.CreateUser do
@moduledoc """
Create user table
"""
use Ecto.Migration
def change do
create table(:users) do
add :oid, :binary_id
add :email, :string
add :nick, :string
add :password, :string
add :is_admin, :boolean, default: false
timestamps()
end
create index(:users, [:nick])
create unique_index(:users, [:email])
create unique_index(:users, [:oid])
end
end
| 18.56 | 45 | 0.625 |
731728805efbd1eade2f6ebe7afd6432b468b030 | 304 | ex | Elixir | lib/plotex/output/formatters/numeric_formatter.ex | k-cross/plotex | 5b5634bf47d677172e7c8da4877ebe38f62d16a1 | [
"Apache-2.0"
] | null | null | null | lib/plotex/output/formatters/numeric_formatter.ex | k-cross/plotex | 5b5634bf47d677172e7c8da4877ebe38f62d16a1 | [
"Apache-2.0"
] | null | null | null | lib/plotex/output/formatters/numeric_formatter.ex | k-cross/plotex | 5b5634bf47d677172e7c8da4877ebe38f62d16a1 | [
"Apache-2.0"
] | null | null | null |
defmodule Plotex.Output.Formatter.NumericDefault do
defstruct precision: 8, decimals: 2
end
defimpl Plotex.Output.Formatter, for: Plotex.Output.Formatter.NumericDefault do
def output(formatter, _axis, val) do
:io_lib.format("~#{formatter.precision}.#{formatter.decimals}f", [val])
end
end
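  # Example (a sketch): with the defaults above (precision 8, decimals 2)
  # the call returns iodata from :io_lib.format/2, e.g.
  #
  #     %Plotex.Output.Formatter.NumericDefault{}
  #     |> Plotex.Output.Formatter.output(nil, 3.14159)
  #     |> IO.iodata_to_binary()
  #     #=> "    3.14"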
| 25.333333 | 79 | 0.756579 |
731731e8e815b95a94612eada1c49d6b1782bbde | 2,011 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/target_vpn_gateway_list_warning_data.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/target_vpn_gateway_list_warning_data.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/compute/lib/google_api/compute/v1/model/target_vpn_gateway_list_warning_data.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.TargetVpnGatewayListWarningData do
@moduledoc """
## Attributes
* `key` (*type:* `String.t`, *default:* `nil`) - [Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).
* `value` (*type:* `String.t`, *default:* `nil`) - [Output Only] A warning data value corresponding to the key.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:key => String.t(),
:value => String.t()
}
field(:key)
field(:value)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.TargetVpnGatewayListWarningData do
def decode(value, options) do
GoogleApi.Compute.V1.Model.TargetVpnGatewayListWarningData.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.TargetVpnGatewayListWarningData do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 40.22 | 527 | 0.742914 |
73175064ff528d1805c1f5197930538726f9dc67 | 921 | ex | Elixir | test/support/channel_case.ex | Mehonoshin/pmj | d16e6af7f136cb69c9008e55886ef4619974fc1e | [
"MIT"
] | 1 | 2020-01-12T20:08:37.000Z | 2020-01-12T20:08:37.000Z | test/support/channel_case.ex | Mehonoshin/pmj | d16e6af7f136cb69c9008e55886ef4619974fc1e | [
"MIT"
] | null | null | null | test/support/channel_case.ex | Mehonoshin/pmj | d16e6af7f136cb69c9008e55886ef4619974fc1e | [
"MIT"
] | null | null | null | defmodule PmjWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common datastructures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
# The default endpoint for testing
@endpoint PmjWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Pmj.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Pmj.Repo, {:shared, self()})
end
:ok
end
end
| 24.236842 | 65 | 0.711183 |
73175390ac65432385c8eeca8b3ce68a247f3654 | 385 | ex | Elixir | lib/google_auth/supervisor.ex | DinaWork/google-auth | bd9bf156c9f2efeed874fc7af9f6b78e2be31be4 | [
"MIT"
] | null | null | null | lib/google_auth/supervisor.ex | DinaWork/google-auth | bd9bf156c9f2efeed874fc7af9f6b78e2be31be4 | [
"MIT"
] | null | null | null | lib/google_auth/supervisor.ex | DinaWork/google-auth | bd9bf156c9f2efeed874fc7af9f6b78e2be31be4 | [
"MIT"
] | null | null | null | defmodule GoogleAuth.Supervisor do
@moduledoc false
use Supervisor
alias GoogleAuth.Config
alias GoogleAuth.TokenStore
def start_link(envs) do
Supervisor.start_link(__MODULE__, envs, name: __MODULE__)
end
def init(envs) do
children = [
worker(Config, [envs]),
worker(TokenStore, [])
]
supervise(children, strategy: :one_for_one)
end
end
| 18.333333 | 61 | 0.698701 |
73179dde3baac5be5712dfded5449af23b9fe124 | 3,534 | exs | Elixir | test/recorder_httpc_test.exs | junsumida/exvcr | 0f8aeeb19e5164917302ce257a2a1c324e02ff3b | [
"MIT"
] | null | null | null | test/recorder_httpc_test.exs | junsumida/exvcr | 0f8aeeb19e5164917302ce257a2a1c324e02ff3b | [
"MIT"
] | null | null | null | test/recorder_httpc_test.exs | junsumida/exvcr | 0f8aeeb19e5164917302ce257a2a1c324e02ff3b | [
"MIT"
] | 1 | 2018-10-10T05:57:37.000Z | 2018-10-10T05:57:37.000Z | defmodule ExVCR.RecorderHttpcTest do
use ExUnit.Case, async: false
use ExVCR.Mock, adapter: ExVCR.Adapter.Httpc
@dummy_cassette_dir "tmp/vcr_tmp/vcr_cassettes_httpc"
@port 34001
@url 'http://localhost:#{@port}/server'
@url_with_query 'http://localhost:#{@port}/server?password=sample'
setup_all do
on_exit fn ->
File.rm_rf(@dummy_cassette_dir)
HttpServer.stop(@port)
:ok
end
Application.ensure_started(:inets)
HttpServer.start(path: "/server", port: @port, response: "test_response")
ExVCR.Config.cassette_library_dir(@dummy_cassette_dir)
:ok
end
test "forcefully getting response from server by removing json in advance" do
use_cassette "server1" do
{:ok, {_, _, body}} = :httpc.request(@url)
assert body =~ ~r/test_response/
end
end
test "forcefully getting response from server, then loading from cache by recording twice" do
use_cassette "server2" do
{:ok, {_, _, body}} = :httpc.request(@url)
assert body =~ ~r/test_response/
end
use_cassette "server2" do
{:ok, {_, _, body}} = :httpc.request(@url)
assert body =~ ~r/test_response/
end
end
test "replace sensitive data in body" do
ExVCR.Config.filter_sensitive_data("test_response", "PLACEHOLDER")
use_cassette "server_sensitive_data_in_body" do
{:ok, {_, _, body}} = :httpc.request(@url)
assert body =~ ~r/PLACEHOLDER/
end
ExVCR.Config.filter_sensitive_data(nil)
end
test "replace sensitive data in query " do
ExVCR.Config.filter_sensitive_data("password=[a-z]+", "password=***")
use_cassette "server_sensitive_data_in_query" do
{:ok, {_, _, body}} = :httpc.request(@url_with_query)
assert body =~ ~r/test_response/
end
# The recorded cassette should contain replaced data.
cassette = File.read!("#{@dummy_cassette_dir}/server_sensitive_data_in_query.json")
assert cassette =~ "password=***"
refute cassette =~ "password=sample"
ExVCR.Config.filter_sensitive_data(nil)
end
test "replace sensitive data in request header" do
ExVCR.Config.filter_request_headers("X-My-Secret-Token")
use_cassette "sensitive_data_in_request_header" do
{:ok, {_, _, body}} = :httpc.request(:get, {@url_with_query, [{'X-My-Secret-Token', 'my-secret-token'}]}, [], [])
assert body == "test_response"
end
# The recorded cassette should contain replaced data.
cassette = File.read!("#{@dummy_cassette_dir}/sensitive_data_in_request_header.json")
assert cassette =~ "\"X-My-Secret-Token\": \"***\""
refute cassette =~ "\"X-My-Secret-Token\": \"my-secret-token\""
ExVCR.Config.filter_request_headers(nil)
end
test "filter url param flag removes url params when recording cassettes" do
ExVCR.Config.filter_url_params(true)
use_cassette "example_ignore_url_params" do
{:ok, {_, _, body}} = :httpc.request('#{@url}?should_not_be_contained')
assert body =~ ~r/test_response/
end
json = File.read!("#{__DIR__}/../#{@dummy_cassette_dir}/example_ignore_url_params.json")
refute String.contains?(json, "should_not_be_contained")
ExVCR.Config.filter_url_params(false)
end
test "remove blacklisted headers" do
ExVCR.Config.response_headers_blacklist(["Date"])
use_cassette "remove_blacklisted_headers" do
{:ok, {_, headers, _}} = :httpc.request(@url)
assert headers == [{'server', 'Cowboy'}, {'content-length', '13'}]
end
ExVCR.Config.response_headers_blacklist([])
end
end
| 35.34 | 119 | 0.685342 |
7317b5d9840ce6eea5d60312a4ed551bddb79a0e | 435 | ex | Elixir | chessboard/lib/chessboard.ex | quatauta/exercism-elixir | 3635610f25111f2afd3bb748e86f9b478ec239b0 | [
"MIT"
] | 1 | 2022-01-23T20:34:09.000Z | 2022-01-23T20:34:09.000Z | chessboard/lib/chessboard.ex | quatauta/exercism-elixir | 3635610f25111f2afd3bb748e86f9b478ec239b0 | [
"MIT"
] | null | null | null | chessboard/lib/chessboard.ex | quatauta/exercism-elixir | 3635610f25111f2afd3bb748e86f9b478ec239b0 | [
"MIT"
] | null | null | null | defmodule Chessboard do
@moduledoc false
@type rank_range :: %Range{first: 1, last: 8, step: 1}
@type file_range :: %Range{first: ?A, last: ?H, step: 1}
@spec rank_range :: rank_range()
def rank_range, do: 1..8
@spec file_range :: file_range
def file_range, do: ?A..?H
@spec ranks :: list()
def ranks, do: Enum.to_list(rank_range())
@spec files :: [char()]
def files, do: Enum.map(file_range(), &<<&1>>)
end
| 22.894737 | 58 | 0.632184 |
7317c0bf48dcd8a547cd7ca76ee8ea3b2d267ff3 | 928 | ex | Elixir | lib/rocketpay/users/create.ex | henrique-tavares/rocketpay | 0c37071cd3e63dd51cae16d773522eb6a5de84d4 | [
"MIT"
] | null | null | null | lib/rocketpay/users/create.ex | henrique-tavares/rocketpay | 0c37071cd3e63dd51cae16d773522eb6a5de84d4 | [
"MIT"
] | null | null | null | lib/rocketpay/users/create.ex | henrique-tavares/rocketpay | 0c37071cd3e63dd51cae16d773522eb6a5de84d4 | [
"MIT"
] | null | null | null | defmodule Rocketpay.Users.Create do
alias Rocketpay.{Repo, User, Account}
alias Ecto.Multi
def call(params) do
Multi.new()
|> Multi.insert(:create_user, User.changeset(params))
|> Multi.run(:create_account, fn repo, %{create_user: user} ->
insert_account(repo, user.id)
end)
|> Multi.run(:preload_data, fn repo, %{create_user: user} ->
preload_data(repo, user)
end)
|> run_transaction()
end
defp insert_account(repo, user_id) do
user_id
|> account_changeset()
|> repo.insert()
end
defp account_changeset(user_id), do: Account.changeset(%{user_id: user_id, balance: "0.00"})
defp preload_data(repo, user) do
{:ok, repo.preload(user, :account)}
end
defp run_transaction(multi) do
case Repo.transaction(multi) do
{:error, _operaions, reason, _changes} -> {:error, reason}
{:ok, %{preload_data: user}} -> {:ok, user}
end
end
end
| 25.777778 | 94 | 0.653017 |
7317c42611570570a7f28475640ff263bb91d88a | 1,935 | exs | Elixir | mix.exs | mvanlamz/nflrushing | 7b796fe7633e54795dc3a60193cdb66c578988c7 | [
"MIT"
] | null | null | null | mix.exs | mvanlamz/nflrushing | 7b796fe7633e54795dc3a60193cdb66c578988c7 | [
"MIT"
] | null | null | null | mix.exs | mvanlamz/nflrushing | 7b796fe7633e54795dc3a60193cdb66c578988c7 | [
"MIT"
] | null | null | null | defmodule Nflrushing.MixProject do
use Mix.Project
def project do
[
app: :nflrushing,
version: "0.1.0",
elixir: "~> 1.7",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {Nflrushing.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.5.7"},
{:phoenix_ecto, "~> 4.1"},
{:ecto_sql, "~> 3.4"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 2.11"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:phoenix_live_dashboard, "~> 0.4"},
{:telemetry_metrics, "~> 0.4"},
{:telemetry_poller, "~> 0.4"},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"},
{:nimble_csv, "~> 1.1.0"},
{:mix_test_watch, "~> 1.0.2", only: :dev, runtime: false}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
# $ mix setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
setup: ["deps.get", "ecto.setup", "cmd npm install --prefix assets"],
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"]
]
end
end
| 28.455882 | 84 | 0.577778 |
731821f1a3e226d4aede1911587f16f0f850a865 | 1,829 | exs | Elixir | test/unit/configuration/repo_test.exs | gabheadz/annon.api | 9921d98e44c8472c133554dd55ea649c0f55726c | [
"MIT"
] | 328 | 2017-05-05T15:19:46.000Z | 2022-03-11T10:52:45.000Z | test/unit/configuration/repo_test.exs | gabheadz/annon.api | 9921d98e44c8472c133554dd55ea649c0f55726c | [
"MIT"
] | 83 | 2017-04-30T10:36:15.000Z | 2019-10-14T13:14:34.000Z | test/unit/configuration/repo_test.exs | gabheadz/annon.api | 9921d98e44c8472c133554dd55ea649c0f55726c | [
"MIT"
] | 29 | 2017-05-02T14:36:50.000Z | 2021-09-03T13:36:17.000Z | defmodule Annon.Configuration.RepoTest do
@moduledoc false
use ExUnit.Case, async: true
alias Annon.Configuration.Repo
setup do
%{config: [
database: "db",
username: "name",
password: "pwd",
hostname: "host",
port: "port",
]}
end
test "CONFIGURATION_DATABASE_URL environment variable is overriding defaults", %{config: config} do
System.put_env("CONFIGURATION_DATABASE_URL", "postgres://my_user:password@pghost:1234/db_name")
on_exit(fn ->
System.delete_env("CONFIGURATION_DATABASE_URL")
end)
assert {:ok, [
username: "my_user",
password: "password",
database: "db_name",
hostname: "pghost",
port: 1234
]} = Repo.init(Repo, config)
end
test "raises when database name is not set", %{config: config} do
assert_raise RuntimeError, "Set DB_NAME environment variable!", fn ->
Repo.init(Repo, Keyword.delete(config, :database))
end
end
test "raises when database username is not set", %{config: config} do
assert_raise RuntimeError, "Set DB_USER environment variable!", fn ->
Repo.init(Repo, Keyword.delete(config, :username))
end
end
test "raises when database password is not set", %{config: config} do
assert_raise RuntimeError, "Set DB_PASSWORD environment variable!", fn ->
Repo.init(Repo, Keyword.delete(config, :password))
end
end
test "raises when database host is not set", %{config: config} do
assert_raise RuntimeError, "Set DB_HOST environment variable!", fn ->
Repo.init(Repo, Keyword.delete(config, :hostname))
end
end
test "raises when database port is not set", %{config: config} do
assert_raise RuntimeError, "Set DB_PORT environment variable!", fn ->
Repo.init(Repo, Keyword.delete(config, :port))
end
end
end
| 29.983607 | 101 | 0.674139 |
731832a2a8d387d494ebfd00287a0ae7caca6b64 | 651 | ex | Elixir | chapter6/queue/lib/queue.ex | mCodex/rocketseat-ignite-elixir | bdb48db778c36b2325c75a41b4d6f7ef77b03cf5 | [
"MIT"
] | 1 | 2021-07-23T19:48:27.000Z | 2021-07-23T19:48:27.000Z | chapter6/queue/lib/queue.ex | mCodex/rocketseat-ignite-elixir | bdb48db778c36b2325c75a41b4d6f7ef77b03cf5 | [
"MIT"
] | null | null | null | chapter6/queue/lib/queue.ex | mCodex/rocketseat-ignite-elixir | bdb48db778c36b2325c75a41b4d6f7ef77b03cf5 | [
"MIT"
] | null | null | null | defmodule Queue do
use GenServer
def start_link(initial_stack) when is_list(initial_stack) do
GenServer.start_link(__MODULE__, initial_stack)
end
def enqueue(pid, element) do
GenServer.cast(pid, {:enqueue, element})
end
def dequeue(pid) do
GenServer.call(pid, :dequeue)
end
@impl true
def init(stack) do
{:ok, stack}
end
@impl true
def handle_cast({:enqueue, element}, stack) do
{:noreply, stack ++ [element]}
end
@impl true
def handle_call(:dequeue, _from, [head | tail]) do
{:reply, head, tail}
end
@impl true
def handle_call(:dequeue, _from, []) do
{:reply, nil, []}
end
end
| 18.083333 | 62 | 0.65745 |
731835fdc3c9ebf50054adb67de88966d49d5bb5 | 818 | ex | Elixir | elixir/little-elixir-otp/mp3_id3_reader/reader.ex | marcinbiegun/exercises | 36ad942e8d40d6471136326a3f6d09285bbd90aa | [
"MIT"
] | 1 | 2018-12-11T14:09:14.000Z | 2018-12-11T14:09:14.000Z | elixir/little-elixir-otp/mp3_id3_reader/reader.ex | marcinbiegun/exercises | 36ad942e8d40d6471136326a3f6d09285bbd90aa | [
"MIT"
] | null | null | null | elixir/little-elixir-otp/mp3_id3_reader/reader.ex | marcinbiegun/exercises | 36ad942e8d40d6471136326a3f6d09285bbd90aa | [
"MIT"
] | null | null | null | defmodule ID3Parser do
def parse(filepath) do
case File.read(filepath) do
{:ok, content} ->
read_id3(content)
_ ->
IO.puts "Unable to open file #{filepath}"
end
end
defp read_id3(content) do
music_byte_size = byte_size(content) - 128
<< _music_content :: binary-size(music_byte_size), id3_content :: binary >> = content
<<
"TAG",
title :: binary-size(30),
artist :: binary-size(30),
album :: binary-size(30),
year :: binary-size(4),
_rest :: binary
>> = id3_content
%{
title: title |> to_utf8,
artist: artist |> to_utf8,
album: album |> to_utf8,
year: year |> to_utf8
    }
  end

  # Minimal sketch of the string cleanup used above (assumption: ID3v1
  # fields are fixed-width and padded with NULs or spaces, so keep only
  # printable ASCII and trim the padding).
  defp to_utf8(field) do
    printable = for <<c <- field>>, c in 32..126, into: "", do: <<c>>
    String.trim(printable)
  end
end
ID3Parser.parse("assets/sample.mp3")
|> Enum.map(fn {key, value} ->
IO.puts("#{key}: #{value}")
end)
| 22.108108 | 89 | 0.572127 |
731852612d4a4b67c96b7c8328b656edd51ad07a | 499 | ex | Elixir | test/support/unit_test_handler.ex | ckreiling/stargate | 76f7d8a95314a842ac9340739fbd1f1f142ab383 | [
"Apache-2.0"
] | null | null | null | test/support/unit_test_handler.ex | ckreiling/stargate | 76f7d8a95314a842ac9340739fbd1f1f142ab383 | [
"Apache-2.0"
] | null | null | null | test/support/unit_test_handler.ex | ckreiling/stargate | 76f7d8a95314a842ac9340739fbd1f1f142ab383 | [
"Apache-2.0"
] | null | null | null | defmodule UnitTestHandler do
use Stargate.Receiver.MessageHandler
def init({source, expected}) do
{:ok, %{source: source, processed: 0, expected: expected}}
end
def handle_message(%{payload: payload}, state) do
send(state.source, "message #{payload} received")
processed_count = state.processed + 1
if processed_count == state.expected,
do: send(state.source, "all #{processed_count} messages received")
{:ack, %{state | processed: processed_count}}
end
end
| 26.263158 | 72 | 0.699399 |
7318b5c32485e0eb8d54bb6fd2887a600949844a | 2,121 | ex | Elixir | clients/android_management/lib/google_api/android_management/v1/model/user_facing_message.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/android_management/lib/google_api/android_management/v1/model/user_facing_message.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/android_management/lib/google_api/android_management/v1/model/user_facing_message.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AndroidManagement.V1.Model.UserFacingMessage do
@moduledoc """
Provides a user-facing message with locale info. The maximum message length is 4096 characters.
## Attributes
- defaultMessage (String.t): The default message displayed if no localized message is specified or the user's locale doesn't match with any of the localized messages. A default message must be provided if any localized messages are provided. Defaults to: `null`.
- localizedMessages (%{optional(String.t) => String.t}): A map containing <locale, message> pairs, where locale is a well-formed BCP 47 language (https://www.w3.org/International/articles/language-tags/) code, such as en-US, es-ES, or fr. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:defaultMessage => any(),
:localizedMessages => map()
}
field(:defaultMessage)
field(:localizedMessages, type: :map)
end
defimpl Poison.Decoder, for: GoogleApi.AndroidManagement.V1.Model.UserFacingMessage do
def decode(value, options) do
GoogleApi.AndroidManagement.V1.Model.UserFacingMessage.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AndroidManagement.V1.Model.UserFacingMessage do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 41.588235 | 275 | 0.751532 |
7318dca568e6f6391e94b94889a912aaa4a743e5 | 468 | ex | Elixir | lib/shopify_api/rate_limiting/graphql.ex | ProtoJazz/elixir-shopifyapi | 759e20baff5afdff235386193bc42b2ecd343f5d | [
"Apache-2.0"
] | 18 | 2019-06-07T13:36:39.000Z | 2021-08-03T21:06:36.000Z | lib/shopify_api/rate_limiting/graphql.ex | ProtoJazz/elixir-shopifyapi | 759e20baff5afdff235386193bc42b2ecd343f5d | [
"Apache-2.0"
] | 158 | 2018-08-30T22:09:00.000Z | 2021-09-22T01:18:59.000Z | lib/shopify_api/rate_limiting/graphql.ex | ProtoJazz/elixir-shopifyapi | 759e20baff5afdff235386193bc42b2ecd343f5d | [
"Apache-2.0"
] | 4 | 2020-09-05T00:48:46.000Z | 2020-09-30T15:53:50.000Z | defmodule ShopifyAPI.RateLimiting.GraphQL do
@plus_bucket 2000
@nonplus_bucket 1000
@plus_restore_rate 100
@nonplus_restore_rate 50
@max_query_cost 1000
def request_bucket(%{plus: true}), do: @plus_bucket
def request_bucket(%{plus: false}), do: @nonplus_bucket
def restore_rate_per_second(%{plus: true}), do: @plus_restore_rate
def restore_rate_per_second(%{plus: false}), do: @nonplus_restore_rate
def max_query_cost, do: @max_query_cost
end
| 26 | 72 | 0.767094 |
7318e72a49210cddd4e18e23de1866ff26a436c0 | 3,173 | ex | Elixir | clients/content/lib/google_api/content/v2/model/datafeed_fetch_schedule.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/content/lib/google_api/content/v2/model/datafeed_fetch_schedule.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/datafeed_fetch_schedule.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Content.V2.Model.DatafeedFetchSchedule do
@moduledoc """
The required fields vary based on the frequency of fetching. For a monthly fetch schedule, day_of_month and hour are required. For a weekly fetch schedule, weekday and hour are required. For a daily fetch schedule, only hour is required.
## Attributes
* `dayOfMonth` (*type:* `integer()`, *default:* `nil`) - The day of the month the feed file should be fetched (1-31).
* `fetchUrl` (*type:* `String.t`, *default:* `nil`) - The URL where the feed file can be fetched. Google Merchant Center will support automatic scheduled uploads using the HTTP, HTTPS, FTP, or SFTP protocols, so the value will need to be a valid link using one of those four protocols.
* `hour` (*type:* `integer()`, *default:* `nil`) - The hour of the day the feed file should be fetched (0-23).
* `minuteOfHour` (*type:* `integer()`, *default:* `nil`) - The minute of the hour the feed file should be fetched (0-59). Read-only.
* `password` (*type:* `String.t`, *default:* `nil`) - An optional password for fetch_url.
* `paused` (*type:* `boolean()`, *default:* `nil`) - Whether the scheduled fetch is paused or not.
* `timeZone` (*type:* `String.t`, *default:* `nil`) - Time zone used for schedule. UTC by default. E.g., "America/Los_Angeles".
* `username` (*type:* `String.t`, *default:* `nil`) - An optional user name for fetch_url.
* `weekday` (*type:* `String.t`, *default:* `nil`) - The day of the week the feed file should be fetched.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:dayOfMonth => integer(),
:fetchUrl => String.t(),
:hour => integer(),
:minuteOfHour => integer(),
:password => String.t(),
:paused => boolean(),
:timeZone => String.t(),
:username => String.t(),
:weekday => String.t()
}
field(:dayOfMonth)
field(:fetchUrl)
field(:hour)
field(:minuteOfHour)
field(:password)
field(:paused)
field(:timeZone)
field(:username)
field(:weekday)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.DatafeedFetchSchedule do
def decode(value, options) do
GoogleApi.Content.V2.Model.DatafeedFetchSchedule.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.DatafeedFetchSchedule do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 44.690141 | 289 | 0.685471 |
73190e85bc9f2808bb23a07001d84bfa52368a95 | 7,891 | ex | Elixir | lib/changelog/schema/person.ex | yanokwa/changelog.com | 88093bada9ff294159246b8200b3121cf41666f7 | [
"MIT"
] | 1 | 2021-03-14T21:12:49.000Z | 2021-03-14T21:12:49.000Z | lib/changelog/schema/person.ex | yanokwa/changelog.com | 88093bada9ff294159246b8200b3121cf41666f7 | [
"MIT"
] | null | null | null | lib/changelog/schema/person.ex | yanokwa/changelog.com | 88093bada9ff294159246b8200b3121cf41666f7 | [
"MIT"
] | 1 | 2018-10-03T20:55:52.000Z | 2018-10-03T20:55:52.000Z | defmodule Changelog.Person do
use Changelog.Schema
alias Changelog.{EpisodeHost, EpisodeGuest, Faker, Files, NewsItem,
NewsItemComment, PodcastHost, Post, Regexp, Subscription}
defmodule Settings do
use Changelog.Schema
@primary_key false
embedded_schema do
field :subscribe_to_contributed_news, :boolean, default: true
field :subscribe_to_participated_episodes, :boolean, default: true
field :email_on_authored_news, :boolean, default: true
field :email_on_submitted_news, :boolean, default: true
field :email_on_comment_replies, :boolean, default: true
field :email_on_comment_mentions, :boolean, default: true
end
def changeset(struct, attrs) do
cast(struct, attrs, __MODULE__.__schema__(:fields))
end
def is_valid(name) when is_binary(name) do
__MODULE__.__schema__(:fields)
|> Enum.map(&Atom.to_string/1)
|> Enum.any?(&(&1 == name))
end
def is_valid(_), do: false
end
schema "people" do
field :name, :string
field :email, :string
field :handle, :string
field :github_handle, :string
field :linkedin_handle, :string
field :twitter_handle, :string
field :slack_id, :string
field :website, :string
field :bio, :string
field :location, :string
field :auth_token, :string
field :auth_token_expires_at, :utc_datetime
field :joined_at, :utc_datetime
field :signed_in_at, :utc_datetime
field :avatar, Files.Avatar.Type
field :admin, :boolean, default: false
field :host, :boolean, default: false
field :editor, :boolean, default: false
embeds_one :settings, Settings, on_replace: :update
has_many :podcast_hosts, PodcastHost, on_delete: :delete_all
has_many :episode_hosts, EpisodeHost, on_delete: :delete_all
has_many :host_episodes, through: [:episode_hosts, :episode]
has_many :episode_guests, EpisodeGuest, on_delete: :delete_all
has_many :guest_episodes, through: [:episode_guests, :episode]
has_many :authored_posts, Post, foreign_key: :author_id, on_delete: :delete_all
has_many :authored_news_items, NewsItem, foreign_key: :author_id
has_many :logged_news_items, NewsItem, foreign_key: :logger_id
has_many :submitted_news_items, NewsItem, foreign_key: :submitter_id
has_many :comments, NewsItemComment, foreign_key: :author_id
has_many :subscriptions, Subscription, where: [unsubscribed_at: nil]
timestamps()
end
def admins(query \\ __MODULE__), do: from(q in query, where: q.admin)
def editors(query \\ __MODULE__), do: from(q in query, where: q.editor)
def hosts(query \\ __MODULE__), do: from(q in query, where: q.host)
def in_slack(query \\ __MODULE__), do: from(q in query, where: not(is_nil(q.slack_id)))
def joined(query \\ __MODULE__), do: from(a in query, where: not(is_nil(a.joined_at)))
def never_signed_in(query \\ __MODULE__), do: from(q in query, where: is_nil(q.signed_in_at))
def faked(query \\ __MODULE__), do: from(q in query, where: q.name in ^Changelog.Faker.names())
def with_email(query \\ __MODULE__, email), do: from(q in query, where: q.email == ^email)
def with_handles(query \\ __MODULE__, handles), do: from(q in query, where: q.handle in ^handles)
def joined_today(query \\ __MODULE__) do
today = Timex.subtract(Timex.now, Timex.Duration.from_days(1))
from(p in query, where: p.joined_at > ^today)
end
def get_by_encoded_auth(token) do
case __MODULE__.decoded_data(token) do
[email, auth_token] -> Repo.get_by(__MODULE__, email: email, auth_token: auth_token)
_else -> nil
end
end
def get_by_encoded_id(token) do
case __MODULE__.decoded_data(token) do
[id, email] -> Repo.get_by(__MODULE__, id: id, email: email)
_else -> nil
end
end
def get_by_ueberauth(%{provider: :twitter, info: %{nickname: handle}}) do
Repo.get_by(__MODULE__, twitter_handle: handle)
end
def get_by_ueberauth(%{provider: :github, info: %{nickname: handle}}) do
Repo.get_by(__MODULE__, github_handle: handle)
end
def get_by_ueberauth(_), do: nil
def auth_changeset(person, attrs \\ %{}), do: cast(person, attrs, ~w(auth_token auth_token_expires_at)a)
def admin_insert_changeset(person, attrs \\ %{}) do
allowed = ~w(name email handle github_handle linkedin_handle twitter_handle bio website location admin host editor)a
changeset_with_allowed_attrs(person, attrs, allowed)
end
def admin_update_changeset(person, attrs \\ %{}) do
person
|> admin_insert_changeset(attrs)
|> file_changeset(attrs)
end
def file_changeset(person, attrs \\ %{}), do: cast_attachments(person, attrs, [:avatar], allow_urls: true)
def insert_changeset(person, attrs \\ %{}) do
allowed = ~w(name email handle github_handle linkedin_handle twitter_handle bio website location)a
changeset_with_allowed_attrs(person, attrs, allowed)
end
def update_changeset(person, attrs \\ %{}) do
person
|> insert_changeset(attrs)
|> file_changeset(attrs)
end
defp changeset_with_allowed_attrs(person, attrs, allowed) do
person
|> cast(attrs, allowed)
|> cast_embed(:settings)
|> validate_required([:name, :email, :handle])
|> validate_format(:email, Regexp.email())
|> validate_format(:website, Regexp.http(), message: Regexp.http_message())
|> validate_format(:handle, Regexp.slug(), message: Regexp.slug_message())
|> validate_length(:handle, max: 40, message: "max 40 chars")
|> validate_format(:github_handle, Regexp.social(), message: Regexp.social_message())
|> validate_format(:linkedin_handle, Regexp.social(), message: Regexp.social_message())
|> validate_format(:twitter_handle, Regexp.social(), message: Regexp.social_message())
|> unique_constraint(:email)
|> unique_constraint(:handle)
|> unique_constraint(:github_handle)
|> unique_constraint(:linkedin_handle)
|> unique_constraint(:twitter_handle)
end
def sign_in_changes(person) do
change(person, %{
auth_token: nil,
auth_token_expires_at: nil,
signed_in_at: now_in_seconds(),
joined_at: (person.joined_at || now_in_seconds())
})
end
def slack_changes(person, slack_id) do
change(person, %{slack_id: slack_id})
end
def refresh_auth_token(person, expires_in \\ 60 * 24) do
auth_token = Base.encode16(:crypto.strong_rand_bytes(8))
expires_at = Timex.add(Timex.now, Timex.Duration.from_minutes(expires_in))
changeset = auth_changeset(person, %{auth_token: auth_token, auth_token_expires_at: expires_at})
{:ok, person} = Repo.update(changeset)
person
end
def encoded_auth(person), do: {:ok, Base.encode16("#{person.email}|#{person.auth_token}")}
def encoded_id(person), do: {:ok, Base.encode16("#{person.id}|#{person.email}")}
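  # Sketch of the token round trip these helpers support (the surrounding
  # flow is inferred, not taken from this file):
  #
  #     person = Changelog.Person.refresh_auth_token(person)
  #     {:ok, token} = Changelog.Person.encoded_auth(person)
  #     # ... deliver the token out of band, then later ...
  #     person = Changelog.Person.get_by_encoded_auth(token)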
def decoded_data(encoded) do
case Base.decode16(encoded) do
{:ok, decoded} -> String.split(decoded, "|")
:error -> ["", ""]
end
end
def episode_count(person) do
host_count = Repo.count(from(e in EpisodeHost, where: e.person_id == ^person.id))
guest_count = Repo.count(from(e in EpisodeGuest, where: e.person_id == ^person.id))
host_count + guest_count
end
def post_count(person) do
Repo.count(from(p in Post, where: p.author_id == ^person.id))
end
def preload_subscriptions(query = %Ecto.Query{}), do: Ecto.Query.preload(query, :subscriptions)
def preload_subscriptions(person), do: Repo.preload(person, :subscriptions)
def with_fake_data(person \\ %__MODULE__{}) do
fake_name = Faker.name()
fake_handle = Faker.handle(fake_name)
%{person | name: fake_name, handle: fake_handle}
end
def sans_fake_data(person) do
if Faker.name_fake?(person.name) do
%{person | name: nil, handle: nil}
else
person
end
end
end
| 37.398104 | 120 | 0.699531 |
731917e281cb9d48872eebbb782f2bb7e62eeaae | 737 | exs | Elixir | .formatter.exs | simpers/ash_graphql | c91f52569220524d5ddd4a07e1a183b74333e341 | [
"MIT"
] | null | null | null | .formatter.exs | simpers/ash_graphql | c91f52569220524d5ddd4a07e1a183b74333e341 | [
"MIT"
] | null | null | null | .formatter.exs | simpers/ash_graphql | c91f52569220524d5ddd4a07e1a183b74333e341 | [
"MIT"
] | null | null | null | # THIS FILE IS AUTOGENERATED USING `mix ash.formatter`
# DONT MODIFY IT BY HAND
locals_without_parens = [
allow_nil?: 1,
authorize?: 1,
create: 2,
create: 3,
debug?: 1,
destroy: 2,
destroy: 3,
get: 2,
get: 3,
identity: 1,
list: 2,
list: 3,
lookup_identities: 1,
lookup_with_primary_key?: 1,
managed_relationship: 2,
managed_relationship: 3,
primary_key_delimiter: 1,
read_action: 1,
read_one: 2,
read_one: 3,
stacktraces?: 1,
type: 1,
type_name: 1,
types: 1,
update: 2,
update: 3,
upsert?: 1
]
[
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"],
locals_without_parens: locals_without_parens,
export: [
locals_without_parens: locals_without_parens
]
]
| 18.425 | 70 | 0.656716 |
73195cd48d15815c864576243abf1d297d5c4d16 | 248 | exs | Elixir | test/ex_litedb_test.exs | litedb/ex_litedb | ed1ca3a2721d05726e814b393b73c4e08df9350b | [
"MIT"
] | null | null | null | test/ex_litedb_test.exs | litedb/ex_litedb | ed1ca3a2721d05726e814b393b73c4e08df9350b | [
"MIT"
] | null | null | null | test/ex_litedb_test.exs | litedb/ex_litedb | ed1ca3a2721d05726e814b393b73c4e08df9350b | [
"MIT"
] | null | null | null | defmodule ExLitedbTest do
use ExUnit.Case
doctest ExLitedb
describe "path_for" do
test "works" do
ExLitedb.extensions()
|> Enum.map(fn ext ->
assert ExLitedb.path_for(ext) |> IO.inspect()
end)
end
end
end
| 17.714286 | 53 | 0.629032 |
73198bc395f9fff3aa1322d60d7b8afde9741984 | 404 | ex | Elixir | lib/strategy/consul/agent.ex | howleysv/libcluster_consul | 03557c348a2be007fc2b980b7a9dd694f93e362d | [
"Apache-2.0"
] | 25 | 2020-04-17T07:43:07.000Z | 2022-03-11T11:33:59.000Z | lib/strategy/consul/agent.ex | howleysv/libcluster_consul | 03557c348a2be007fc2b980b7a9dd694f93e362d | [
"Apache-2.0"
] | 3 | 2020-05-29T23:09:29.000Z | 2021-09-23T11:51:41.000Z | lib/strategy/consul/agent.ex | howleysv/libcluster_consul | 03557c348a2be007fc2b980b7a9dd694f93e362d | [
"Apache-2.0"
] | 3 | 2020-05-28T20:37:26.000Z | 2021-05-26T10:23:34.000Z | defmodule Cluster.Strategy.Consul.Agent do
@moduledoc """
This endpoint grab nodes from Consul using the
[Agent HTTP API](https://www.consul.io/api/agent.html).
"""
use Cluster.Strategy.Consul.Endpoint
@impl true
def build_url(%URI{} = url, config) do
%{url | path: "/agent/service/#{config[:service_name]}"}
end
@impl true
def parse_response(%{"Address" => ip}), do: [ip]
end
| 23.764706 | 60 | 0.673267 |
7319940f7b361aee53d7fba130f1ec6c04834800 | 2,256 | ex | Elixir | lib/purple_web/live/run_live/show.ex | knoebber/petaller | 3532db5a3688459127d2427af42e32ca0f494d44 | [
"MIT"
] | null | null | null | lib/purple_web/live/run_live/show.ex | knoebber/petaller | 3532db5a3688459127d2427af42e32ca0f494d44 | [
"MIT"
] | null | null | null | lib/purple_web/live/run_live/show.ex | knoebber/petaller | 3532db5a3688459127d2427af42e32ca0f494d44 | [
"MIT"
] | null | null | null | defmodule PurpleWeb.RunLive.Show do
use PurpleWeb, :live_view
import PurpleWeb.RunLive.RunHelpers
alias Purple.Activities
alias PurpleWeb.Markdown
defp page_title(:show), do: "Show Run"
defp page_title(:edit), do: "Edit Run"
@impl Phoenix.LiveView
def handle_params(%{"id" => id}, _, socket) do
run = Activities.get_run!(id)
run_rows =
run.description
|> String.split("\n")
|> length()
{
:noreply,
socket
|> assign(:page_title, page_title(socket.assigns.live_action))
|> assign(:run, run)
|> assign(:run_rows, run_rows + 1)
}
end
@impl Phoenix.LiveView
def mount(_, _, socket) do
{:ok, assign(socket, :side_nav, side_nav())}
end
@impl Phoenix.LiveView
def render(assigns) do
~H"""
<h1>
<%= live_patch("Runs", to: Routes.run_index_path(@socket, :index)) %> / <%= "#{@run.id}" %>
</h1>
<section class="mt-2 mb-2 window">
<div class="flex justify-between bg-purple-300 p-1">
<div class="inline-links">
<strong>
<%= @run.miles %> miles@<%= format_pace(@run.miles, @run.seconds) %>
</strong>
<span>|</span>
<%= if @live_action == :edit do %>
<strong>Edit Item</strong>
<span>|</span>
<%= live_patch("Cancel",
to: Routes.run_show_path(@socket, :show, @run)
) %>
<% else %>
<%= live_patch(
"Edit",
to: Routes.run_show_path(@socket, :edit, @run)
) %>
<% end %>
</div>
<i>
<%= format_date(@run.date) %>
</i>
</div>
<%= if @live_action == :edit do %>
<div class="m-2 p-2 border border-purple-500 bg-purple-50 rounded">
<.live_component
module={PurpleWeb.RunLive.RunForm}
id={@run.id}
action={@live_action}
rows={@run_rows}
run={@run}
return_to={Routes.run_show_path(@socket, :show, @run)}
/>
</div>
<% else %>
<div class="markdown-content">
<%= Markdown.markdown_to_html(@run.description, :run) %>
</div>
<% end %>
</section>
"""
end
end
| 26.541176 | 97 | 0.515514 |
7319c25d52c54fb5991541235351e584cd026d08 | 1,473 | ex | Elixir | clients/vault/lib/google_api/vault/v1/model/team_drive_info.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/vault/lib/google_api/vault/v1/model/team_drive_info.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/vault/lib/google_api/vault/v1/model/team_drive_info.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Vault.V1.Model.TeamDriveInfo do
@moduledoc """
Team Drives to search
## Attributes
* `teamDriveIds` (*type:* `list(String.t)`, *default:* `nil`) - List of Team Drive IDs, as provided by <a
href="https://developers.google.com/drive">Drive API</a>.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:teamDriveIds => list(String.t())
}
field(:teamDriveIds, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Vault.V1.Model.TeamDriveInfo do
def decode(value, options) do
GoogleApi.Vault.V1.Model.TeamDriveInfo.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Vault.V1.Model.TeamDriveInfo do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 30.6875 | 109 | 0.726409 |
7319cd74977da9a31f5b0ffed76e1c5165fc5575 | 1,439 | ex | Elixir | lib/phoenix_live_view_collection_web/telemetry.ex | leandrocp/phoenix_live_view_collection | 24767709c198f60a84a4a9384ca06857569802e8 | [
"MIT"
] | 5 | 2022-02-11T19:51:00.000Z | 2022-03-05T08:28:39.000Z | lib/phoenix_live_view_collection_web/telemetry.ex | craft-ex/phoenix_live_view_collection | e0e05e6a5008dcd66d6121c54ec879b1d283c48a | [
"MIT"
] | 2 | 2022-02-12T12:31:23.000Z | 2022-02-18T20:13:22.000Z | lib/phoenix_live_view_collection_web/telemetry.ex | craft-ex/phoenix_live_view_collection | e0e05e6a5008dcd66d6121c54ec879b1d283c48a | [
"MIT"
] | 2 | 2022-02-11T20:14:33.000Z | 2022-03-05T08:28:49.000Z | defmodule LiveViewCollectionWeb.Telemetry do
@moduledoc false
use Supervisor
import Telemetry.Metrics
def start_link(arg) do
Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
end
@impl true
def init(_arg) do
children = [
# Telemetry poller will execute the given period measurements
# every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
{:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
# Add reporters as children of your supervision tree.
# {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
]
Supervisor.init(children, strategy: :one_for_one)
end
def metrics do
[
# Phoenix Metrics
summary("phoenix.endpoint.stop.duration",
unit: {:native, :millisecond}
),
summary("phoenix.router_dispatch.stop.duration",
tags: [:route],
unit: {:native, :millisecond}
),
# VM Metrics
summary("vm.memory.total", unit: {:byte, :kilobyte}),
summary("vm.total_run_queue_lengths.total"),
summary("vm.total_run_queue_lengths.cpu"),
summary("vm.total_run_queue_lengths.io")
]
end
defp periodic_measurements do
[
# A module, function and arguments to be invoked periodically.
# This function must call :telemetry.execute/3 and a metric must be added above.
# {LiveViewCollectionWeb, :count_users, []}
]
end
end
| 28.78 | 86 | 0.676164 |
7319fee196aff3629b4800fa73b0a204817da367 | 1,512 | ex | Elixir | farmbot_ext/lib/farmbot_ext/api/preloader.ex | va2ron1/farmbot_os | c80a38058713adc2ad91a4802664bcfe8da9d96c | [
"MIT"
] | null | null | null | farmbot_ext/lib/farmbot_ext/api/preloader.ex | va2ron1/farmbot_os | c80a38058713adc2ad91a4802664bcfe8da9d96c | [
"MIT"
] | null | null | null | farmbot_ext/lib/farmbot_ext/api/preloader.ex | va2ron1/farmbot_os | c80a38058713adc2ad91a4802664bcfe8da9d96c | [
"MIT"
] | null | null | null | defmodule FarmbotExt.API.Preloader do
@moduledoc """
Task to ensure download and insert or cache
all resources stored in the API.
"""
alias Ecto.Changeset
require FarmbotCore.Logger
alias FarmbotExt.API
alias FarmbotExt.API.{Reconciler, SyncGroup}
alias FarmbotCore.Asset.Sync
@doc """
Syncronous call to sync or preload assets.
Starts with `group_0`, syncs all resources.
"""
def preload_all() do
with {:ok, sync_changeset} <- API.get_changeset(Sync),
sync_changeset <- Reconciler.sync_group(sync_changeset, SyncGroup.group_0()) do
FarmbotCore.Logger.success(3, "Successfully preloaded resources.")
do_auto_sync(sync_changeset)
end
end
defp do_auto_sync(%Changeset{} = sync_changeset) do
FarmbotCore.Logger.busy(3, "Starting auto sync")
# This is where I left off.
with %Changeset{valid?: true} = sync_changeset <-
Reconciler.sync_group(sync_changeset, SyncGroup.group_1()),
%Changeset{valid?: true} = sync_changeset <-
Reconciler.sync_group(sync_changeset, SyncGroup.group_2()),
%Changeset{valid?: true} = sync_changeset <-
Reconciler.sync_group(sync_changeset, SyncGroup.group_3()),
%Changeset{valid?: true} <- Reconciler.sync_group(sync_changeset, SyncGroup.group_4()) do
FarmbotCore.Logger.success(3, "Auto sync complete")
:ok
else
error ->
FarmbotCore.Logger.error(3, "Auto sync failed #{inspect(error)}")
error
end
end
end
| 32.869565 | 98 | 0.687831 |
731a0ad511f41b8c05c3e8ae30e914792efd8edd | 254 | exs | Elixir | config/config.exs | PabloG6/link_preview | b00e6029e5f941e566e5b31b7dc7b9a7d490b11e | [
"Apache-2.0"
] | null | null | null | config/config.exs | PabloG6/link_preview | b00e6029e5f941e566e5b31b7dc7b9a7d490b11e | [
"Apache-2.0"
] | null | null | null | config/config.exs | PabloG6/link_preview | b00e6029e5f941e566e5b31b7dc7b9a7d490b11e | [
"Apache-2.0"
] | null | null | null | use Mix.Config
config :floki, :html_parser, Floki.HTMLParser.Html5ever
config :tesla, adapter: Tesla.Adapter.Hackney
if File.exists?("config/#{Mix.env()}.exs") do
IO.puts "exists"
IO.puts "#{Mix.env()}.exs"
import_config "#{Mix.env()}.exs"
end
| 19.538462 | 55 | 0.692913 |
731a0b6fd936af4eb110e7b1d910d73cddcebe86 | 1,674 | exs | Elixir | test/attachments_test.exs | bombinatetech/mailman | 7b69db23273c2de6c836ac187e191f145f346845 | [
"MIT"
] | null | null | null | test/attachments_test.exs | bombinatetech/mailman | 7b69db23273c2de6c836ac187e191f145f346845 | [
"MIT"
] | null | null | null | test/attachments_test.exs | bombinatetech/mailman | 7b69db23273c2de6c836ac187e191f145f346845 | [
"MIT"
] | null | null | null | defmodule AttachmentsTest do
use ExUnit.Case, async: true
test "#inline returns { :ok, attachment } when file exists" do
{ :ok, attachment } = Mailman.Attachment.inline("test/data/blank.png")
assert is_map(attachment)
end
test "#inline returns {:error, message} when file doesn't exist" do
file_path = "test/data/idontexist.png"
{:error, _} = Mailman.Attachment.inline(file_path)
end
test "Attachment with a different disposition filename" do
{:ok, attachment} = Mailman.Attachment.inline("test/data/blank.png", "another_name.png")
assert attachment.file_name == "another_name.png"
assert is_map(attachment)
end
test "Attachment with a manually set mime type" do
{:ok, attachment} = Mailman.Attachment.attach("test/data/blank.png", nil, {"image", "gif"})
assert attachment.mime_type == "image"
assert attachment.mime_sub_type == "gif"
assert is_map(attachment)
end
test "#mime_types returns the list of 648 types" do
assert Enum.count(Mailman.Attachment.mime_types) == 648
end
test "mime type getter returns proper type" do
assert Mailman.Attachment.mime_type_and_subtype_from_extension("image.gif") == {"image", "gif"}
assert Mailman.Attachment.mime_type_and_subtype_from_extension("image.png") == {"image", "png"}
assert Mailman.Attachment.mime_type_and_subtype_from_extension("invoice.pdf") == {"application", "pdf"}
assert Mailman.Attachment.mime_type_and_subtype_from_extension("file.strange") == {"application", "octet-stream"}
assert Mailman.Attachment.mime_type_and_subtype_from_extension("settings.mobileconfig") == {"application", "x-apple-aspen-config"}
end
end
| 41.85 | 134 | 0.729988 |
731a26ac06cf82f0882e4ebfc5d291f4f9b7386e | 574 | exs | Elixir | code/macros/eg1.exs | alvarocamillont/introdu-o_elixir | 1d72d4f4b01d9312c4b066ce3c0fe8d9bfaaade1 | [
"MIT"
] | null | null | null | code/macros/eg1.exs | alvarocamillont/introdu-o_elixir | 1d72d4f4b01d9312c4b066ce3c0fe8d9bfaaade1 | [
"MIT"
] | 1 | 2021-03-09T16:27:25.000Z | 2021-03-09T16:27:25.000Z | programming-elixir-book/code/macros/eg1.exs | jordanhubbard/elixir-projects | dee341d672e83a45a17a4a85abd54a480f95c506 | [
"BSD-2-Clause"
] | null | null | null | #---
# Excerpted from "Programming Elixir ≥ 1.6",
# published by The Pragmatic Bookshelf.
# Copyrights apply to this code. It may not be used to create training material,
# courses, books, articles, and the like. Contact us if you are in doubt.
# We make no guarantees that this code is fit for any purpose.
# Visit http://www.pragmaticprogrammer.com/titles/elixir16 for more book information.
#---
defmodule My do
defmacro macro(code) do
IO.inspect code
quote do: IO.puts "Different code"
end
end
defmodule Test do
require My
My.macro(IO.puts("hello"))
end
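# A sketch of what compiling this file prints (exact AST metadata varies by Elixir
# version): `IO.inspect code` shows the quoted form of the macro argument, roughly
#   {{:., _, [{:__aliases__, _, [:IO]}, :puts]}, _, ["hello"]}
# and, because the module body runs at compile time, "Different code" is printed
# instead of "hello" — the original call is discarded and replaced by the quoted
# expression the macro returned.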
| 30.210526 | 85 | 0.733449 |
731a6e5ddc65fc2869a233040691af723b989ef1 | 2,926 | ex | Elixir | lib/bitcoin_simulator/bitcoin_core.ex | sidharth-shridhar/Bitcoin-Miner-Simulation | 2789dc8fe5f65269789540f675fac682e431e518 | [
"MIT"
] | 1 | 2021-12-16T08:31:24.000Z | 2021-12-16T08:31:24.000Z | lib/bitcoin_simulator/bitcoin_core.ex | hojason117/BitcoinSimulator | f85e623eec1923a2c0d418388f440cc06b6a5283 | [
"MIT"
] | null | null | null | lib/bitcoin_simulator/bitcoin_core.ex | hojason117/BitcoinSimulator | f85e623eec1923a2c0d418388f440cc06b6a5283 | [
"MIT"
] | null | null | null | defmodule BitcoinSimulator.BitcoinCore do
alias BitcoinSimulator.BitcoinCore.{Blockchain, Mining, Network, RawTransaction, Wallet}
# Block Chain
def get_new_blockchain, do: Blockchain.get_new_blockchain()
def get_best_block_hash(blockchain), do: Blockchain.get_best_block_hash(blockchain)
def block_header_hash(header), do: Blockchain.block_header_hash(header)
def transaction_hash(tx), do: Blockchain.transaction_hash(tx)
def verify_block(blockchain, block), do: Blockchain.verify_block(blockchain, block)
def verify_transaction(blockchain, tx), do: Blockchain.verify_transaction(blockchain, tx)
def add_block(block, blockchain, wallet, mempool, mining_process \\ nil, mining_txs \\ nil) do
Blockchain.add_block(block, blockchain, wallet, mempool, mining_process, mining_txs)
end
# Mining
def get_new_mempool, do: Mining.get_new_mempool()
def get_top_unconfirmed_transactions(mempool), do: Mining.get_top_unconfirmed_transactions(mempool)
def get_block_template(prev_hash, txs), do: Mining.get_block_template(prev_hash, txs)
def mine(block, coinbase_addr, self_id), do: Mining.mine(block, coinbase_addr, self_id)
def add_unconfirmed_tx(mempool, tx, tx_hash), do: Mining.add_unconfirmed_tx(mempool, tx, tx_hash)
def calc_cainbase_value(blockchain, txs), do: Mining.calc_cainbase_value(blockchain, txs)
# Network
def get_new_message_record, do: %Network.MessageRecord{}
def get_initial_neighbors(id), do: Network.get_initial_neighbors(id)
def get_initial_blockchain(neighbors), do: Network.get_initial_blockchain(neighbors)
def exchange_neighbors(neighbors), do: Network.exchange_neighbors(neighbors)
def mix_neighbors(neighbors, self_id), do: Network.mix_neighbors(neighbors, self_id)
def message_seen?(record, type, hash), do: Network.message_seen?(record, type, hash)
def saw_message(record, type, hash), do: Network.saw_message(record, type, hash)
def clean_message_record(record), do: Network.clean_message_record(record)
def broadcast_message(type, message, neighbors, sender), do: Network.broadcast_message(type, message, neighbors, sender)
# Raw Transaction
def create_raw_transaction(in_addresses, out_addresses, out_values, change_address, change_value) do
RawTransaction.create_raw_transaction(in_addresses, out_addresses, out_values, change_address, change_value)
end
def create_coinbase_transaction(out_addresses, out_values), do: RawTransaction.create_coinbase_transaction(out_addresses, out_values)
# Wallet
def get_new_wallet, do: Wallet.get_new_wallet()
def get_new_address(wallet), do: Wallet.get_new_address(wallet)
def combine_unspent_addresses(wallet, target_value), do: Wallet.combine_unspent_addresses(wallet, target_value)
def spend_address(wallet, address), do: Wallet.spend_address(wallet, address)
def import_address(wallet, address), do: Wallet.import_address(wallet, address)
end
| 37.512821 | 135 | 0.79836 |
731a72ec964df31941951b23de19a428e58e9d43 | 741 | exs | Elixir | iex.exs | felix-starman/dotfiles | 708172e11c82c9c6feb6ec02beeefbb7c864f655 | [
"CC0-1.0"
] | 2 | 2020-11-02T17:23:09.000Z | 2020-11-02T23:20:21.000Z | iex.exs | felix-starman/dotfiles | 708172e11c82c9c6feb6ec02beeefbb7c864f655 | [
"CC0-1.0"
] | 1 | 2021-03-26T16:04:50.000Z | 2021-03-26T16:04:50.000Z | iex.exs | felix-starman/dotfiles | 708172e11c82c9c6feb6ec02beeefbb7c864f655 | [
"CC0-1.0"
] | null | null | null | # IEx.configure colors: [enabled: true]
# IEx.configure colors: [ eval_result: [ :cyan, :bright ] ]
IO.puts IO.ANSI.magenta_background() <> IO.ANSI.white() <> " ❄❄❄ All things serve the BEAM ❄❄❄ " <> IO.ANSI.reset
Application.put_env(:elixir, :ansi_enabled, true)
IEx.configure(
colors: [
eval_result: [:green, :bright] ,
eval_error: [[:red,:bright,"Bug Bug ..!!"]],
eval_info: [:yellow, :bright ],
],
default_prompt: [
"\e[G", # ANSI CHA, move cursor to column 1
"%prefix",:white,"|",
:blue,
"%counter",
:white,
"|",
:red,
"▶" , # plain string
:white,
"▶▶" , # plain string
# ❤ ❤-»" , # plain string
:reset
] |> IO.ANSI.format |> IO.chardata_to_string
)
| 28.5 | 113 | 0.558704 |
731a9805869cd5417a3840091aab8d229310e9c2 | 655 | ex | Elixir | lib/excheck/generator.ex | devstopfix/excheck | c8cbf457e9bf7c13a0a4b70c60c8e02495ff53a1 | [
"MIT"
] | null | null | null | lib/excheck/generator.ex | devstopfix/excheck | c8cbf457e9bf7c13a0a4b70c60c8e02495ff53a1 | [
"MIT"
] | null | null | null | lib/excheck/generator.ex | devstopfix/excheck | c8cbf457e9bf7c13a0a4b70c60c8e02495ff53a1 | [
"MIT"
] | null | null | null | defmodule ExCheck.Generator do
@moduledoc """
Provides macros for generators.
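  ## Examples
  A rough sketch of using these generators inside an ExCheck property (the property
  name and predicate are illustrative; the surrounding module is assumed to `use ExCheck`):
      property :numbers_are_numeric do
        for_all n in number() do
          is_integer(n) or is_float(n)
        end
      end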
"""
defmacro __using__(_opts \\ []) do
quote do
# Import generators defined in :triq minus redefined ones.
import :triq_dom, except: [atom: 0], only: :functions
import ExCheck.Generator
end
end
@doc """
  Generates a number, which is either an integer or a real number.
"""
defmacro number do
quote do
oneof([int(), real()])
end
end
@doc """
  Generates an atom, including the special ones :nil, :false and :true.
"""
defmacro atom do
quote do
oneof([:triq_dom.atom(), oneof([bool(), nil])])
end
end
end
| 20.46875 | 68 | 0.627481 |
731ac2dcb62b2baddb73a879ef4f269c7043fcf0 | 2,166 | ex | Elixir | clients/health_care/lib/google_api/health_care/v1beta1/model/crypto_hash_config.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/health_care/lib/google_api/health_care/v1beta1/model/crypto_hash_config.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/health_care/lib/google_api/health_care/v1beta1/model/crypto_hash_config.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HealthCare.V1beta1.Model.CryptoHashConfig do
@moduledoc """
Pseudonymization method that generates surrogates via cryptographic hashing. Uses SHA-256. Outputs a base64-encoded representation of the hashed output. For example, `L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=`.
## Attributes
* `cryptoKey` (*type:* `String.t`, *default:* `nil`) - An AES 128/192/256 bit key. Causes the hash to be computed based on this key. A default key is generated for each Deidentify operation and is used when neither `crypto_key` nor `kms_wrapped` is specified. Must not be set if `kms_wrapped` is set.
* `kmsWrapped` (*type:* `GoogleApi.HealthCare.V1beta1.Model.KmsWrappedCryptoKey.t`, *default:* `nil`) - KMS wrapped key. Must not be set if `crypto_key` is set.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:cryptoKey => String.t() | nil,
:kmsWrapped => GoogleApi.HealthCare.V1beta1.Model.KmsWrappedCryptoKey.t() | nil
}
field(:cryptoKey)
field(:kmsWrapped, as: GoogleApi.HealthCare.V1beta1.Model.KmsWrappedCryptoKey)
end
defimpl Poison.Decoder, for: GoogleApi.HealthCare.V1beta1.Model.CryptoHashConfig do
def decode(value, options) do
GoogleApi.HealthCare.V1beta1.Model.CryptoHashConfig.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.HealthCare.V1beta1.Model.CryptoHashConfig do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 43.32 | 304 | 0.753463 |
731ac5ef748e741ab705956c3cf0090c2e982fca | 11,415 | ex | Elixir | lib/plaid/sandbox.ex | ethangunderson/elixir-plaid | 53aa0a87a4a837df6a2d15684870e7a58a003db6 | [
"MIT"
] | 16 | 2021-03-09T02:29:32.000Z | 2022-03-13T07:18:03.000Z | lib/plaid/sandbox.ex | ethangunderson/elixir-plaid | 53aa0a87a4a837df6a2d15684870e7a58a003db6 | [
"MIT"
] | 5 | 2021-04-24T20:38:14.000Z | 2022-03-19T22:03:09.000Z | lib/plaid/sandbox.ex | ethangunderson/elixir-plaid | 53aa0a87a4a837df6a2d15684870e7a58a003db6 | [
"MIT"
] | 2 | 2021-06-11T02:15:01.000Z | 2022-03-15T18:39:59.000Z | defmodule Plaid.Sandbox do
@moduledoc """
[Plaid Sandbox API](https://plaid.com/docs/api/sandbox/) calls and schema.
> Only used for sandbox testing purposes. None of these calls will work in `development` or `production`.
  🏗 I haven't yet tested the `bank_transfer` endpoints against the actual Plaid API because I can't
  get the `bank_transfers` product from Plaid yet. If you test it, let me know and I can remove
the in-progress status!
"""
alias Plaid.Castable
defmodule TransactionsOptions do
@moduledoc """
[Plaid API /sandbox/public_token/create transactions options schema.](https://plaid.com/docs/api/sandbox/#sandbox-public_token-create-request-transactions)
"""
@type t :: %__MODULE__{
start_date: String.t(),
end_date: String.t()
}
@derive Jason.Encoder
defstruct [:start_date, :end_date]
end
defmodule CreatePublicTokenResponse do
@moduledoc """
[Plaid API /sandbox/public_token/create response schema.](https://plaid.com/docs/api/sandbox/#sandboxpublic_tokencreate)
"""
@behaviour Castable
@type t :: %__MODULE__{
public_token: String.t(),
request_id: String.t()
}
defstruct [:public_token, :request_id]
@impl true
def cast(generic_map) do
%__MODULE__{
public_token: generic_map["public_token"],
request_id: generic_map["request_id"]
}
end
end
@doc """
Create a valid `public_token` with arbitrary details.
Does a `POST /sandbox/public_token/create` call to create a new
sandbox public token.
Params:
* `institution_id` - The ID of the institution the Item will be associated with.
* `initial_products` - The products to initially pull for the Item.
Options:
* `:webhook` - Specify a webhook to associate with the new Item.
* `:override_username` - Test username to use for the creation of the Sandbox Item.
* `:override_password` - Test password to use for the creation of the Sandbox Item.
* `:transactions` - Options for transactions on the new Item.
## Examples
Sandbox.create_public_token("ins_1", ["auth"], client_id: "123", secret: "abc")
{:ok, %Sandbox.CreatePublicTokenResponse{}}
"""
@spec create_public_token(String.t(), [String.t()], options, Plaid.config()) ::
{:ok, CreatePublicTokenResponse.t()} | {:error, Plaid.Error.t()}
when options: %{
optional(:webhook) => String.t(),
optional(:override_username) => String.t(),
optional(:override_password) => String.t(),
optional(:transactions) => TransactionsOptions.t()
}
def create_public_token(institution_id, initial_products, options \\ %{}, config) do
options_payload =
Map.take(options, [:webhook, :override_username, :override_password, :transactions])
payload =
%{}
|> Map.put(:institution_id, institution_id)
|> Map.put(:initial_products, initial_products)
|> Map.put(:options, options_payload)
Plaid.Client.call(
"/sandbox/public_token/create",
payload,
CreatePublicTokenResponse,
config
)
end
defmodule ResetItemLoginResponse do
@moduledoc """
[Plaid API /sandbox/item/reset_login response schema.](https://plaid.com/docs/api/sandbox/#sandboxitemreset_login)
"""
@behaviour Castable
@type t :: %__MODULE__{
reset_login: boolean(),
request_id: String.t()
}
defstruct [:reset_login, :request_id]
@impl true
def cast(generic_map) do
%__MODULE__{
reset_login: generic_map["reset_login"],
request_id: generic_map["request_id"]
}
end
end
@doc """
Force an item into a "login required" state.
Does a `POST /sandbox/item/reset_login` call which forces an item into the
  `ITEM_LOGIN_REQUIRED` state to simulate an item whose login is no longer valid.
Params:
* `access_token` - The access token associated with the Item to reset the login for.
## Examples
Sandbox.reset_item_login("access-prod-123xxx", client_id: "123", secret: "abc")
{:ok, %Sandbox.ResetItemLoginResponse{}}
"""
@spec reset_item_login(String.t(), Plaid.config()) ::
{:ok, ResetItemLoginResponse.t()} | {:error, Plaid.Error.t()}
def reset_item_login(access_token, config) do
Plaid.Client.call(
"/sandbox/item/reset_login",
%{access_token: access_token},
ResetItemLoginResponse,
config
)
end
@doc """
Change the verification status of an item.
Does a `POST /sandbox/item/set_verification_status` call to change the
status of an item in order to simulate the Automated Micro-deposit flow.
Params:
* `access_token` - The access token associated with the Item data is being requested for.
* `account_id` - The ID of the account whose verification status is to be modified.
* `verification_status` - The verification status to set the account to.
## Examples
Sandbox.set_item_verification_status("access-prod-123xxx", "39flxk4ek2xs", "verification_expired", client_id: "123", secret: "abc")
{:ok, %Plaid.SimpleResponse{request_id: "9bkemelske"}}
"""
@spec set_item_verification_status(String.t(), String.t(), String.t(), Plaid.config()) ::
{:ok, Plaid.SimpleResponse.t()} | {:error, Plaid.Error.t()}
def set_item_verification_status(access_token, account_id, verification_status, config) do
payload = %{
access_token: access_token,
account_id: account_id,
verification_status: verification_status
}
Plaid.Client.call(
"/sandbox/item/set_verification_status",
payload,
Plaid.SimpleResponse,
config
)
end
defmodule FireItemWebhookResponse do
@moduledoc """
[Plaid API /sandbox/item/fire_webhook response schema.](https://plaid.com/docs/api/sandbox/#sandboxitemfire_webhook)
"""
@behaviour Castable
@type t :: %__MODULE__{
webhook_fired: boolean(),
request_id: String.t()
}
defstruct [:webhook_fired, :request_id]
@impl true
def cast(generic_map) do
%__MODULE__{
webhook_fired: generic_map["webhook_fired"],
request_id: generic_map["request_id"]
}
end
end
@doc """
Fire a fake webhook to an Item's webhook endpoint.
  Does a `POST /sandbox/item/fire_webhook` call which causes the given webhook code
  to be sent to the Item's configured webhook endpoint, for testing webhook handling.
Params:
* `access_token` - The access token associated with the Item to fire the webhook for.
* `webhook_code` - The webhook code to send.
> `webhook_code` only supports `DEFAULT_UPDATE` for now.
## Examples
Sandbox.fire_item_webhook("access-prod-123xxx", "DEFAULT_UPDATE", client_id: "123", secret: "abc")
{:ok, %Sandbox.FireItemWebhookResponse{}}
"""
@spec fire_item_webhook(String.t(), String.t(), Plaid.config()) ::
{:ok, FireItemWebhookResponse.t()} | {:error, Plaid.Error.t()}
def fire_item_webhook(access_token, webhook_code, config) do
Plaid.Client.call(
"/sandbox/item/fire_webhook",
%{access_token: access_token, webhook_code: webhook_code},
FireItemWebhookResponse,
config
)
end
@doc """
Simulate a bank transfer event in the Plaid Sandbox.
Does a `POST /sandbox/bank_transfer/simulate` call to simulate a bank transfer
in the plaid sandbox for testing purposes.
Params:
* `bank_transfer_id` - Plaid’s unique identifier for a bank transfer.
* `event_type` - The asynchronous event to be simulated. May be: posted, failed, or reversed.
Options:
* `:failure_reason` - The failure reason if the type of this transfer is "failed" or "reversed".
## Examples
Sandbox.simulate_bank_transfer("bt_123xxx", "posted", client_id: "123", secret: "abc")
{:ok, %Plaid.SimpleResponse{}}
"""
@spec simulate_bank_transfer(String.t(), String.t(), options, Plaid.config()) ::
{:ok, Plaid.SimpleResponse.t()} | {:error, Plaid.Error.t()}
when options: %{
optional(:failure_reason) => %{
optional(:ach_return_code) => String.t(),
optional(:description) => String.t()
}
}
def simulate_bank_transfer(bank_transfer_id, event_type, options \\ %{}, config) do
options_payload = Map.take(options, [:failure_reason])
payload =
%{}
|> Map.put(:bank_transfer_id, bank_transfer_id)
|> Map.put(:event_type, event_type)
|> Map.merge(options_payload)
Plaid.Client.call(
"/sandbox/bank_transfer/simulate",
payload,
Plaid.SimpleResponse,
config
)
end
@doc """
Manually fire a Bank Transfer webhook.
Does a `POST /sandbox/bank_transfer/fire_webhook` call to manually trigger
a bank transfer webhook.
Params:
* `webhook` - The URL to which the webhook should be sent.
## Examples
Sandbox.fire_bank_transfer_webhook("https://example.com/webhook", client_id: "123", secret: "abc")
{:ok, %Plaid.SimpleResponse{}}
"""
@spec fire_bank_transfer_webhook(String.t(), Plaid.config()) ::
{:ok, Plaid.SimpleResponse.t()} | {:error, Plaid.Error.t()}
def fire_bank_transfer_webhook(webhook, config) do
Plaid.Client.call(
"/sandbox/bank_transfer/fire_webhook",
%{webhook: webhook},
Plaid.SimpleResponse,
config
)
end
defmodule CreateProcessorTokenResponse do
@moduledoc """
[Plaid API /sandbox/processor_token/create response schema.](https://plaid.com/docs/api/sandbox/#sandboxprocessor_tokencreate)
"""
@behaviour Castable
@type t :: %__MODULE__{
processor_token: String.t(),
request_id: String.t()
}
defstruct [:processor_token, :request_id]
@impl true
def cast(generic_map) do
%__MODULE__{
processor_token: generic_map["processor_token"],
request_id: generic_map["request_id"]
}
end
end
@doc """
Create a valid `processor_token` for an arbitrary institution ID and test credentials.
Does a `POST /sandbox/processor_token/create` call to create a valid `processor_token`
to use with all the processor endpoints in the sandbox.
Params:
* `institution_id` - The ID of the institution the Item will be associated with.
Options:
* `:override_username` - Test username to use for the creation of the Sandbox Item.
* `:override_password` - Test password to use for the creation of the Sandbox Item.
## Examples
Sandbox.create_processor_token("ins_1", client_id: "123", secret: "abc")
{:ok, %Sandbox.CreateProcessorTokenResponse{}}
"""
@spec create_processor_token(String.t(), options, Plaid.config()) ::
{:ok, CreateProcessorTokenResponse.t()} | {:error, Plaid.Error.t()}
when options: %{
optional(:override_username) => String.t(),
optional(:override_password) => String.t()
}
def create_processor_token(institution_id, options \\ %{}, config) do
options_payload = Map.take(options, [:override_username, :override_password])
payload = %{institution_id: institution_id, options: options_payload}
Plaid.Client.call(
"/sandbox/processor_token/create",
payload,
CreateProcessorTokenResponse,
config
)
end
end
| 31.273973 | 159 | 0.665353 |
731b049ef7caea30df070b9f337d1d4820a3f16e | 136 | ex | Elixir | testData/org/elixir_lang/parser_definition/unmatched_expression_parsing_test_case/BlockItemCommentStab.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/unmatched_expression_parsing_test_case/BlockItemCommentStab.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/unmatched_expression_parsing_test_case/BlockItemCommentStab.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | try do
catch
# A user could create an error that looks like a builtin one
# causing an error.
:error, _ ->
inspect(reason)
end | 19.428571 | 62 | 0.683824 |
731b0cc165e369322faa8acc535290c818b455f4 | 1,174 | ex | Elixir | lib/asciinema/file_store/local.ex | acloudiator/asciinema-server | f0afa4cb4312822f5dd56008b0c5ec9c7a410f85 | [
"Apache-2.0"
] | 1 | 2021-07-07T12:36:10.000Z | 2021-07-07T12:36:10.000Z | lib/asciinema/file_store/local.ex | acloudiator/asciinema-server | f0afa4cb4312822f5dd56008b0c5ec9c7a410f85 | [
"Apache-2.0"
] | null | null | null | lib/asciinema/file_store/local.ex | acloudiator/asciinema-server | f0afa4cb4312822f5dd56008b0c5ec9c7a410f85 | [
"Apache-2.0"
] | null | null | null | defmodule Asciinema.FileStore.Local do
use Asciinema.FileStore
import Plug.Conn
def put_file(dst_path, src_local_path, _content_type, _compress \\ false) do
full_dst_path = base_path() <> dst_path
parent_dir = Path.dirname(full_dst_path)
with :ok <- File.mkdir_p(parent_dir),
{:ok, _} <- File.copy(src_local_path, full_dst_path) do
:ok
end
end
def serve_file(conn, path, nil) do
do_serve_file(conn, path)
end
def serve_file(conn, path, filename) do
conn
|> put_resp_header("content-disposition", "attachment; filename=#{filename}")
|> do_serve_file(path)
end
defp do_serve_file(conn, path) do
conn
|> put_resp_header("content-type", MIME.from_path(path))
|> send_file(200, base_path() <> path)
|> halt
end
def open_file(path) do
File.open(base_path() <> path, [:binary, :read])
end
def open_file(path, nil) do
open_file(path)
end
def open_file(path, function) do
File.open(base_path() <> path, [:binary, :read], function)
end
defp config do
Application.get_env(:asciinema, __MODULE__)
end
defp base_path do
Keyword.get(config(), :path)
end
end
| 23.959184 | 81 | 0.672913 |
731b1785e754ca5fa7151d29d97cba670f181f64 | 1,594 | exs | Elixir | mix.exs | primait/bridge_ex | 92c9322b105aba21a708588a1e0e02cc4c37c9ca | [
"MIT"
] | null | null | null | mix.exs | primait/bridge_ex | 92c9322b105aba21a708588a1e0e02cc4c37c9ca | [
"MIT"
] | 18 | 2022-01-17T09:13:27.000Z | 2022-03-28T13:39:51.000Z | mix.exs | primait/bridge_ex | 92c9322b105aba21a708588a1e0e02cc4c37c9ca | [
"MIT"
] | null | null | null | defmodule BridgeEx.MixProject do
use Mix.Project
@source_url "https://github.com/primait/bridge_ex"
@version "0.3.0-rc.2"
def project do
[
app: :bridge_ex,
version: @version,
elixir: "~> 1.10",
elixirc_paths: elixirc_paths(Mix.env()),
start_permanent: Mix.env() == :prod,
deps: deps(),
docs: docs(),
package: package()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
defp elixirc_paths(env) when env in [:dev, :test], do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:absinthe, "~> 1.6"},
{:bypass, "~> 2.1", only: :test},
{:credo, "~> 1.5", only: [:dev, :test], runtime: false},
{:dialyxir, "~> 1.1", only: [:dev, :test], runtime: false},
{:ex_doc, ">= 0.25.3", only: :dev, runtime: false},
{:jason, "~> 1.2"},
{:prima_auth0_ex, "~> 0.3.0-rc.1.2"},
{:telepoison, "~> 1.0.0-rc.4"}
]
end
defp docs do
[
extras: [
"LICENSE.md": [title: "License"],
"README.md": [title: "Overview"]
],
main: "readme",
source_url: @source_url,
source_ref: "v#{@version}",
formatters: ["html"]
]
end
def package do
[
description: "BridgeEx is a library to build bridges to other services.",
name: "bridge_ex",
maintainers: ["Prima"],
licenses: ["MIT"],
links: %{"GitHub" => @source_url}
]
end
end
| 23.791045 | 80 | 0.543915 |
731b49f33a96f1c7f052b6fba48adc8c475be173 | 6,863 | ex | Elixir | lib/teslamate/api.ex | SergioRius/teslamate | 747400f6b12c660c6ffad8a941eb27ada7d61c18 | [
"MIT"
] | null | null | null | lib/teslamate/api.ex | SergioRius/teslamate | 747400f6b12c660c6ffad8a941eb27ada7d61c18 | [
"MIT"
] | null | null | null | lib/teslamate/api.ex | SergioRius/teslamate | 747400f6b12c660c6ffad8a941eb27ada7d61c18 | [
"MIT"
] | null | null | null | defmodule TeslaMate.Api do
use GenServer
require Logger
alias TeslaMate.Auth.{Tokens, Credentials}
alias TeslaMate.{Vehicles, Convert}
alias TeslaApi.Auth
alias Finch.Response
import Core.Dependency, only: [call: 3, call: 2]
defstruct name: nil, deps: %{}
alias __MODULE__, as: State
@name __MODULE__
# API
def start_link(opts) do
opts = Keyword.put_new(opts, :name, @name)
name = Keyword.fetch!(opts, :name)
GenServer.start_link(__MODULE__, opts, name: name)
end
## State
def list_vehicles(name \\ @name) do
with {:ok, auth} <- fetch_auth(name) do
TeslaApi.Vehicle.list(auth)
|> handle_result(auth, name)
end
end
def get_vehicle(name \\ @name, id) do
with {:ok, auth} <- fetch_auth(name) do
TeslaApi.Vehicle.get(auth, id)
|> handle_result(auth, name)
end
end
def get_vehicle_with_state(name \\ @name, id) do
with {:ok, auth} <- fetch_auth(name) do
TeslaApi.Vehicle.get_with_state(auth, id)
|> handle_result(auth, name)
end
end
def stream(name \\ @name, vid, receiver) do
with {:ok, %Auth{} = auth} <- fetch_auth(name) do
TeslaApi.Stream.start_link(auth: auth, vehicle_id: vid, receiver: receiver)
end
end
## Internals
def signed_in?(name \\ @name) do
case fetch_auth(name) do
{:error, :not_signed_in} -> false
{:ok, _} -> true
end
end
def sign_in(name \\ @name, credentials) do
case fetch_auth(name) do
{:error, :not_signed_in} -> GenServer.call(name, {:sign_in, [credentials]}, 30_000)
{:ok, %Auth{}} -> {:error, :already_signed_in}
end
end
def sign_in(name \\ @name, device_id, mfa_passcode, %Auth.MFA.Ctx{} = ctx) do
case fetch_auth(name) do
{:error, :not_signed_in} ->
GenServer.call(name, {:sign_in, [device_id, mfa_passcode, ctx]}, 30_000)
{:ok, %Auth{}} ->
{:error, :already_signed_in}
end
end
# Callbacks
@impl true
def init(opts) do
name = Keyword.fetch!(opts, :name)
deps = %{
auth: Keyword.get(opts, :auth, TeslaMate.Auth),
vehicles: Keyword.get(opts, :vehicles, Vehicles)
}
^name = :ets.new(name, [:named_table, :set, :public, read_concurrency: true])
with %Tokens{access: at, refresh: rt} when is_binary(at) and is_binary(rt) <-
call(deps.auth, :get_tokens) do
restored_tokens = %Auth{token: at, refresh_token: rt, expires_in: 1.12 * 60 * 60}
case refresh_tokens(restored_tokens) do
{:ok, refreshed_tokens} ->
:ok = call(deps.auth, :save, [refreshed_tokens])
true = insert_auth(name, refreshed_tokens)
:ok = schedule_refresh(refreshed_tokens)
{:error, reason} ->
Logger.warning("Token refresh failed: #{inspect(reason, pretty: true)}")
true = insert_auth(name, restored_tokens)
:ok = schedule_refresh(restored_tokens)
end
end
{:ok, %State{name: name, deps: deps}}
end
@impl true
def handle_call({:sign_in, args}, _, state) do
case args do
[%Credentials{use_legacy_auth: true} = c] -> Auth.legacy_login(c.email, c.password)
[%Credentials{} = c] -> Auth.login(c.email, c.password)
[device_id, passcode, ctx] -> Auth.login(device_id, passcode, ctx)
end
|> case do
{:ok, %Auth{} = auth} ->
true = insert_auth(state.name, auth)
:ok = call(state.deps.auth, :save, [auth])
:ok = call(state.deps.vehicles, :restart)
:ok = schedule_refresh(auth)
{:reply, :ok, state}
{:ok, {:mfa, _devices, _ctx} = mfa} ->
{:reply, {:ok, mfa}, state}
{:error, %TeslaApi.Error{} = e} ->
{:reply, {:error, e}, state}
end
end
@impl true
def handle_info(:refresh_auth, %State{name: name} = state) do
case fetch_auth(name) do
{:ok, tokens} ->
Logger.info("Refreshing access token ...")
{:ok, refreshed_tokens} = Auth.refresh(tokens)
true = insert_auth(name, refreshed_tokens)
:ok = call(state.deps.auth, :save, [refreshed_tokens])
:ok = schedule_refresh(refreshed_tokens)
{:error, reason} ->
Logger.warning("Cannot refresh access token: #{inspect(reason)}")
end
{:noreply, state}
end
def handle_info(msg, state) do
Logger.info("#{__MODULE__} / unhandled message: #{inspect(msg, pretty: true)}")
{:noreply, state}
end
## Private
defp refresh_tokens(%Auth{} = tokens) do
case Application.get_env(:teslamate, :disable_token_refresh, false) do
true ->
Logger.info("Token refresh is disabled")
{:ok, tokens}
false ->
with {:ok, %Auth{} = refresh_tokens} <- Auth.refresh(tokens) do
Logger.info("Refreshed api tokens")
{:ok, refresh_tokens}
end
end
end
defp schedule_refresh(%Auth{} = auth) do
ms =
auth.expires_in
|> Kernel.*(0.9)
|> round()
|> :timer.seconds()
duration =
ms
|> div(1000)
|> Convert.sec_to_str()
|> Enum.reject(&String.ends_with?(&1, "s"))
|> Enum.join(" ")
Logger.info("Scheduling token refresh in #{duration}")
Process.send_after(self(), :refresh_auth, ms)
:ok
end
defp insert_auth(name, %Auth{} = auth) do
:ets.insert(name, auth: auth)
end
defp fetch_auth(name) do
case :ets.lookup(name, :auth) do
[auth: %Auth{} = auth] -> {:ok, auth}
[] -> {:error, :not_signed_in}
end
rescue
_ in ArgumentError -> {:error, :not_signed_in}
end
defp handle_result(result, auth, name) do
case result do
{:error, %TeslaApi.Error{reason: :unauthorized}} ->
true = :ets.delete(name, :auth)
{:error, :not_signed_in}
{:error, %TeslaApi.Error{reason: reason, env: %Response{status: status, body: body}}} ->
Logger.error("TeslaApi.Error / #{status} – #{inspect(body, pretty: true)}")
{:error, reason}
{:error, %TeslaApi.Error{reason: reason, message: msg}} ->
if is_binary(msg) and msg != "", do: Logger.warning("TeslaApi.Error / #{msg}")
{:error, reason}
{:ok, vehicles} when is_list(vehicles) ->
vehicles =
vehicles
|> Task.async_stream(&preload_vehicle(&1, auth), timeout: 32_500)
|> Enum.map(fn {:ok, vehicle} -> vehicle end)
{:ok, vehicles}
{:ok, %TeslaApi.Vehicle{} = vehicle} ->
{:ok, vehicle}
end
end
defp preload_vehicle(%TeslaApi.Vehicle{state: "online", id: id} = vehicle, auth) do
case TeslaApi.Vehicle.get_with_state(auth, id) do
{:ok, %TeslaApi.Vehicle{} = vehicle} ->
vehicle
{:error, reason} ->
Logger.warning("TeslaApi.Error / #{inspect(reason, pretty: true)}")
vehicle
end
end
defp preload_vehicle(%TeslaApi.Vehicle{} = vehicle, _state), do: vehicle
end
| 27.342629 | 94 | 0.602943 |
731b87cdc9a428b5d197b98e8b66cdee527e5e12 | 1,669 | exs | Elixir | priv/repo/seeds.exs | mgwidmann/slack_coder | 3ff4375777c1bdbc94ba1491fdf4e1c0fa465edf | [
"MIT"
] | 15 | 2015-09-23T16:03:28.000Z | 2018-12-04T21:48:04.000Z | priv/repo/seeds.exs | mgwidmann/slack_coder | 3ff4375777c1bdbc94ba1491fdf4e1c0fa465edf | [
"MIT"
] | 27 | 2016-01-12T16:44:31.000Z | 2017-10-13T16:09:36.000Z | priv/repo/seeds.exs | mgwidmann/slack_coder | 3ff4375777c1bdbc94ba1491fdf4e1c0fa465edf | [
"MIT"
] | 4 | 2016-09-01T12:08:24.000Z | 2017-09-21T15:07:57.000Z | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# SlackCoder.Repo.insert!(%SomeModel{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
SlackCoder.Github.synchronize("weddingwire", "weddingwire-ng")
SlackCoder.Github.synchronize("weddingwire", "components")
SlackCoder.Github.synchronize("weddingwire", "ww_constants")
SlackCoder.Github.synchronize("weddingwire", "guilds")
SlackCoder.Github.synchronize("weddingwire", "ww-contentful-cms")
SlackCoder.Github.synchronize("weddingwire", "newlywish")
SlackCoder.Github.synchronize("weddingwire", "weddingwire-android")
SlackCoder.Github.synchronize("weddingwire", "weddingwire-ios")
SlackCoder.Github.synchronize("weddingwire", "sem-catalog")
SlackCoder.Github.synchronize("weddingwire", "utils-android")
SlackCoder.Github.synchronize("weddingwire", "vendor-reviews-android")
SlackCoder.Github.synchronize("weddingwire", "weddingwire_api")
SlackCoder.Github.synchronize("weddingwire", "weddingwire-vendor-android")
require Logger
for {owner, type} <- [mgwidmann: :users] do
apply(Tentacat.Repositories, :"list_#{type}", [owner, SlackCoder.Github.client])
|> Enum.map(fn
%{"name" => name} -> name
{_status, _data} ->
Logger.warn "Unable to fetch data for #{owner}"
nil
end)
|> Enum.filter(&(&1))
|> Enum.each(fn repo ->
Logger.info "SlackCoder.Github.synchronize #{inspect to_string(owner)}, #{inspect repo}"
SlackCoder.Github.synchronize(to_string(owner), repo)
end)
end
| 39.738095 | 92 | 0.740563 |
731b8af000817c910b44f933d99de5e50586dc20 | 1,704 | ex | Elixir | clients/slides/lib/google_api/slides/v1/model/refresh_sheets_chart_request.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/refresh_sheets_chart_request.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/refresh_sheets_chart_request.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Slides.V1.Model.RefreshSheetsChartRequest do
@moduledoc """
Refreshes an embedded Google Sheets chart by replacing it with the latest version of the chart from Google Sheets. NOTE: Refreshing charts requires at least one of the spreadsheets.readonly, spreadsheets, drive.readonly, or drive OAuth scopes.
## Attributes
- objectId (String.t): The object ID of the chart to refresh. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:objectId => any()
}
field(:objectId)
end
defimpl Poison.Decoder, for: GoogleApi.Slides.V1.Model.RefreshSheetsChartRequest do
def decode(value, options) do
GoogleApi.Slides.V1.Model.RefreshSheetsChartRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Slides.V1.Model.RefreshSheetsChartRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.5 | 247 | 0.756455 |
731b9535987012ec4292fd2e8f3c9c34f821af81 | 974 | ex | Elixir | lib/pkg_deb.ex | sblaisot/pkg_deb | a9ec11fa119d6005d935c1b629960c9697864d27 | [
"MIT"
] | null | null | null | lib/pkg_deb.ex | sblaisot/pkg_deb | a9ec11fa119d6005d935c1b629960c9697864d27 | [
"MIT"
] | 12 | 2020-07-18T10:38:53.000Z | 2022-03-24T04:04:12.000Z | lib/pkg_deb.ex | sblaisot/pkg_deb | a9ec11fa119d6005d935c1b629960c9697864d27 | [
"MIT"
] | 1 | 2020-11-28T21:08:54.000Z | 2020-11-28T21:08:54.000Z | defmodule PkgDeb do
@moduledoc """
Documentation for PkgDeb.
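  ## Examples
  A sketch of wiring this into a Mix release as a custom step in `mix.exs`
  (the `maintainer:` option key is an illustrative assumption — see the project
  README for the supported configuration; `create/2` returns the release, so it
  composes with other steps):
      releases: [
        my_app: [
          steps: [:assemble, &PkgDeb.create(&1, maintainer: "you@example.com")]
        ]
      ]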
"""
alias PkgDeb.Format.{Control, Data, Package}
@doc """
  Receives a Mix.Release struct and creates a .deb file.
"""
def create(%Mix.Release{} = release, config) when is_list(config) do
{:ok, package_config} = PkgDeb.Format.Config.build_config(release, config)
PkgCore.Logger.debug("pkg_deb", "building .deb package..")
release
|> init_dir()
|> Data.build(package_config)
|> Control.build()
|> Package.build()
|> remove_dir()
release
end
def create(release, _), do: release
def create_deb(release, config), do: create(release, config)
defp remove_dir({_, deb_root, _}) do
deb_root
|> File.rm_rf()
end
defp init_dir(release) do
deb_root = Path.join([release.path, "..", "..", "..", "deb"])
:ok = File.mkdir_p(deb_root)
:ok = File.write(Path.join(deb_root, "debian-binary"), "2.0\n")
{release, deb_root}
end
end
| 21.644444 | 78 | 0.635524 |
731bd8ce06ba15f1b8526ea3c6a69ab234ba9f20 | 2,261 | exs | Elixir | rel/config.exs | glv/revista | 00ecb0780c62a5525155a773b959b169e0e0500d | [
"MIT"
] | 17 | 2019-01-31T18:33:09.000Z | 2022-01-18T12:38:49.000Z | rel/config.exs | glv/revista | 00ecb0780c62a5525155a773b959b169e0e0500d | [
"MIT"
] | null | null | null | rel/config.exs | glv/revista | 00ecb0780c62a5525155a773b959b169e0e0500d | [
"MIT"
] | 4 | 2018-11-10T01:56:17.000Z | 2020-06-09T21:10:41.000Z | # Import all plugins from `rel/plugins`
# They can then be used by adding `plugin MyPlugin` to
# either an environment, or release definition, where
# `MyPlugin` is the name of the plugin module.
~w(rel plugins *.exs)
|> Path.join()
|> Path.wildcard()
|> Enum.map(&Code.eval_file(&1))
use Mix.Releases.Config,
# This sets the default release built by `mix release`
default_release: :default,
# This sets the default environment used by `mix release`
default_environment: Mix.env()
# For a full list of config options for both releases
# and environments, visit https://hexdocs.pm/distillery/config/distillery.html
# You may define one or more environments in this file,
# an environment's settings will override those of a release
# when building in that environment, this combination of release
# and environment configuration is called a profile
environment :dev do
# If you are running Phoenix, you should make sure that
# server: true is set and the code reloader is disabled,
# even in dev mode.
# It is recommended that you build with MIX_ENV=prod and pass
# the --env flag to Distillery explicitly if you want to use
# dev mode.
set(dev_mode: true)
set(include_erts: false)
set(
cookie: :"I5k@_{(,QW|mYbuK7,jJsM_vbdw<H?L$kcV?^/;{Hk3j,p>vPxPtL_w!k4TwHUx3"
)
end
environment :prod do
set(include_erts: true)
set(include_src: false)
set(
cookie: :"d}V<:~k%uIt{9v:HW%V`wgjzZNa7BaZqNN7B!.]9RN370EN9IMCHtsZj(<@ZdgHQ"
)
end
# You may define one or more releases in this file.
# If you have not set a default release, or selected one
# when running `mix release`, the first release in the file
# will be used by default
release :revista do
set(version: "0.1.1")
set(
applications: [
:runtime_tools,
admin: :permanent,
auth: :permanent,
cms: :permanent,
twitter: :permanent,
web: :permanent
]
)
set(
commands: [
migrate_auth: "rel/commands/migrate_auth.sh",
migrate_cms: "rel/commands/migrate_cms.sh"
]
)
set(
config_providers: [
{Mix.Releases.Config.Providers.Elixir,
["${RELEASE_ROOT_DIR}/etc/config.exs"]}
]
)
set(
overlays: [
{:copy, "rel/config/config.exs", "etc/config.exs"}
]
)
end
| 25.988506 | 79 | 0.689076 |
731bddeca200fa73b11271d6549ae58e24826f98 | 2,020 | ex | Elixir | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_string_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_string_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_string_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1StringResponse do
@moduledoc """
## Attributes
* `data` (*type:* `String.t`, *default:* `nil`) - Details of the operation.
* `errorCode` (*type:* `String.t`, *default:* `nil`) - ID that can be used to find errors in the log files.
* `message` (*type:* `String.t`, *default:* `nil`) - Description of the operation.
* `requestId` (*type:* `String.t`, *default:* `nil`) - ID that can be used to find request details in the log files.
* `status` (*type:* `String.t`, *default:* `nil`) - Status of the operation.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:data => String.t(),
:errorCode => String.t(),
:message => String.t(),
:requestId => String.t(),
:status => String.t()
}
field(:data)
field(:errorCode)
field(:message)
field(:requestId)
field(:status)
end
defimpl Poison.Decoder, for: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1StringResponse do
def decode(value, options) do
GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1StringResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1StringResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.237288 | 120 | 0.694059 |
731c0d958f0e0314bd56dd0c00fb9379810df3e8 | 490 | ex | Elixir | lib/web/controllers/class_controller.ex | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | 2 | 2019-05-14T11:36:44.000Z | 2020-07-01T08:54:04.000Z | lib/web/controllers/class_controller.ex | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | null | null | null | lib/web/controllers/class_controller.ex | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | 1 | 2021-01-29T14:12:40.000Z | 2021-01-29T14:12:40.000Z | defmodule Web.ClassController do
use Web, :controller
alias Web.Class
def index(conn, _params) do
classes = Class.all(alpha: true)
conn
|> assign(:classes, classes)
|> render(:index)
end
def show(conn, %{"id" => id}) do
case Class.get(id) do
nil ->
conn |> redirect(to: public_page_path(conn, :index))
class ->
conn
|> assign(:class, class)
|> assign(:extended, true)
|> render(:show)
end
end
end
| 18.148148 | 60 | 0.567347 |
731c131c0efc8c52ede22ad7b28760199f8469b9 | 1,183 | exs | Elixir | mix.exs | jcartwright/rally_api | fdcea7d2a40c7d9c0e392d6836693f5a9a9677a9 | [
"MIT"
] | null | null | null | mix.exs | jcartwright/rally_api | fdcea7d2a40c7d9c0e392d6836693f5a9a9677a9 | [
"MIT"
] | 11 | 2016-08-31T03:29:31.000Z | 2016-09-02T22:00:05.000Z | mix.exs | jcartwright/rally_api | fdcea7d2a40c7d9c0e392d6836693f5a9a9677a9 | [
"MIT"
] | null | null | null | defmodule RallyApi.Mixfile do
use Mix.Project
@description """
A toolkit wrapping Rally's REST webservice for Elixir
"""
def project do
[app: :rally_api,
version: "0.1.0",
elixir: "~> 1.3",
name: "RallyRestToolkitForElixir",
description: @description,
package: package(),
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps(),
source_url: "https://github.com/jcartwright/rally_api" ]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[applications: [:logger, :httpoison]]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[ {:httpoison, "~> 0.9.0"},
{:poison, "~> 2.0"},
{:exvcr, "~> 0.8.2", only: :test} ]
end
defp package do
[ maintainers: ["Jason Cartwright"],
licenses: ["MIT"],
links: %{"Github" => "https://github.com/jcartwright/rally_api"} ]
end
end
| 24.142857 | 77 | 0.606086 |
731c20a252255ee7d068f9082a852f66cfab16d7 | 7,906 | ex | Elixir | apps/engine/lib/engine/db/transaction.ex | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | 4 | 2020-11-30T17:38:57.000Z | 2021-01-23T21:29:41.000Z | apps/engine/lib/engine/db/transaction.ex | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | 24 | 2020-11-30T17:32:48.000Z | 2021-02-22T06:25:22.000Z | apps/engine/lib/engine/db/transaction.ex | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | null | null | null | defmodule Engine.DB.Transaction do
@moduledoc """
The Transaction record. This is one of the main entry points for the system, specifically accepting
transactions into the Childchain as `tx_bytes`. This expands those bytes into:
* `tx_bytes` - A binary of a transaction encoded by RLP.
  * `inputs` - The outputs that the transaction is acting on, whose state changes (e.g. they are marked as "spent")
* `outputs` - The newly created outputs
More information is contained in the `tx_bytes`. However, to keep the Childchain _lean_, we extract
data onto the record as needed.
The schema contains the following fields:
  - tx_bytes: The signed bytes submitted by users
  - tx_hash: The keccak hash of the transaction
  - tx_type: The type of the transaction as an integer, e.g. `1` for payment v1 transactions and `3` for fee transactions
  - tx_index: Index of the transaction in a block
  Virtual fields used for convenience and validation:
  - witnesses: Avoid decoding/parsing signatures multiple times along the validation process
  - signed_tx: Avoid calling decode(tx_bytes) multiple times along the validation process
  Note that with the current implementation, virtual fields are not populated when loading records from the DB
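  ## Examples
  A minimal usage sketch (the `tx_bytes` argument below is a placeholder, not a valid
  hex-encoded transaction):
      Engine.DB.Transaction.insert("0x...")
      {:ok, %Engine.DB.Transaction{}}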
"""
use Ecto.Schema
alias __MODULE__.TransactionChangeset
alias Ecto.Multi
alias Engine.DB.Block
alias Engine.DB.Output
alias Engine.DB.TransactionFee
alias Engine.Fee
alias Engine.Repo
alias ExPlasma.Encoding
alias ExPlasma.Transaction, as: ExPlasmaTx
require Logger
@type tx_bytes :: binary
@type hex_tx_bytes :: list(binary)
@type batch :: {non_neg_integer, binary, list(ExPlasma.Transaction.t())}
@type t() :: %{
block: Block.t(),
block_id: pos_integer(),
tx_index: non_neg_integer(),
id: pos_integer(),
inputs: list(Output.t()),
inserted_at: DateTime.t(),
outputs: list(Output.t()),
signed_tx: ExPlasma.Transaction.t() | nil,
tx_bytes: binary(),
tx_hash: <<_::256>>,
tx_type: pos_integer(),
updated_at: DateTime.t(),
witnesses: binary()
}
@timestamps_opts [inserted_at: :node_inserted_at, updated_at: :node_updated_at]
schema "transactions" do
field(:tx_bytes, :binary)
field(:tx_hash, :binary)
field(:tx_type, :integer)
field(:tx_index, :integer)
# Virtual fields used for convenience and validation
    # Avoid decoding/parsing signatures multiple times along the validation process
field(:witnesses, {:array, :string}, virtual: true)
# Avoid calling decode(tx_bytes) multiple times along the validation process
field(:signed_tx, :map, virtual: true)
belongs_to(:block, Block)
has_many(:inputs, Output, foreign_key: :spending_transaction_id)
has_many(:outputs, Output, foreign_key: :creating_transaction_id)
has_many(:fees, TransactionFee, foreign_key: :transaction_id)
field(:inserted_at, :utc_datetime)
field(:updated_at, :utc_datetime)
timestamps()
end
@doc """
Query a transaction by the given `field`.
Also preload given `preloads`
"""
def get_by(field, preloads) do
__MODULE__
|> Repo.get_by(field)
|> Repo.preload(preloads)
end
@spec encode_unsigned(t()) :: binary()
def encode_unsigned(transaction) do
{:ok, tx} = ExPlasma.decode(transaction.tx_bytes, signed: false)
ExPlasma.encode!(tx, signed: false)
end
@doc """
  Inserts a new transaction and associates it with the currently forming block.
  If including the new transaction in the forming block would violate the maximum number of transactions per block,
  then the transaction is associated with a newly inserted forming block.
"""
def insert(hex_tx_bytes) do
case decode(hex_tx_bytes) do
{:ok, data} ->
[data]
|> handle_transactions()
|> Repo.transaction()
|> case do
{:ok, result} ->
{:ok, Map.get(result, "transaction-1-of-1")}
{:error, _, changeset, _} ->
_ = Logger.error("Error when inserting transaction changeset #{inspect(changeset)}")
{:error, changeset}
error ->
_ = Logger.error("Error when inserting transaction #{inspect(error)}")
error
end
decode_error ->
_ = Logger.error("Error when inserting transaction decode_error #{inspect(decode_error)}")
decode_error
end
end
@doc """
  Inserts a new batch of transactions and associates it with the currently forming block.
  If including a new transaction in the forming block would violate the maximum number of transactions per block,
  then the transaction is associated with a newly inserted forming block.
"""
def insert_batch(txs_bytes) do
case decode_batch(txs_bytes) do
{:ok, batch} ->
batch
|> handle_transactions()
|> Repo.transaction()
|> case do
{:ok, _} = result ->
result
{:error, _, changeset, _} ->
_ = Logger.error("Error when inserting transaction changeset #{inspect(changeset)}")
{:error, changeset}
error ->
_ = Logger.error("Error when inserting transaction #{inspect(error)}")
error
end
decode_error ->
_ = Logger.error("Error when inserting transaction decode_error #{inspect(decode_error)}")
decode_error
end
end
@doc """
Inserts a fee transaction associated with a given block and transaction index
"""
def insert_fee_transaction(repo, currency_with_amount, block, fee_tx_index) do
currency_with_amount
|> TransactionChangeset.new_fee_transaction_changeset(block)
|> TransactionChangeset.set_blknum_and_tx_index(%{block: block, next_tx_index: fee_tx_index})
|> repo.insert()
end
defp handle_transactions(batch) do
all_fees = load_fees()
Enum.reduce(batch, Multi.new(), fn {index, tx_bytes, decoded}, multi ->
{:ok, fees} = load_fee(all_fees, decoded.tx_type)
changeset = TransactionChangeset.new_transaction_changeset(%__MODULE__{}, tx_bytes, decoded, fees)
block_with_next_tx_index = "block_with_next_tx_index-#{index}"
multi
|> Multi.run("current_forming_block-#{index}", fn repo, _ -> Block.get_forming_block_for_update(repo) end)
|> Multi.run(block_with_next_tx_index, fn repo, params ->
Block.get_block_and_tx_index_for_transaction(repo, params, index)
end)
|> Multi.insert("transaction-#{index}", fn %{^block_with_next_tx_index => block_with_next_tx_index} ->
TransactionChangeset.set_blknum_and_tx_index(changeset, block_with_next_tx_index)
end)
end)
end
@spec decode(tx_bytes()) :: {:ok, {<<_::48>>, binary(), ExPlasma.Transaction.t()}} | {:error, atom()}
defp decode(hex_tx_bytes) do
with {:ok, tx_bytes} <- Encoding.to_binary(hex_tx_bytes),
{:ok, decoded} <- ExPlasma.decode(tx_bytes),
{:ok, recovered} <- ExPlasmaTx.with_witnesses(decoded) do
{:ok, {"1-of-1", tx_bytes, recovered}}
end
end
@spec decode_batch(hex_tx_bytes()) :: {:ok, list(batch())} | {:error, atom()}
defp decode_batch(hexs_tx_bytes) do
acc = []
index = 0
decode_batch(hexs_tx_bytes, acc, index)
end
defp decode_batch([], acc, _) do
{:ok, Enum.reverse(acc)}
end
defp decode_batch([hex_tx_bytes | hexs_tx_bytes], acc, index) do
with {:ok, tx_bytes} <- Encoding.to_binary(hex_tx_bytes),
{:ok, decoded} <- ExPlasma.decode(tx_bytes),
{:ok, recovered} <- ExPlasmaTx.with_witnesses(decoded) do
decode_batch(hexs_tx_bytes, [{index, tx_bytes, recovered} | acc], index + 1)
end
end
defp load_fees() do
{:ok, all_fees} = Fee.accepted_fees()
all_fees
end
defp load_fee(all_fees, type) do
fees_for_type = Map.get(all_fees, type, {:error, :invalid_transaction_type})
{:ok, fees_for_type}
end
end
| 34.828194 | 123 | 0.675183 |
731c717c14529d92c27252778427af850d5310d2 | 349 | ex | Elixir | lib/todo/resource/users/friends.ex | bbassett/todo | 2b3a2b8c7663e3d32491603bee2de63446994f5c | [
"MIT"
] | null | null | null | lib/todo/resource/users/friends.ex | bbassett/todo | 2b3a2b8c7663e3d32491603bee2de63446994f5c | [
"MIT"
] | null | null | null | lib/todo/resource/users/friends.ex | bbassett/todo | 2b3a2b8c7663e3d32491603bee2de63446994f5c | [
"MIT"
] | null | null | null | defmodule Todo.Resource.Users.Friends do
use Mazurka.Resource
param user do
value
end
let friends = User.find_friends(user)
mediatype Mazurka.Mediatype.Hyperjson do
action do
%{
"collection" => for friend <- friends do
link_to(Todo.Resource.Users.Read, user: friend)
end
}
end
end
end | 18.368421 | 57 | 0.641834 |
731c78f7857b4a3fc39a856a9e3bdc67e978207d | 2,144 | ex | Elixir | lib/espec/configuration.ex | andrei-mihaila/espec | ffab88aa987bd446e9c44fe38d73cfe6b789f80b | [
"Apache-2.0"
] | 807 | 2015-03-25T14:00:19.000Z | 2022-03-24T08:08:15.000Z | lib/espec/configuration.ex | andrei-mihaila/espec | ffab88aa987bd446e9c44fe38d73cfe6b789f80b | [
"Apache-2.0"
] | 254 | 2015-03-27T10:12:25.000Z | 2021-07-12T01:40:15.000Z | lib/espec/configuration.ex | andrei-mihaila/espec | ffab88aa987bd446e9c44fe38d73cfe6b789f80b | [
"Apache-2.0"
] | 85 | 2015-04-02T10:25:19.000Z | 2021-01-30T21:30:43.000Z | defmodule ESpec.Configuration do
@moduledoc """
Handles ESpec configurations.
@list contains all available keys in config.
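  ## Examples
  A typical call from a spec helper (the hook bodies are illustrative):
      ESpec.configure(fn config ->
        config.before(fn tags -> {:shared, tags: tags} end)
        config.finally(fn _shared -> :ok end)
      end)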
"""
@list [
hello: "Description",
before: "Defines before hook",
finally: "Defines finally hook",
silent: "No output",
file_opts: "Run the specific file or spec in the file",
formatters: "Specifies formatters list",
trace: "Detailed output",
out_file: "Output file",
focus: "Run only examples with [focus: true]",
order: "Run specs in the order in which they are declared",
sync: "Ignore async tag and run all specs synchronously",
only: "Run only tests that match the filter",
exclude: "Exclude tests that match the filter",
string: "Run only examples whose full nested descriptions contain string",
seed: "Seeds the random number generator used to randomize tests order",
test: "For test purpose",
shared_specs: "The shared spec files to include",
start_loading_time: "Starts loading files",
finish_loading_time: "Finished loading",
finish_specs_time: "Finished specs",
formatters_timeout:
"How long to wait for the formatters to " <>
"finish formatting (defaults to the GenServer call timeout)",
stale:
"Run only those test files which reference modules that have changed since the last time you ran this task"
]
@doc """
Accepts a keyword of options.
Puts options into application environment.
Allows only whitelisted options.
"""
def add(opts) do
opts
|> Enum.each(fn {key, val} ->
if Enum.member?(Keyword.keys(@list), key) do
Application.put_env(:espec, key, val)
end
end)
end
@doc "Returns the value associated with key."
def get(key), do: Application.get_env(:espec, key)
@doc "Returns all options."
def all, do: Application.get_all_env(:espec)
@doc """
Allows to set the config options.
See `ESpec.configure/1`.
"""
def configure(func), do: func.(ESpec.Configuration)
Keyword.keys(@list)
|> Enum.each(fn func ->
def unquote(func)(value) do
ESpec.Configuration.add([{unquote(func), value}])
end
end)
end
| 32 | 113 | 0.680504 |
731c8b19846088dd3b96e8d32fe8017250fe8a13 | 1,630 | ex | Elixir | lib/ex_oneroster/web/controllers/academic_session_controller.ex | jrissler/ex_oneroster | cec492117bffc14aec91e2448643682ceeb449e9 | [
"Apache-2.0"
] | 3 | 2018-09-06T11:15:07.000Z | 2021-12-27T15:36:51.000Z | lib/ex_oneroster/web/controllers/academic_session_controller.ex | jrissler/ex_oneroster | cec492117bffc14aec91e2448643682ceeb449e9 | [
"Apache-2.0"
] | null | null | null | lib/ex_oneroster/web/controllers/academic_session_controller.ex | jrissler/ex_oneroster | cec492117bffc14aec91e2448643682ceeb449e9 | [
"Apache-2.0"
] | null | null | null | defmodule ExOneroster.Web.AcademicSessionController do
use ExOneroster.Web, :controller
alias ExOneroster.AcademicSessions
alias ExOneroster.AcademicSessions.AcademicSession
action_fallback ExOneroster.Web.FallbackController
def index(conn, _params) do
academic_sessions = AcademicSessions.list_academic_sessions()
render(conn, "index.json", academic_sessions: academic_sessions)
end
def create(conn, %{"academic_session" => academic_session_params}) do
with {:ok, %AcademicSession{} = academic_session} <- AcademicSessions.create_academic_session(academic_session_params) do
conn
|> put_status(:created)
|> put_resp_header("location", academic_session_path(conn, :show, academic_session))
|> render("show.json", academic_session: academic_session)
end
end
def show(conn, %{"id" => id}) do
academic_session = AcademicSessions.get_academic_session!(id)
render(conn, "show.json", academic_session: academic_session)
end
def update(conn, %{"id" => id, "academic_session" => academic_session_params}) do
academic_session = AcademicSessions.get_academic_session!(id)
with {:ok, %AcademicSession{} = academic_session} <- AcademicSessions.update_academic_session(academic_session, academic_session_params) do
render(conn, "show.json", academic_session: academic_session)
end
end
def delete(conn, %{"id" => id}) do
academic_session = AcademicSessions.get_academic_session!(id)
with {:ok, %AcademicSession{}} <- AcademicSessions.delete_academic_session(academic_session) do
send_resp(conn, :no_content, "")
end
end
end
| 37.906977 | 143 | 0.750307 |
731c925be9ac3e9836444bcce9a59384277c288a | 5,023 | ex | Elixir | lib/membrane/rtp/payload_format.ex | membraneframework/membrane_rtp_format | f3749da8e552dce1dc7ad7f24ed0708609ea84a2 | ["Apache-2.0"] | null | null | null | lib/membrane/rtp/payload_format.ex | membraneframework/membrane_rtp_format | f3749da8e552dce1dc7ad7f24ed0708609ea84a2 | ["Apache-2.0"] | 3 | 2020-11-20T16:19:42.000Z | 2021-05-21T07:40:32.000Z | lib/membrane/rtp/payload_format.ex | membraneframework/membrane_rtp_format | f3749da8e552dce1dc7ad7f24ed0708609ea84a2 | ["Apache-2.0"] | null | null | null |
defmodule Membrane.RTP.PayloadFormat do
@moduledoc """
This module contains utilities for resolving RTP default payload types,
encoding names, clock rates and (de)payloaders.
"""
alias Membrane.RTP
require Membrane.RTP
@app :membrane_rtp_format
@format_env :__membrane_format
@pt_env :__membrane_payload_type_mapping
@payload_types_specs %{
0 => %{encoding_name: :PCMU, clock_rate: 8000},
3 => %{encoding_name: :GSM, clock_rate: 8000},
4 => %{encoding_name: :G732, clock_rate: 8000},
5 => %{encoding_name: :DVI4, clock_rate: 8000},
6 => %{encoding_name: :DVI4, clock_rate: 16000},
7 => %{encoding_name: :LPC, clock_rate: 8000},
8 => %{encoding_name: :PCMA, clock_rate: 8000},
9 => %{encoding_name: :G722, clock_rate: 8000},
10 => %{encoding_name: :L16, clock_rate: 44100},
11 => %{encoding_name: :L16, clock_rate: 44100},
12 => %{encoding_name: :QCELP, clock_rate: 8000},
13 => %{encoding_name: :CN, clock_rate: 8000},
14 => %{encoding_name: :MPA, clock_rate: 90000},
15 => %{encoding_name: :G728, clock_rate: 8000},
16 => %{encoding_name: :DVI4, clock_rate: 11025},
17 => %{encoding_name: :DVI4, clock_rate: 22050},
18 => %{encoding_name: :G729, clock_rate: 8000},
25 => %{encoding_name: :CELB, clock_rate: 90000},
26 => %{encoding_name: :JPEG, clock_rate: 90000},
28 => %{encoding_name: :NV, clock_rate: 90000},
31 => %{encoding_name: :H261, clock_rate: 90000},
32 => %{encoding_name: :MPV, clock_rate: 90000},
33 => %{encoding_name: :MP2T, clock_rate: 90000},
34 => %{encoding_name: :H263, clock_rate: 90000}
}
@enforce_keys [:encoding_name]
defstruct @enforce_keys ++ [payload_type: nil, payloader: nil, depayloader: nil]
@type t :: %__MODULE__{
encoding_name: RTP.encoding_name_t(),
payload_type: RTP.payload_type_t() | nil,
payloader: module | nil,
depayloader: module | nil
}
@doc false
@spec register_static_formats() :: :ok
def register_static_formats() do
@payload_types_specs
|> Enum.group_by(fn {_pt, specs} -> specs.encoding_name end, fn {pt, _specs} -> pt end)
|> Enum.each(fn
{name, [pt]} -> register(%__MODULE__{encoding_name: name, payload_type: pt})
_ambiguous -> :ok
end)
end
@doc """
Returns encoding name and clock rate for given payload type, if registered.
"""
@spec get_payload_type_mapping(RTP.payload_type_t()) :: %{
optional(:encoding_name) => RTP.encoding_name_t(),
optional(:clock_rate) => RTP.clock_rate_t()
}
def get_payload_type_mapping(payload_type) when RTP.is_payload_type_static(payload_type) do
Map.fetch!(@payload_types_specs, payload_type)
end
def get_payload_type_mapping(payload_type) when RTP.is_payload_type_dynamic(payload_type) do
get_env(@pt_env, payload_type, %{})
end
@doc """
Registers default encoding name and clock rate for a dynamic payload_type
"""
@spec register_payload_type_mapping(
RTP.dynamic_payload_type_t(),
RTP.encoding_name_t(),
RTP.clock_rate_t()
) :: :ok | no_return()
def register_payload_type_mapping(payload_type, encoding_name, clock_rate)
when RTP.is_payload_type_dynamic(payload_type) do
case fetch_env(@pt_env, payload_type) do
{:ok, payload_format} ->
raise "RTP payload type #{payload_type} already registered: #{inspect(payload_format)}"
:error ->
put_env(@pt_env, payload_type, %{encoding_name: encoding_name, clock_rate: clock_rate})
end
end
@doc """
Returns payload format registered for given encoding name.
"""
@spec get(RTP.encoding_name_t()) :: t
def get(encoding_name) do
get_env(@format_env, encoding_name, %__MODULE__{encoding_name: encoding_name})
end
@doc """
Registers payload format.
Raises if some payload format field was already registered and set to different value.
"""
@spec register(t) :: :ok | no_return
def register(%__MODULE__{encoding_name: encoding_name} = payload_format) do
payload_format =
get_env(@format_env, encoding_name, %{})
|> Map.merge(payload_format, &merge_format(encoding_name, &1, &2, &3))
put_env(@format_env, encoding_name, payload_format)
end
defp merge_format(_name, _k, nil, v), do: v
defp merge_format(_name, _k, v, nil), do: v
defp merge_format(_name, _k, v, v), do: v
defp merge_format(name, k, v1, v2) do
raise "Cannot register RTP payload format #{name} field #{k} to #{inspect(v2)}, " <>
"already registered to #{inspect(v1)}."
end
defp put_env(env_key, key, value) do
env = Application.get_env(@app, env_key, %{})
Application.put_env(@app, env_key, Map.put(env, key, value))
end
defp fetch_env(env_key, key) do
Application.get_env(@app, env_key, %{}) |> Map.fetch(key)
end
def get_env(env_key, key, default \\ nil) do
case fetch_env(env_key, key) do
{:ok, value} -> value
:error -> default
end
end
end
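# A minimal usage sketch (added for illustration, not part of the dumped file).
# Static payload types resolve from @payload_types_specs; dynamic ones must be
# registered first. The 96/:H264/90_000 values are assumptions for the example.
#
#   Membrane.RTP.PayloadFormat.get_payload_type_mapping(0)
#   #=> %{clock_rate: 8000, encoding_name: :PCMU}
#
#   Membrane.RTP.PayloadFormat.register_payload_type_mapping(96, :H264, 90_000)
#   Membrane.RTP.PayloadFormat.get_payload_type_mapping(96)
#   #=> %{clock_rate: 90000, encoding_name: :H264}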
| 34.881944 | 95 | 0.667728 |
731ca15494b86b9a63ac2de0321ad28419a11e03 | 118 | exs | Elixir | test/tetris_test.exs | pkhodakovsky/elixir-tetris-game | 90b4cf9b012e835eab370177b2dc532c6632ce17 | ["MIT"] | null | null | null | test/tetris_test.exs | pkhodakovsky/elixir-tetris-game | 90b4cf9b012e835eab370177b2dc532c6632ce17 | ["MIT"] | null | null | null | test/tetris_test.exs | pkhodakovsky/elixir-tetris-game | 90b4cf9b012e835eab370177b2dc532c6632ce17 | ["MIT"] | null | null | null |
defmodule TetrisTest do
use ExUnit.Case
test "greets the world" do
assert Tetris.hello() == :world
end
end
| 14.75 | 35 | 0.694915 |
731cda730c9389108f8e59043b14aac968cf4d8e | 1,497 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/chart_data.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/chart_data.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/chart_data.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Sheets.V4.Model.ChartData do
@moduledoc """
The data included in a domain or series.
## Attributes
* `sourceRange` (*type:* `GoogleApi.Sheets.V4.Model.ChartSourceRange.t`, *default:* `nil`) - The source ranges of the data.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:sourceRange => GoogleApi.Sheets.V4.Model.ChartSourceRange.t()
}
field(:sourceRange, as: GoogleApi.Sheets.V4.Model.ChartSourceRange)
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.ChartData do
def decode(value, options) do
GoogleApi.Sheets.V4.Model.ChartData.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.ChartData do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 31.851064 | 127 | 0.740147 |
731cdf2788932b8e672f0ec57e05e40cf5e04cd0 | 387 | ex | Elixir | lib/cineplex_web/plugs/health.ex | upmaru/cineplex | 7d1d516d3e3d3683b2ad4425b61517a8f556f721 | ["MIT"] | null | null | null | lib/cineplex_web/plugs/health.ex | upmaru/cineplex | 7d1d516d3e3d3683b2ad4425b61517a8f556f721 | ["MIT"] | null | null | null | lib/cineplex_web/plugs/health.ex | upmaru/cineplex | 7d1d516d3e3d3683b2ad4425b61517a8f556f721 | ["MIT"] | null | null | null |
defmodule CineplexWeb.Plugs.Health do
import Plug.Conn
@spec init(any()) :: any()
def init(options), do: options
@spec call(Plug.Conn.t(), any()) :: Plug.Conn.t()
def call(%{request_path: "/health"} = conn, _opts) do
Appsignal.Transaction.set_action("GET /health")
conn
|> send_resp(:ok, "we're good!")
|> halt()
end
def call(conn, _opts), do: conn
end
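# A minimal usage sketch (added for illustration, not part of the dumped file).
# The plug answers GET /health and halts before the router, so it would typically
# be mounted near the top of the endpoint. The endpoint module name and the
# :cineplex otp_app are assumptions.
#
#   defmodule CineplexWeb.Endpoint do
#     use Phoenix.Endpoint, otp_app: :cineplex
#
#     plug CineplexWeb.Plugs.Health
#     plug CineplexWeb.Router
#   end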
| 21.5 | 55 | 0.627907 |
731d0d78101a7d71f120593f1e16eb8a73bf7360 | 2,015 | ex | Elixir | clients/firestore/lib/google_api/firestore/v1/model/filter.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | clients/firestore/lib/google_api/firestore/v1/model/filter.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/firestore/lib/google_api/firestore/v1/model/filter.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Firestore.V1.Model.Filter do
@moduledoc """
A filter.
## Attributes
* `compositeFilter` (*type:* `GoogleApi.Firestore.V1.Model.CompositeFilter.t`, *default:* `nil`) - A composite filter.
* `fieldFilter` (*type:* `GoogleApi.Firestore.V1.Model.FieldFilter.t`, *default:* `nil`) - A filter on a document field.
* `unaryFilter` (*type:* `GoogleApi.Firestore.V1.Model.UnaryFilter.t`, *default:* `nil`) - A filter that takes exactly one argument.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:compositeFilter => GoogleApi.Firestore.V1.Model.CompositeFilter.t(),
:fieldFilter => GoogleApi.Firestore.V1.Model.FieldFilter.t(),
:unaryFilter => GoogleApi.Firestore.V1.Model.UnaryFilter.t()
}
field(:compositeFilter, as: GoogleApi.Firestore.V1.Model.CompositeFilter)
field(:fieldFilter, as: GoogleApi.Firestore.V1.Model.FieldFilter)
field(:unaryFilter, as: GoogleApi.Firestore.V1.Model.UnaryFilter)
end
defimpl Poison.Decoder, for: GoogleApi.Firestore.V1.Model.Filter do
def decode(value, options) do
GoogleApi.Firestore.V1.Model.Filter.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Firestore.V1.Model.Filter do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 38.018868 | 136 | 0.733499 |
731d26d90ec36bf39a42e9cb2d68b84adc4f35ee | 1,119 | exs | Elixir | config/config.exs | slickcoach/filesize | 6f6a6bdd2a1ac75e0b5b2c408f7bac35a1919b98 | ["MIT"] | null | null | null | config/config.exs | slickcoach/filesize | 6f6a6bdd2a1ac75e0b5b2c408f7bac35a1919b98 | ["MIT"] | null | null | null | config/config.exs | slickcoach/filesize | 6f6a6bdd2a1ac75e0b5b2c408f7bac35a1919b98 | ["MIT"] | null | null | null |
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :filesize, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:filesize, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.096774 | 73 | 0.751564 |