hexsha (string, len 40) | size (int64, 2..991k) | ext (string, 2 classes) | lang (string, 1 class) | max_stars_repo_path (string, len 4..208) | max_stars_repo_name (string, len 6..106) | max_stars_repo_head_hexsha (string, len 40) | max_stars_repo_licenses (sequence) | max_stars_count (int64, 1..33.5k, ⌀) | max_stars_repo_stars_event_min_datetime (string, len 24, ⌀) | max_stars_repo_stars_event_max_datetime (string, len 24, ⌀) | max_issues_repo_path (string, len 4..208) | max_issues_repo_name (string, len 6..106) | max_issues_repo_head_hexsha (string, len 40) | max_issues_repo_licenses (sequence) | max_issues_count (int64, 1..16.3k, ⌀) | max_issues_repo_issues_event_min_datetime (string, len 24, ⌀) | max_issues_repo_issues_event_max_datetime (string, len 24, ⌀) | max_forks_repo_path (string, len 4..208) | max_forks_repo_name (string, len 6..106) | max_forks_repo_head_hexsha (string, len 40) | max_forks_repo_licenses (sequence) | max_forks_count (int64, 1..6.91k, ⌀) | max_forks_repo_forks_event_min_datetime (string, len 24, ⌀) | max_forks_repo_forks_event_max_datetime (string, len 24, ⌀) | content (string, len 2..991k) | avg_line_length (float64, 1..36k) | max_line_length (int64, 1..977k) | alphanum_fraction (float64, 0..1) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9eeffd1212fba84366cb3f8b71aefc0244d8f103 | 43 | exs | Elixir | elixir/86.exs | merxer/kata | 5dbbca8b4173029f9311398148de9437a329cf9a | ["MIT"] | null | null | null | elixir/86.exs | merxer/kata | 5dbbca8b4173029f9311398148de9437a329cf9a | ["MIT"] | null | null | null | elixir/86.exs | merxer/kata | 5dbbca8b4173029f9311398148de9437a329cf9a | ["MIT"] | null | null | null |
square = &(&1 * &1)
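# The capture &(&1 * &1) builds a one-argument squaring function; calling it with 8 below prints 64.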
square.(8) |> IO.puts
| 10.75 | 21 | 0.488372 |
9ef03f6c64c7dccffa6e9fa030211701eb014f76 | 763 | exs | Elixir | mix.exs | hirokazumiyaji/dynamodb | f8a3fdc64b954aae99e4281fc28c8ca6886e5876 | ["MIT"] | 1 | 2017-02-01T12:42:45.000Z | 2017-02-01T12:42:45.000Z | mix.exs | hirokazumiyaji/dynamodb-elixir | f8a3fdc64b954aae99e4281fc28c8ca6886e5876 | ["MIT"] | null | null | null | mix.exs | hirokazumiyaji/dynamodb-elixir | f8a3fdc64b954aae99e4281fc28c8ca6886e5876 | ["MIT"] | null | null | null |
defmodule DynamoDB.Mixfile do
use Mix.Project
def project do
[app: :dynamodb,
version: "0.0.1",
elixir: "~> 1.0",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[applications: [:logger, :httpoison]]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type `mix help deps` for more examples and options
defp deps do
[{:poison, "~> 1.4.0"},
{:httpoison, "~> 0.7.0"},
{:aws_auth, "~> 0.2.0"}]
end
end
| 21.8 | 77 | 0.588467 |
9ef04810aee367db0a2574c4dbe902ffedf58938 | 243 | ex | Elixir | lib/jerry/utils/macros.ex | nroi/jerry | 77b7e1371f5323a2e8956d58e7a16e458d2c6170 | ["MIT"] | 1 | 2018-01-25T22:43:19.000Z | 2018-01-25T22:43:19.000Z | lib/jerry/utils/macros.ex | nroi/jerry | 77b7e1371f5323a2e8956d58e7a16e458d2c6170 | ["MIT"] | 6 | 2017-09-17T11:33:24.000Z | 2017-10-08T21:16:26.000Z | lib/jerry/utils/macros.ex | nroi/jerry | 77b7e1371f5323a2e8956d58e7a16e458d2c6170 | ["MIT"] | null | null | null |
defmodule Jerry.Utils.Macros do
@moduledoc false
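  # These macros expand into Regex.source/1 and Regex.compile!/1 calls at the call site.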
defmacro source(regex) do
quote do
Regex.source(unquote(regex))
end
end
defmacro compile(string) do
quote do
Regex.compile!(unquote(string))
end
end
end
| 14.294118 | 37 | 0.666667 |
9ef05df8cb80455e019b80d30be76e329ba93c47 | 1,882 | ex | Elixir | lib/oli/delivery/evaluation/evaluator.ex | chrislawson/oli-torus | 94165b211ab74fac3e7c8a14110a394fa9a6f320 | ["MIT"] | 45 | 2020-04-17T15:40:27.000Z | 2022-03-25T00:13:30.000Z | lib/oli/delivery/evaluation/evaluator.ex | chrislawson/oli-torus | 94165b211ab74fac3e7c8a14110a394fa9a6f320 | ["MIT"] | 944 | 2020-02-13T02:37:01.000Z | 2022-03-31T17:50:07.000Z | lib/oli/delivery/evaluation/evaluator.ex | chrislawson/oli-torus | 94165b211ab74fac3e7c8a14110a394fa9a6f320 | ["MIT"] | 23 | 2020-07-28T03:36:13.000Z | 2022-03-17T14:29:02.000Z |
defmodule Oli.Delivery.Evaluation.Evaluator do
alias Oli.Delivery.Evaluation.{EvaluationContext, Result}
alias Oli.Activities.Model.{Part, Response}
alias Oli.Delivery.Evaluation.Rule
alias Oli.Activities.ParseUtils
@doc """
Evaluates a student input for a given activity part. In a successful
evaluation, returns the feedback and a scoring result.
"""
def evaluate(%Part{} = part, %EvaluationContext{} = context) do
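    # Accumulator shape: {evaluation context, best matching response so far, its score, highest possible score}.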
case Enum.reduce(part.responses, {context, nil, 0, 0}, &consider_response/2) do
{_, %Response{feedback: feedback, score: score}, _, out_of} ->
{:ok, {feedback, %Result{score: score, out_of: out_of}}}
# No matching response found - mark incorrect
{_, nil, _, out_of} ->
# this guarantees that all activities, even unanswered client-side
# evaluated ones, that have no matching responses get 0 out of
# a non-zero maximum value
adjusted_out_of =
if out_of == 0 do
1
else
out_of
end
{:ok,
{ParseUtils.default_content_item("Incorrect"),
%Result{score: 0, out_of: adjusted_out_of}}}
_ ->
{:error, "Error in evaluation"}
end
end
# Consider one response
defp consider_response(
%Response{score: score, rule: rule} = current,
{context, best_response, best_score, out_of}
) do
# Track the highest point value out of all responses
out_of =
case score > out_of do
true -> score
false -> out_of
end
matches =
case Rule.parse_and_evaluate(rule, context) do
{:ok, result} -> result
{:error, _} -> false
end
if matches and (best_score < score or is_nil(best_response)) do
{context, current, score, out_of}
else
{context, best_response, best_score, out_of}
end
end
end
| 30.354839 | 83 | 0.628055 |
9ef062ed796a07bbdd8aa80f78349867a38356b6 | 289 | exs | Elixir | priv/repo/migrations/20190222013947_create_users.exs | AminArria/reserva | 13faef72927aeb84d456f080d65249b8ef50180e | ["MIT"] | null | null | null | priv/repo/migrations/20190222013947_create_users.exs | AminArria/reserva | 13faef72927aeb84d456f080d65249b8ef50180e | ["MIT"] | 2 | 2021-03-09T00:49:12.000Z | 2021-05-08T03:13:05.000Z | priv/repo/migrations/20190222013947_create_users.exs | AminArria/reserva | 13faef72927aeb84d456f080d65249b8ef50180e | ["MIT"] | null | null | null |
defmodule Reserva.Repo.Migrations.CreateUsers do
use Ecto.Migration
def change do
create table(:users) do
add :usbid, :string
add :type, :string
add :name, :string
add :email, :string
add :phone_number, :string
timestamps()
end
end
end
| 17 | 48 | 0.629758 |
9ef065d9c56407b62fbe46490374cb0ed6a38485 | 305 | exs | Elixir | test/auth/auth_user_test.exs | shuv1824/potionx | a5888413b13a520d8ddf79fb26b7483e441737c3 | ["MIT"] | 31 | 2021-02-16T20:50:46.000Z | 2022-02-03T10:38:07.000Z | test/auth/auth_user_test.exs | shuv1824/potionx | a5888413b13a520d8ddf79fb26b7483e441737c3 | ["MIT"] | 6 | 2021-04-07T21:50:20.000Z | 2022-02-06T21:54:04.000Z | test/auth/auth_user_test.exs | shuv1824/potionx | a5888413b13a520d8ddf79fb26b7483e441737c3 | ["MIT"] | 4 | 2021-03-25T17:59:44.000Z | 2021-04-25T16:28:22.000Z |
defmodule Potionx.Auth.UserTest do
use Potionx.ConnCase
test "test user json gets decoded into a user struct" do
json =
Jason.encode!(%PotionxTest.User{id: 1, roles: [:admin]})
|> Jason.decode!
assert %PotionxTest.User{roles: [:admin]} = PotionxTest.User.from_json(json)
end
end
| 30.5 | 80 | 0.691803 |
9ef07b00b0b55812782608b6485caa839ba68105 | 407 | ex | Elixir | lib/infinity_one_web/channel.ex | smpallen99/ucx_ucc | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | ["MIT"] | 11 | 2017-05-15T18:35:05.000Z | 2018-02-05T18:27:40.000Z | lib/infinity_one_web/channel.ex | anndream/infinity_one | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | ["MIT"] | 15 | 2017-11-27T10:38:05.000Z | 2018-02-09T20:42:08.000Z | lib/infinity_one_web/channel.ex | anndream/infinity_one | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | ["MIT"] | 4 | 2017-09-13T11:34:16.000Z | 2018-02-26T13:37:06.000Z |
defmodule InfinityOneWeb.Channel do
@moduledoc """
Channel helpers
"""
@type socket :: Phoenix.Socket.t
@doc """
Return the :noreply tuple
"""
@spec noreply(socket) :: {:noreply, socket}
def noreply(socket), do: {:noreply, socket}
@doc """
Return the reply tuple
"""
@spec reply(any, socket) :: {:reply, any, socket}
def reply(reply, socket), do: {:reply, reply, socket}
end
| 19.380952 | 55 | 0.633907 |
9ef09892225af11230ed4b54e799bbbd05837e56 | 778 | ex | Elixir | apps/artemis/lib/artemis/schemas/reaction.ex | artemis-platform/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | ["MIT"] | 2 | 2020-04-23T02:29:18.000Z | 2020-07-07T13:13:17.000Z | apps/artemis/lib/artemis/schemas/reaction.ex | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | ["MIT"] | 4 | 2020-04-26T20:35:36.000Z | 2020-11-10T22:13:19.000Z | apps/artemis/lib/artemis/schemas/reaction.ex | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | ["MIT"] | null | null | null |
defmodule Artemis.Reaction do
use Artemis.Schema
use Artemis.Schema.SQL
schema "reactions" do
field :resource_id, :string
field :resource_type, :string
field :value, :string
belongs_to :user, Artemis.User
timestamps()
end
# Callbacks
def updatable_fields,
do: [
:resource_id,
:resource_type,
:value,
:user_id
]
def required_fields,
do: [
:resource_id,
:resource_type,
:value,
:user_id
]
def event_log_fields,
do: [
:id,
:resource_id,
:resource_type,
:value,
:user_id
]
# Changesets
def changeset(struct, params \\ %{}) do
struct
|> cast(params, updatable_fields())
|> validate_required(required_fields())
end
end
| 15.56 | 43 | 0.597686 |
9ef0de75c9a8933f0febfd2fa2962e6035f0397a | 2,108 | ex | Elixir | clients/content/lib/google_api/content/v2/model/datafeed_status_error.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/content/lib/google_api/content/v2/model/datafeed_status_error.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/content/lib/google_api/content/v2/model/datafeed_status_error.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Model.DatafeedStatusError do
@moduledoc """
An error occurring in the feed, like "invalid price".
## Attributes
* `code` (*type:* `String.t`, *default:* `nil`) - The code of the error, e.g., "validation/invalid_value".
* `count` (*type:* `String.t`, *default:* `nil`) - The number of occurrences of the error in the feed.
* `examples` (*type:* `list(GoogleApi.Content.V2.Model.DatafeedStatusExample.t)`, *default:* `nil`) - A list of example occurrences of the error, grouped by product.
* `message` (*type:* `String.t`, *default:* `nil`) - The error message, e.g., "Invalid price".
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:code => String.t() | nil,
:count => String.t() | nil,
:examples => list(GoogleApi.Content.V2.Model.DatafeedStatusExample.t()) | nil,
:message => String.t() | nil
}
field(:code)
field(:count)
field(:examples, as: GoogleApi.Content.V2.Model.DatafeedStatusExample, type: :list)
field(:message)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.DatafeedStatusError do
def decode(value, options) do
GoogleApi.Content.V2.Model.DatafeedStatusError.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.DatafeedStatusError do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.642857 | 169 | 0.703985 |
9ef0f4ecb41575dfe82c708a3a82b7ba8749b6c8 | 973 | ex | Elixir | generated_code_examples/elixir/regression/linear.ex | lucasavila00/m2cgen | 4f41ce60cf7f5a6f198d0adc43201f9e5a5aedeb | ["MIT"] | null | null | null | generated_code_examples/elixir/regression/linear.ex | lucasavila00/m2cgen | 4f41ce60cf7f5a6f198d0adc43201f9e5a5aedeb | ["MIT"] | null | null | null | generated_code_examples/elixir/regression/linear.ex | lucasavila00/m2cgen | 4f41ce60cf7f5a6f198d0adc43201f9e5a5aedeb | ["MIT"] | null | null | null |
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
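  # Pack the input list into a flat binary of 64-bit floats so read/2 can fetch each feature by offset.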
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
(((((((((((((36.367080746577244) + ((read(input,0)) * (-0.10861311354908008))) + ((read(input,1)) * (0.046461486329936456))) + ((read(input,2)) * (0.027432259970172148))) + ((read(input,3)) * (2.6160671309537777))) + ((read(input,4)) * (-17.51793656329737))) + ((read(input,5)) * (3.7674418196772255))) + ((read(input,6)) * (-0.000021581753164971046))) + ((read(input,7)) * (-1.4711768622633645))) + ((read(input,8)) * (0.2956767140062958))) + ((read(input,9)) * (-0.012233831527259383))) + ((read(input,10)) * (-0.9220356453705304))) + ((read(input,11)) * (0.009038220462695552))) + ((read(input,12)) * (-0.542583033714222))
end
end
| 64.866667 | 633 | 0.5889 |
9ef16ea02d16363cecfce1688c1c4a8c04104746 | 307 | ex | Elixir | lib/messaging_status_service/error_sink_behaviour.ex | ciroque/messaging_status_service | 0d32873ac6e0a78c92a5cf08da373ba4aaf22da4 | ["MIT"] | null | null | null | lib/messaging_status_service/error_sink_behaviour.ex | ciroque/messaging_status_service | 0d32873ac6e0a78c92a5cf08da373ba4aaf22da4 | ["MIT"] | null | null | null | lib/messaging_status_service/error_sink_behaviour.ex | ciroque/messaging_status_service | 0d32873ac6e0a78c92a5cf08da373ba4aaf22da4 | ["MIT"] | null | null | null |
defmodule MessagingStatusService.Calls.ErrorSinkBehaviour do
@type message :: IO.chardata() | String.Chars.t()
@type metadata :: keyword(String.Chars.t())
@type message_t :: (() -> message) | {message, keyword}
@callback error(message_t) :: any()
@callback error(message_t, metadata) :: any()
end
| 38.375 | 60 | 0.700326 |
9ef18700a281ad2572be0926f571d139477c45d0 | 1,103 | ex | Elixir | lib/hexdocs/store/impl.ex | hexpm/hexdocs | 808d0b06bbd1a36c639231225ed78cec8c182cae | ["Apache-2.0"] | 29 | 2016-10-13T07:32:04.000Z | 2022-03-25T05:59:14.000Z | lib/hexdocs/store/impl.ex | hexpm/hexdocs | 808d0b06bbd1a36c639231225ed78cec8c182cae | ["Apache-2.0"] | 21 | 2016-02-24T03:59:35.000Z | 2021-07-30T08:44:39.000Z | lib/hexdocs/store/impl.ex | hexpm/hexdocs | 808d0b06bbd1a36c639231225ed78cec8c182cae | ["Apache-2.0"] | 12 | 2016-02-28T19:57:36.000Z | 2022-03-18T07:47:39.000Z |
defmodule Hexdocs.Store.Impl do
@behaviour Hexdocs.Store.Repo
@behaviour Hexdocs.Store.Docs
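  # Each operation looks up the configured backend module and bucket name (see bucket/1), then delegates the call to it.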
def list(bucket, prefix) do
{impl, name} = bucket(bucket)
impl.list(name, prefix)
end
def get(bucket, key, opts) do
{impl, name} = bucket(bucket)
impl.get(name, key, opts)
end
def head_page(bucket, key, opts) do
{impl, name} = bucket(bucket)
impl.head_page(name, key, opts)
end
def get_page(bucket, key, opts) do
{impl, name} = bucket(bucket)
impl.get_page(name, key, opts)
end
def stream_page(bucket, key, opts) do
{impl, name} = bucket(bucket)
impl.stream_page(name, key, opts)
end
def put(bucket, key, body, opts) do
{impl, name} = bucket(bucket)
impl.put(name, key, body, opts)
end
def put!(bucket, key, body, opts) do
{impl, name} = bucket(bucket)
impl.put!(name, key, body, opts)
end
def delete_many(bucket, keys) do
{impl, name} = bucket(bucket)
impl.delete_many(name, keys)
end
defp bucket(key) do
env = Application.get_env(:hexdocs, key)
{env[:implementation], env[:name]}
end
end
| 22.06 | 44 | 0.646419 |
9ef1c6c1bc69e347f191bb23aea83b4f47e169e5 | 666 | ex | Elixir | lib/andy/actuation/actuator_config.ex | jfcloutier/andy | 74b93f734d6f6353356041a603a96ad5aed4b5dc | ["MIT"] | 7 | 2019-05-29T22:55:25.000Z | 2021-08-22T18:38:29.000Z | lib/andy/actuation/actuator_config.ex | jfcloutier/andy | 74b93f734d6f6353356041a603a96ad5aed4b5dc | ["MIT"] | null | null | null | lib/andy/actuation/actuator_config.ex | jfcloutier/andy | 74b93f734d6f6353356041a603a96ad5aed4b5dc | ["MIT"] | 1 | 2020-01-25T20:46:43.000Z | 2020-01-25T20:46:43.000Z |
defmodule Andy.ActuatorConfig do
@moduledoc "An actuator's configuration"
defstruct name: nil, type: nil, specs: nil, activations: nil, intents: nil
@doc "Make a new actuator conf"
def new(name: name, type: type, specs: specs, activations: activations) do
config = %Andy.ActuatorConfig{name: name, type: type, specs: specs, activations: activations}
%Andy.ActuatorConfig{config | intents: intent_names(config.activations)}
end
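  # Collect the distinct intent names across all activations.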
defp intent_names(activations) do
set =
Enum.reduce(
activations,
MapSet.new(),
fn activation, acc -> MapSet.put(acc, activation.intent) end
)
Enum.to_list(set)
end
end
| 28.956522 | 97 | 0.689189 |
9ef1cb2350d65860d9ce4c7319ee724596104659 | 45,322 | ex | Elixir | lib/ecto/schema.ex | mschae/ecto | 00f85444c4f61080617179232c0d528381de5ec3 | ["Apache-2.0"] | null | null | null | lib/ecto/schema.ex | mschae/ecto | 00f85444c4f61080617179232c0d528381de5ec3 | ["Apache-2.0"] | null | null | null | lib/ecto/schema.ex | mschae/ecto | 00f85444c4f61080617179232c0d528381de5ec3 | ["Apache-2.0"] | null | null | null |
defmodule Ecto.Schema do
@moduledoc ~S"""
Defines a schema for a model.
A schema is a struct with associated metadata that is persisted to a
repository. Every schema model is also a struct, that means that you work
with models just like you would work with structs.
## Example
defmodule User do
use Ecto.Schema
schema "users" do
field :name, :string
field :age, :integer, default: 0
has_many :posts, Post
end
end
By default, a schema will generate a primary key named `id`
of type `:integer` and `belongs_to` associations in the schema will generate
foreign keys of type `:integer`. Those setting can be configured
below.
## Schema attributes
The schema supports some attributes to be set before hand,
configuring the defined schema.
Those attributes are:
* `@primary_key` - configures the schema primary key. It expects
a tuple with the primary key name, type (:id or :binary_id) and options. Defaults
to `{:id, :id, autogenerate: true}`. When set to
false, does not define a primary key in the model;
* `@foreign_key_type` - configures the default foreign key type
used by `belongs_to` associations. Defaults to `:integer`;
* `@timestamps_opts` - configures the default timestamps type
used by `timestamps`. Defaults to `[type: Ecto.DateTime, usec: false]`;
* `@derive` - the same as `@derive` available in `Kernel.defstruct/1`
as the schema defines a struct behind the scenes;
The advantage of configuring the schema via those attributes is
that they can be set with a macro to configure application wide
defaults.
For example, if your database does not support autoincrementing
primary keys and requires something like UUID or a RecordID, you
configure and use `:binary_id` as your primary key type as follows:
# Define a module to be used as base
defmodule MyApp.Model do
defmacro __using__(_) do
quote do
use Ecto.Model
@primary_key {:id, :binary_id, autogenerate: true}
@foreign_key_type :binary_id
end
end
end
# Now use MyApp.Model to define new models
defmodule MyApp.Comment do
use MyApp.Model
schema "comments" do
belongs_to :post, MyApp.Post
end
end
Any models using `MyApp.Model` will get the `:id` field with type
`:binary_id` as primary key. We explain what the `:binary_id` type
entails in the next section.
The `belongs_to` association on `MyApp.Comment` will also define
a `:post_id` field with `:binary_id` type that references the `:id`
field of the `MyApp.Post` model.
## Primary keys
Ecto supports two ID types, called `:id` and `:binary_id` which are
often used as the type for primary keys and associations.
The `:id` type is used when the primary key is an integer while the
`:binary_id` is used when the primary key is in binary format, which
may be `Ecto.UUID` for databases like PostgreSQL and MySQL, or some
specific ObjectID or RecordID often imposed by NoSQL databases.
In both cases, both types have their semantics specified by the
underlying adapter/database. For example, if you use the `:id`
type with `:autogenerate`, it means the database will be responsible
for auto-generating the id if it supports it.
Similarly, the `:binary_id` type may be generated in the adapter
for cases like UUID but it may also be handled by the database if
required. In any case, both scenarios are handled transparently by
Ecto.
Besides `:id` and `:binary_id`, which are often used by primary
and foreign keys, Ecto provides a huge variety of types to be used
by the remaining columns.
## Types and casting
When defining the schema, types need to be given. Types are split
in two categories, primitive types and custom types.
### Primitive types
The primitive types are:
Ecto type | Elixir type | Literal syntax in query
:---------------------- | :---------------------- | :---------------------
`:id` | `integer` | 1, 2, 3
`:binary_id` | `binary` | `<<int, int, int, ...>>`
`:integer` | `integer` | 1, 2, 3
`:float` | `float` | 1.0, 2.0, 3.0
`:boolean` | `boolean` | true, false
`:string` | UTF-8 encoded `string` | "hello"
`:binary` | `binary` | `<<int, int, int, ...>>`
`{:array, inner_type}` | `list` | `[value, value, value, ...]`
`:decimal` | [`Decimal`](https://github.com/ericmj/decimal) |
`:map` | `map` |
**Note:** For the `:array` type, replace `inner_type` with one of
the valid types, such as `:string`.
### Custom types
Besides providing primitive types, Ecto allows custom types to be
implemented by developers, allowing Ecto behaviour to be extended.
A custom type is a module that implements the `Ecto.Type` behaviour.
By default, Ecto provides the following custom types:
Custom type | Database type | Elixir type
:---------------------- | :---------------------- | :---------------------
`Ecto.DateTime` | `:datetime` | `%Ecto.DateTime{}`
`Ecto.Date` | `:date` | `%Ecto.Date{}`
`Ecto.Time` | `:time` | `%Ecto.Time{}`
`Ecto.UUID` | `:uuid` | "uuid-string"
Read the `Ecto.Type` documentation for more information on implementing
your own types.
### The map type
The map type allows developers to store an Elixir map directly
in the database:
# In your migration
create table(:users) do
add :data, :map
end
# In your model
field :data, :map
# Now in your code
%User{data: %{"foo" => "bar"}} |> Repo.insert!
%User{data: %{"foo" => value}} = Repo.one(User)
value #=> "bar"
Keep in mind that we advise the map keys to be strings or integers
instead of atoms. Atoms may be accepted depending on how maps are
serialized but the database will always return atom keys as strings
due to security reasons.
In order to support maps, different databases may employ different
techniques. For example, PostgreSQL will store those values in jsonb
fields, allowing you to even query parts of it. MySQL and MSSQL, on
the other hand, do not yet provide a JSON type, so the value will be
stored in a text field.
For maps to work in such databases, Ecto will need a JSON library.
By default Ecto will use [Poison](http://github.com/devinus/poison)
which needs to be added your deps in `mix.exs`:
{:poison, "~> 1.0"}
You can however tell Ecto to use any other library by configuring it:
config :ecto, :json_library, YourLibraryOfChoice
### Casting
When directly manipulating the struct, it is the responsibility of
the developer to ensure the field values have the proper type. For
example, you can create a user struct with an invalid value
for `age`:
iex> user = %User{age: "0"}
iex> user.age
"0"
However, if you attempt to persist the struct above, an error will
be raised since Ecto validates the types when sending them to the
adapter/database.
Therefore, when working and manipulating external data, it is
recommended the usage of `Ecto.Changeset`'s that are able to filter
and properly cast external data:
changeset = Ecto.Changeset.cast(%User{}, %{"age" => "0"}, [:age], [])
user = Repo.insert!(changeset)
In fact, `Ecto.Changeset` and custom types provide a powerful
combination to extend Ecto types and queries.
Finally, models can also have virtual fields by passing the
`virtual: true` option. These fields are not persisted to the database
and can optionally not be type checked by declaring type `:any`.
## Reflection
Any schema module will generate the `__schema__` function that can be
used for runtime introspection of the schema:
* `__schema__(:source)` - Returns the source as given to `schema/2`;
* `__schema__(:primary_key)` - Returns a list of the field that is the primary
key or [] if there is none;
* `__schema__(:fields)` - Returns a list of all non-virtual field names;
* `__schema__(:type, field)` - Returns the type of the given non-virtual field;
* `__schema__(:types)` - Returns a keyword list of all non-virtual
field names and their type;
* `__schema__(:associations)` - Returns a list of all association field names;
* `__schema__(:association, assoc)` - Returns the association reflection of the given assoc;
* `__schema__(:embeds)` - Returns a list of all embedded field names;
* `__schema__(:embed, embed)` - Returns the embedding reflection of the given embed;
* `__schema__(:read_after_writes)` - Non-virtual fields that must be read back
from the database after every write (insert or update);
* `__schema__(:autogenerate)` - Non-virtual fields that are auto generated on insert;
* `__schema__(:autogenerate_id)` - Primary key that is auto generated on insert;
Furthermore, both `__struct__` and `__changeset__` functions are
defined so structs and changeset functionalities are available.
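For example, given the `User` schema defined at the top of this moduledoc,
one would expect:

    iex> User.__schema__(:source)
    "users"
    iex> User.__schema__(:fields)
    [:id, :name, :age]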
"""
defmodule Metadata do
@moduledoc """
Stores metadata of a struct.
The fields are:
* `state` - the state in a struct's lifetime, one of `:built`,
`:loaded`, `:deleted`
* `source` - the source for the model alongside the query prefix,
defaults to `{nil, "source"}`
* `context` - context stored by the database
"""
defstruct [:state, :source, :context]
defimpl Inspect do
import Inspect.Algebra
def inspect(metadata, opts) do
concat ["#Ecto.Schema.Metadata<", to_doc(metadata.state, opts), ">"]
end
end
end
@doc false
defmacro __using__(_) do
quote do
import Ecto.Schema, only: [schema: 2, embedded_schema: 1]
@primary_key {:id, :id, autogenerate: true}
@timestamps_opts []
@foreign_key_type :id
@before_compile Ecto.Schema
Module.register_attribute(__MODULE__, :ecto_fields, accumulate: true)
Module.register_attribute(__MODULE__, :ecto_assocs, accumulate: true)
Module.register_attribute(__MODULE__, :ecto_embeds, accumulate: true)
Module.register_attribute(__MODULE__, :ecto_raw, accumulate: true)
Module.register_attribute(__MODULE__, :ecto_autogenerate, accumulate: true)
Module.put_attribute(__MODULE__, :ecto_autogenerate_id, nil)
end
end
@doc """
Defines an embedded schema.
This function is literally a shortcut for:
@primary_key {:id, :binary_id, autogenerate: true}
schema "embedded Model" do
"""
defmacro embedded_schema(opts) do
quote do
@primary_key {:id, :binary_id, autogenerate: true}
schema "embedded #{inspect __MODULE__}", unquote(opts)
end
end
@doc """
Defines a schema with a source name and field definitions.
"""
defmacro schema(source, [do: block]) do
quote do
source = unquote(source)
unless is_binary(source) do
raise ArgumentError, "schema source must be a string, got: #{inspect source}"
end
Module.register_attribute(__MODULE__, :changeset_fields, accumulate: true)
Module.register_attribute(__MODULE__, :struct_fields, accumulate: true)
Module.put_attribute(__MODULE__, :struct_fields,
{:__meta__, %Metadata{state: :built, source: {nil, source}}})
primary_key_fields =
case @primary_key do
false ->
[]
{name, type, opts} ->
Ecto.Schema.__field__(__MODULE__, name, type, true, opts)
[name]
other ->
raise ArgumentError, "@primary_key must be false or {name, type, opts}"
end
try do
import Ecto.Schema
unquote(block)
after
:ok
end
fields = @ecto_fields |> Enum.reverse
assocs = @ecto_assocs |> Enum.reverse
embeds = @ecto_embeds |> Enum.reverse
Module.eval_quoted __ENV__, [
Ecto.Schema.__defstruct__(@struct_fields),
Ecto.Schema.__changeset__(@changeset_fields),
Ecto.Schema.__schema__(source, fields, primary_key_fields),
Ecto.Schema.__types__(fields),
Ecto.Schema.__assocs__(assocs),
Ecto.Schema.__embeds__(embeds),
Ecto.Schema.__read_after_writes__(@ecto_raw),
Ecto.Schema.__autogenerate__(@ecto_autogenerate_id)]
end
end
## API
@doc """
Defines a field on the model schema with given name and type.
## Options
* `:default` - Sets the default value on the schema and the struct.
The default value is calculated at compilation time, so don't use
expressions like `Ecto.DateTime.local` or `Ecto.UUID.generate` as
they would then be the same for all records
* `:autogenerate` - Annotates the field to be autogenerated before
insertion if no value is set.
* `:read_after_writes` - When true, the field is only sent on insert
if not nil and always read back from the repository during inserts
and updates.
For relational databases, this means the RETURNING option of those
statements are used. For this reason, MySQL does not support this
option and will raise an error if a model is inserted/updated with
read after writes fields.
* `:virtual` - When true, the field is not persisted to the database.
Notice virtual fields do not support `:autogenerate` nor
`:read_after_writes`.
"""
defmacro field(name, type \\ :string, opts \\ []) do
quote do
Ecto.Schema.__field__(__MODULE__, unquote(name), unquote(type), false, unquote(opts))
end
end
@doc """
Generates `:inserted_at` and `:updated_at` timestamp fields.
When using `Ecto.Model`, the fields generated by this macro
will automatically be set to the current time when inserting
and updating values in a repository.
## Options
* `:type` - the timestamps type, defaults to `Ecto.DateTime`.
* `:usec` - boolean, sets whether microseconds are used in timestamps.
Microseconds will be 0 if false. Defaults to false.
* `:inserted_at` - the name of the column for insertion times or `false`
* `:updated_at` - the name of the column for update times or `false`
All options can be pre-configured by setting `@timestamps_opts`.
"""
defmacro timestamps(opts \\ []) do
quote bind_quoted: binding do
timestamps =
[inserted_at: :inserted_at, updated_at: :updated_at,
type: Ecto.DateTime, usec: false]
|> Keyword.merge(@timestamps_opts)
|> Keyword.merge(opts)
if inserted_at = Keyword.fetch!(timestamps, :inserted_at) do
Ecto.Schema.field(inserted_at, Keyword.fetch!(timestamps, :type), [])
end
if updated_at = Keyword.fetch!(timestamps, :updated_at) do
Ecto.Schema.field(updated_at, Keyword.fetch!(timestamps, :type), [])
end
@ecto_timestamps timestamps
end
end
@doc ~S"""
Indicates a one-to-many association with another model.
The current model has zero or more records of the other model. The other
model often has a `belongs_to` field with the reverse association.
## Options
* `:foreign_key` - Sets the foreign key, this should map to a field on the
other model, defaults to the underscored name of the current model
suffixed by `_id`
* `:references` - Sets the key on the current model to be used for the
association, defaults to the primary key on the model
* `:through` - If this association must be defined in terms of existing
associations. Read below for more information
* `:on_delete` - The action taken on associations when parent model
is deleted. May be `:nothing` (default), `:nilify_all`, `:delete_all`
or `:fetch_and_delete`. See `Ecto.Model.Dependent` for more info.
Notice `:on_delete` may also be set in migrations when creating
a reference. If supported, relying on the database is often preferred
* `:on_replace` - The action taken on associations when the model is
replaced when casting or manipulating parent changeset. May be
`:raise` (default), `:mark_as_invalid`, `:nilify`, or `:delete`.
See `Ecto.Changeset`'s section on related models for more info.
* `:on_cast` - The default changeset function to call during casting
of a nested association which can be overridden in `Ecto.Changeset.cast/4`.
It's an atom representing the function name in the associated model's
module which will receive the module and the parameters for casting
(default: `:changeset`)
* `:defaults` - Default values to use when building the association
## Examples
defmodule Post do
use Ecto.Model
schema "posts" do
has_many :comments, Comment
end
end
# Get all comments for a given post
post = Repo.get(Post, 42)
comments = Repo.all assoc(post, :comments)
# The comments can come preloaded on the post struct
[post] = Repo.all(from(p in Post, where: p.id == 42, preload: :comments))
post.comments #=> [%Comment{...}, ...]
## has_many/has_one :through
Ecto also supports defining associations in terms of other associations
via the `:through` option. Let's see an example:
defmodule Post do
use Ecto.Model
schema "posts" do
has_many :comments, Comment
has_one :permalink, Permalink
# In the has_many :through example below, in the list
# `[:comments, :author]` the `:comments` refers to the
# `has_many :comments` in the Post model's own schema
# and the `:author` refers to the `belongs_to :author`
# of the Comment module's schema (the module below).
# (see the description below for more details)
has_many :comments_authors, through: [:comments, :author]
# Specify the association with custom source
has_many :tags, {"posts_tags", Tag}
end
end
defmodule Comment do
use Ecto.Model
schema "comments" do
belongs_to :author, Author
belongs_to :post, Post
has_one :post_permalink, through: [:post, :permalink]
end
end
In the example above, we have defined a `has_many :through` association
named `:comments_authors`. A `:through` association always expect a list
and the first element of the list must be a previously defined association
in the current module. For example, `:comments_authors` first points to
`:comments` in the same module (Post), which then points to `:author` in
the next model `Comment`.
This `:through` associations will return all authors for all comments
that belongs to that post:
# Get all comments for a given post
post = Repo.get(Post, 42)
authors = Repo.all assoc(post, :comments_authors)
`:through` associations are read-only as they are useful to avoid repetition
allowing the developer to easily retrieve data that is often seen together
but stored across different tables.
`:through` associations can also be preloaded. In such cases, not only
the `:through` association is preloaded but all intermediate steps are
preloaded too:
[post] = Repo.all(from(p in Post, where: p.id == 42, preload: :comments_authors))
post.comments_authors #=> [%Author{...}, ...]
# The comments for each post will be preloaded too
post.comments #=> [%Comment{...}, ...]
# And the author for each comment too
hd(post.comments).author #=> %Author{...}
Finally, `:through` can be used with multiple associations (not only 2)
and with associations of any kind, including `belongs_to` and others
`:through` associations. When the `:through` association is expected to
return one or no item, `has_one :through` should be used instead, as in
the example at the beginning of this section:
# How we defined the association above
has_one :post_permalink, through: [:post, :permalink]
# Get a preloaded comment
[comment] = Repo.all(Comment) |> Repo.preload(:post_permalink)
comment.post_permalink #=> %Permalink{...}
"""
defmacro has_many(name, queryable, opts \\ []) do
quote do
Ecto.Schema.__has_many__(__MODULE__, unquote(name), unquote(queryable), unquote(opts))
end
end
@doc ~S"""
Indicates a one-to-one association with another model.
The current model has zero or one records of the other model. The other
model often has a `belongs_to` field with the reverse association.
## Options
* `:foreign_key` - Sets the foreign key, this should map to a field on the
other model, defaults to the underscored name of the current model
suffixed by `_id`
* `:references` - Sets the key on the current model to be used for the
association, defaults to the primary key on the model
* `:through` - If this association must be defined in terms of existing
associations. Read the section in `has_many/3` for more information
* `:on_delete` - The action taken on associations when parent model
is deleted. May be `:nothing` (default), `:nilify_all`, `:delete_all`
or `:fetch_and_delete`. See `Ecto.Model.Dependent` for more info.
Notice `:on_delete` may also be set in migrations when creating
a reference. If supported, relying on the database is often preferred
* `:on_replace` - The action taken on associations when the model is
replaced when casting or manipulating parent changeset. May be
`:raise` (default), `:mark_as_invalid`, `:nilify`, or `:delete`.
See `Ecto.Changeset`'s section on related models for more info.
* `:on_cast` - The default changeset function to call during casting
of a nested association which can be overridden in `Ecto.Changeset.cast/4`.
It's an atom representing the function name in the associated model's
module which will receive the module and the parameters for casting
(default: `:changeset`)
* `:defaults` - Default values to use when building the association
## Examples
defmodule Post do
use Ecto.Model
schema "posts" do
has_one :permalink, Permalink
# Specify the association with custom source
has_one :category, {"posts_categories", Category}
end
end
# The permalink can come preloaded on the post struct
[post] = Repo.all(from(p in Post, where: p.id == 42, preload: :permalink))
post.permalink #=> %Permalink{...}
"""
defmacro has_one(name, queryable, opts \\ []) do
quote do
Ecto.Schema.__has_one__(__MODULE__, unquote(name), unquote(queryable), unquote(opts))
end
end
@doc ~S"""
Indicates a one-to-one association with another model.
The current model belongs to zero or one records of the other model. The other
model often has a `has_one` or a `has_many` field with the reverse association.
You should use `belongs_to` in the table that contains the foreign key. Imagine
a company <-> manager relationship. If the company contains the `manager_id` in
the underlying database table, we say the company belongs to manager.
In fact, when you invoke this macro, a field with the name of foreign key is
automatically defined in the schema for you.
## Options
* `:foreign_key` - Sets the foreign key field name, defaults to the name
of the association suffixed by `_id`. For example, `belongs_to :company`
will define foreign key of `:company_id`
* `:references` - Sets the key on the other model to be used for the
association, defaults to: `:id`
* `:define_field` - When false, does not automatically define a `:foreign_key`
field, implying the user is defining the field manually elsewhere
* `:type` - Sets the type of automatically defined `:foreign_key`.
Defaults to: `:integer` and can be set per schema via `@foreign_key_type`
All other options are forwarded to the underlying foreign key definition
and therefore accept the same options as `field/3`.
## Examples
defmodule Comment do
use Ecto.Model
schema "comments" do
belongs_to :post, Post
end
end
# The post can come preloaded on the comment record
[comment] = Repo.all(from(c in Comment, where: c.id == 42, preload: :post))
comment.post #=> %Post{...}
## Polymorphic associations
One common use case for belongs to associations is to handle
polymorphism. For example, imagine you have defined a Comment
model and you wish to use it for commenting on both tasks and
posts.
Some abstractions would force you to define some sort of
polymorphic association with two fields in your database:
* commentable_type
* commentable_id
The problem with this approach is that it breaks references in
the database. You can't use foreign keys and it is very inefficient
both in terms of query time and storage.
In Ecto, we have two ways to solve this issue. The simplest one
is to define multiple fields in the Comment model, one for each
association:
* task_id
* post_id
Unless you have dozens of columns, this is simpler for the developer,
more DB friendly and more efficient on all aspects.
Alternatively, because Ecto does not tie a model to a given table,
we can use separate tables for each association. Let's start over
and define a new Comment model:
defmodule Comment do
use Ecto.Model
schema "abstract table: comments" do
# This will be used by associations on each "concrete" table
field :assoc_id, :integer
end
end
Notice we have changed the table name to "abstract table: comments".
You can choose whatever name you want, the point here is that this
particular table will never exist.
Now in your Post and Task models:
defmodule Post do
use Ecto.Model
schema "posts" do
has_many :comments, {"posts_comments", Comment}, foreign_key: :assoc_id
end
end
defmodule Task do
use Ecto.Model
schema "tasks" do
has_many :comments, {"tasks_comments", Comment}, foreign_key: :assoc_id
end
end
Now each association uses its own specific table, "posts_comments"
and "tasks_comments", which must be created on migrations. The
advantage of this approach is that we never store unrelated data
together, also ensuring we keep databases references fast and correct.
When using this technique, the only limitation is that you cannot
build comments directly. For example, the command below
Repo.insert!(%Comment{})
will attempt to use the abstract table. Instead, one should
Repo.insert!(build(post, :comments))
where `build/2` is defined in `Ecto.Model`. You can also
use `assoc/2` in both `Ecto.Model` and in the query syntax
to easily retrieve associated comments to a given post or
task:
# Fetch all comments associated to the given task
Repo.all(assoc(task, :comments))
Finally, if for some reason you wish to query one of comments
table directly, you can also specify the tuple source in
the query syntax:
Repo.all from(c in {"posts_comments", Comment}), ...)
"""
defmacro belongs_to(name, queryable, opts \\ []) do
quote do
Ecto.Schema.__belongs_to__(__MODULE__, unquote(name), unquote(queryable), unquote(opts))
end
end
## Embeds
@doc ~S"""
Indicates an embedding of one model.
The current model has zero or one records of the other model embedded
inside of it. It uses a field similar to the `:map` type for storage,
but allows embedded models to have all the things regular models can -
callbacks, structured fields, etc. All typecasting operations are
performed on an embedded model alongside the operations on the parent
model.
You must declare your `embeds_one/3` field with type `:map` at the
database level.
## Options
* `:on_cast` - the default changeset function to call during casting,
which can be overridden in `Ecto.Changeset.cast/4`. It's an atom representing
the function name in the embedded model's module which will receive
the module and the parameters for casting (default: `:changeset`).
* `:strategy` - the strategy for storing models in the database.
Ecto supports only the `:replace` strategy out of the box which is the
default. Read the strategy in `embeds_many/3` for more info.
* `:on_replace` - The action taken on associations when the model is
replaced when casting or manipulating parent changeset. May be
`:raise` (default), `:mark_as_invalid`, or `:delete`.
See `Ecto.Changeset`'s section on related models for more info.
## Examples
defmodule Order do
use Ecto.Model
schema "orders" do
embeds_one :item, Item
end
end
defmodule Item do
use Ecto.Model
# A required field for all embedded documents
@primary_key {:id, :binary_id, autogenerate: true}
schema "" do
field :name
end
end
# The item is loaded with the order
order = Repo.get!(Order, 42)
order.item #=> %Item{...}
Adding and removal of embeds can only be done via the `Ecto.Changeset`
API so Ecto can properly track the embedded model life-cycle:
order = Repo.get!(Order, 42)
# Generate a changeset
changeset = Ecto.Changeset.change(order)
# Change, put a new one or remove an item
changeset = Ecto.Changeset.put_change(order, :item, nil)
# Update the order
changeset = Repo.update!(changeset)
"""
defmacro embeds_one(name, model, opts \\ []) do
quote do
Ecto.Schema.__embeds_one__(__MODULE__, unquote(name), unquote(model), unquote(opts))
end
end
@doc ~S"""
Indicates an embedding of many models.
The current model has zero or more records of the other model embedded
inside of it, contained in a list. Embedded models have all the things
regular models do - callbacks, structured fields, etc.
It is recommended to declare your `embeds_many/3` field with type
`{:array, :map}` and default value of `[]` at the database level.
In fact, Ecto will automatically translate `nil` values from the
database into empty lists for embeds many (this behaviour is specific
to `embeds_many/3` fields in order to mimic `has_many/3`).
## Options
* `:on_cast` - the default changeset function to call during casting,
which can be overridden in `Ecto.Changeset.cast/4`. It's an atom representing
the function name in the embedded model's module which will receive
the module and the parameters for casting (default: `:changeset`).
* `:strategy` - the strategy for storing models in the database.
Ecto supports only the `:replace` strategy out of the box which is the
default. Read strategy section below for more info.
* `:on_replace` - The action taken on associations when the model is
replaced when casting or manipulating parent changeset. May be
`:raise` (default), `:mark_as_invalid`, or `:delete`.
See `Ecto.Changeset`'s section on related models for more info.
## Examples
defmodule Order do
use Ecto.Model
schema "orders" do
embeds_many :items, Item
end
end
defmodule Item do
use Ecto.Model
# embedded_schema is a shortcut for:
#
# @primary_key {:id, :binary_id, autogenerate: true}
# schema "embedded Item" do
#
embedded_schema do
field :name
end
end
# The items are loaded with the order
order = Repo.get!(Order, 42)
order.items #=> [%Item{...}, ...]
Adding and removal of embeds can only be done via the `Ecto.Changeset`
API so Ecto can properly track the embedded models' life-cycle:
order = Repo.get!(Order, 42)
# Generate a changeset
changeset = Ecto.Changeset.change(order)
# Change, put a new one or remove all items
changeset = Ecto.Changeset.put_change(order, :items, [])
# Update the order
changeset = Repo.update!(changeset)
## Strategy
A strategy configures how modules should be inserted, updated and deleted
from the database. Changing the strategy may affect how items are stored in
the database, although embeds_many will always have them as a list in the
model.
Ecto supports only the `:replace` strategy out of the box which is the
default. This means all embeds in the model always fully replace the entries
in the database.
For example, if you have a collection with a 100 items, the 100 items will
be sent whenever any of them change. The approach is useful when you need the
parent and embeds to always be consistent.
Other databases may support different strategies, like one that only changes
the embeds that have effectively changed, also reducing the amount of data
sent to the database. This is especially common in NoSQL databases.
Please check your adapter documentation in case it supports other strategies.
"""
defmacro embeds_many(name, model, opts \\ []) do
quote do
Ecto.Schema.__embeds_many__(__MODULE__, unquote(name), unquote(model), unquote(opts))
end
end
## Callbacks
@doc false
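  # Loads raw adapter data (a map or an ordered list) into the model struct, marks it as :loaded, and runs after_load callbacks.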
def __load__(model, prefix, source, context, data, loader) do
source = source || model.__schema__(:source)
struct = model.__struct__()
fields = model.__schema__(:types)
loaded = do_load(struct, fields, data, loader)
loaded = Map.put(loaded, :__meta__,
%Metadata{state: :loaded, source: {prefix, source}, context: context})
Ecto.Model.Callbacks.__apply__(model, :after_load, loaded)
end
defp do_load(struct, fields, map, loader) when is_map(map) do
Enum.reduce(fields, struct, fn
{field, type}, acc ->
value = load!(type, Map.get(map, Atom.to_string(field)), loader)
Map.put(acc, field, value)
end)
end
defp do_load(struct, fields, list, loader) when is_list(list) do
Enum.reduce(fields, {struct, list}, fn
{field, type}, {acc, [h|t]} ->
value = load!(type, h, loader)
{Map.put(acc, field, value), t}
end) |> elem(0)
end
defp load!(type, value, loader) do
case loader.(type, value) do
{:ok, value} -> value
:error -> raise ArgumentError, "cannot load `#{inspect value}` as type #{inspect type}"
end
end
@doc false
def __field__(mod, name, type, pk?, opts) do
check_type!(name, type, opts[:virtual])
default = default_for_type(type, opts)
check_default!(name, type, default)
Module.put_attribute(mod, :changeset_fields, {name, type})
put_struct_field(mod, name, default)
unless opts[:virtual] do
if raw = opts[:read_after_writes] do
Module.put_attribute(mod, :ecto_raw, name)
end
if gen = opts[:autogenerate] do
store_autogenerate!(mod, name, type, pk?)
end
if raw && gen do
raise ArgumentError, "cannot mark the same field as autogenerate and read_after_writes"
end
Module.put_attribute(mod, :ecto_fields, {name, type})
end
end
@valid_has_options [:foreign_key, :references, :through, :on_delete,
:defaults, :on_cast, :on_replace]
@doc false
def __has_many__(mod, name, queryable, opts) do
check_options!(opts, @valid_has_options, "has_many/3")
if is_list(queryable) and Keyword.has_key?(queryable, :through) do
association(mod, :many, name, Ecto.Association.HasThrough, queryable)
else
struct =
association(mod, :many, name, Ecto.Association.Has, [queryable: queryable] ++ opts)
Module.put_attribute(mod, :changeset_fields, {name, {:assoc, struct}})
end
end
@doc false
def __has_one__(mod, name, queryable, opts) do
check_options!(opts, @valid_has_options, "has_one/3")
if is_list(queryable) and Keyword.has_key?(queryable, :through) do
association(mod, :one, name, Ecto.Association.HasThrough, queryable)
else
struct =
association(mod, :one, name, Ecto.Association.Has, [queryable: queryable] ++ opts)
Module.put_attribute(mod, :changeset_fields, {name, {:assoc, struct}})
end
end
@doc false
def __belongs_to__(mod, name, queryable, opts) do
check_options!(opts, [:foreign_key, :references, :define_field, :type], "belongs_to/3")
opts = Keyword.put_new(opts, :foreign_key, :"#{name}_id")
foreign_key_type = opts[:type] || Module.get_attribute(mod, :foreign_key_type)
if Keyword.get(opts, :define_field, true) do
__field__(mod, opts[:foreign_key], foreign_key_type, false, opts)
end
association(mod, :one, name, Ecto.Association.BelongsTo, [queryable: queryable] ++ opts)
end
@doc false
def __embeds_one__(mod, name, model, opts) do
check_options!(opts, [:on_cast, :strategy, :on_replace], "embeds_one/3")
embed(mod, :one, name, model, opts)
end
@doc false
def __embeds_many__(mod, name, model, opts) do
check_options!(opts, [:on_cast, :strategy, :on_replace], "embeds_many/3")
opts = Keyword.put(opts, :default, [])
embed(mod, :many, name, model, opts)
end
## Quoted callbacks
@doc false
def __changeset__(changeset_fields) do
map = changeset_fields |> Enum.into(%{}) |> Macro.escape()
quote do
def __changeset__, do: unquote(map)
end
end
@doc false
def __defstruct__(struct_fields) do
quote do
defstruct unquote(Macro.escape(struct_fields))
end
end
@doc false
def __schema__(source, fields, primary_key) do
field_names = Enum.map(fields, &elem(&1, 0))
# Hash is used by the query cache to specify
# the underlying model structure did not change.
# We don't include the source because the source
# is already part of the query cache itself.
hash = :erlang.phash2({primary_key, fields})
quote do
def __schema__(:source), do: unquote(Macro.escape(source))
def __schema__(:fields), do: unquote(field_names)
def __schema__(:primary_key), do: unquote(primary_key)
def __schema__(:hash), do: unquote(hash)
end
end
@doc false
def __types__(fields) do
quoted =
Enum.map(fields, fn {name, type} ->
quote do
def __schema__(:type, unquote(name)) do
unquote(Macro.escape(type))
end
end
end)
types = Macro.escape(fields)
quote do
def __schema__(:types), do: unquote(types)
unquote(quoted)
def __schema__(:type, _), do: nil
end
end
@doc false
def __assocs__(assocs) do
quoted =
Enum.map(assocs, fn {name, refl} ->
quote do
def __schema__(:association, unquote(name)) do
unquote(Macro.escape(refl))
end
end
end)
assoc_names = Enum.map(assocs, &elem(&1, 0))
quote do
def __schema__(:associations), do: unquote(assoc_names)
unquote(quoted)
def __schema__(:association, _), do: nil
end
end
@doc false
def __embeds__(embeds) do
quoted =
Enum.map(embeds, fn {name, refl} ->
quote do
def __schema__(:embed, unquote(name)) do
unquote(Macro.escape(refl))
end
end
end)
embed_names = Enum.map(embeds, &elem(&1, 0))
quote do
def __schema__(:embeds), do: unquote(embed_names)
unquote(quoted)
def __schema__(:embed, _), do: nil
end
end
@doc false
def __read_after_writes__(fields) do
quote do
def __schema__(:read_after_writes), do: unquote(Enum.reverse(fields))
end
end
@doc false
def __autogenerate__(id) do
quote do
def __schema__(:autogenerate_id), do: unquote(id)
end
end
@doc false
def __before_compile__(env) do
unless Module.get_attribute(env.module, :struct_fields) do
raise "module #{inspect env.module} uses Ecto.Model (or Ecto.Schema) but it " <>
"does not define a schema. Please cherry pick the functionality you want " <>
"instead, for example, by importing Ecto.Query, Ecto.Model or others"
end
end
## Private
defp association(mod, cardinality, name, association, opts) do
not_loaded = %Ecto.Association.NotLoaded{__owner__: mod,
__field__: name, __cardinality__: cardinality}
put_struct_field(mod, name, not_loaded)
opts = [cardinality: cardinality] ++ opts
struct = association.struct(mod, name, opts)
Module.put_attribute(mod, :ecto_assocs, {name, struct})
struct
end
defp embed(mod, cardinality, name, model, opts) do
opts = [cardinality: cardinality, related: model] ++ opts
struct = Ecto.Embedded.struct(mod, name, opts)
__field__(mod, name, {:embed, struct}, false, opts)
Module.put_attribute(mod, :ecto_embeds, {name, struct})
end
defp put_struct_field(mod, name, assoc) do
fields = Module.get_attribute(mod, :struct_fields)
if List.keyfind(fields, name, 0) do
raise ArgumentError, "field/association #{inspect name} is already set on schema"
end
Module.put_attribute(mod, :struct_fields, {name, assoc})
end
defp check_options!(opts, valid, fun_arity) do
case Enum.find(opts, fn {k, _} -> not k in valid end) do
{k, _} ->
raise ArgumentError, "invalid option #{inspect k} for #{fun_arity}"
nil ->
:ok
end
end
defp check_type!(name, type, virtual?) do
cond do
type == :any and not virtual? ->
raise ArgumentError, "only virtual fields can have type :any, " <>
"invalid type for field #{inspect name}"
Ecto.Type.primitive?(type) ->
true
is_atom(type) ->
if Code.ensure_compiled?(type) and function_exported?(type, :type, 0) do
type
else
raise_type_error(name, type)
end
true ->
raise ArgumentError, "invalid type #{inspect type} for field #{inspect name}"
end
end
defp raise_type_error(name, :datetime) do
raise_wrong_ecto_type(name, :datetime, Ecto.DateTime)
end
defp raise_type_error(name, :date) do
raise_wrong_ecto_type(name, :date, Ecto.Date)
end
defp raise_type_error(name, :time) do
raise_wrong_ecto_type(name, :time, Ecto.Time)
end
defp raise_type_error(name, :uuid) do
raise_wrong_ecto_type(name, :uuid, Ecto.UUID)
end
defp raise_type_error(name, type) do
raise ArgumentError, "invalid or unknown type #{inspect type} for field #{inspect name}"
end
defp raise_wrong_ecto_type(name, given_type, real_type) do
raise ArgumentError, "invalid or unknown type #{inspect given_type} for field #{inspect name}. " <>
"Maybe you meant to use #{inspect real_type} as the type?"
end
defp check_default!(_name, :binary_id, _default), do: :ok
defp check_default!(_name, {:embed, _}, _default), do: :ok
defp check_default!(name, type, default) do
case Ecto.Type.dump(type, default) do
{:ok, _} ->
:ok
:error ->
raise ArgumentError, "invalid default argument `#{inspect default}` for " <>
"field #{inspect name} of type #{inspect type}"
end
end
defp store_autogenerate!(mod, name, type, true) do
if id = autogenerate_id(type) do
if Module.get_attribute(mod, :ecto_autogenerate_id) do
raise ArgumentError, "only one primary key with ID type may be marked as autogenerated"
end
Module.put_attribute(mod, :ecto_autogenerate_id, {name, id})
else
store_autogenerate!(mod, name, type, false)
end
end
defp store_autogenerate!(mod, name, type, false) do
cond do
_ = autogenerate_id(type) ->
raise ArgumentError, "only primary keys allow :autogenerate for type #{inspect type}, " <>
"field #{inspect name} is not a primary key"
Ecto.Type.primitive?(type) ->
raise ArgumentError, "field #{inspect name} does not support :autogenerate because it uses a " <>
"primitive type #{inspect type}"
# Note the custom type has already been loaded in check_type!/3
not function_exported?(type, :generate, 0) ->
raise ArgumentError, "field #{inspect name} does not support :autogenerate because it uses a " <>
"custom type #{inspect type} that does not define generate/0"
true ->
Module.put_attribute(mod, :ecto_autogenerate, {name, type})
end
end
defp autogenerate_id(type) do
id = if Ecto.Type.primitive?(type), do: type, else: type.type
if id in [:id, :binary_id], do: id, else: nil
end
defp default_for_type({:array, _}, opts) do
Keyword.get(opts, :default, [])
end
defp default_for_type(_, opts) do
Keyword.get(opts, :default)
end
end
| 34.623377 | 105 | 0.665085 |
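# Illustrative sketch (editor addition, not part of the file above): a minimal
# schema showing how the private checks above surface to users of this older
# Ecto version. Module and field names are hypothetical.
defmodule MyApp.SampleSchema do
  use Ecto.Schema
  schema "samples" do
    # default_for_type/2 gives array fields a [] default
    field :tags, {:array, :string}
    # check_type!/3 only allows :any on virtual fields
    field :raw, :any, virtual: true
    # `field :born_on, :datetime` would raise via raise_type_error/2,
    # pointing the user at Ecto.DateTime instead
  end
end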
9ef1cb6805b4dfb150391f3c09e484d60868676a | 1,372 | ex | Elixir | web/channels/room_channel.ex | korczis/zaas | 6e04c210f324aac853a64b01e63855bd2dfb95d9 | [
"MIT"
] | null | null | null | web/channels/room_channel.ex | korczis/zaas | 6e04c210f324aac853a64b01e63855bd2dfb95d9 | [
"MIT"
] | null | null | null | web/channels/room_channel.ex | korczis/zaas | 6e04c210f324aac853a64b01e63855bd2dfb95d9 | [
"MIT"
] | null | null | null | defmodule Chat.RoomChannel do
@moduledoc """
  Channel for chat rooms: authorizes joins to the public lobby, broadcasts user
  messages and presence events, and sends periodic pings to subscribers
"""
use Phoenix.Channel
require Logger
@doc """
Authorize socket to subscribe and broadcast events on this channel & topic
Possible Return Values
`{:ok, socket}` to authorize subscription for channel for requested topic
`:ignore` to deny subscription/broadcast on this channel
for the requested topic
"""
def join("rooms:lobby", message, socket) do
Process.flag(:trap_exit, true)
:timer.send_interval(5000, :ping)
send(self(), {:after_join, message})
{:ok, socket}
end
def join("rooms:" <> _private_subtopic, _message, _socket) do
{:error, %{reason: "unauthorized"}}
end
def handle_info({:after_join, msg}, socket) do
broadcast! socket, "user:entered", %{user: msg["user"]}
push socket, "join", %{status: "connected"}
{:noreply, socket}
end
def handle_info(:ping, socket) do
push socket, "new:msg", %{user: "SYSTEM", body: "ping"}
{:noreply, socket}
end
def terminate(reason, _socket) do
Logger.debug fn ->
"> leave #{inspect reason}"
end
:ok
end
def handle_in("new:msg", msg, socket) do
broadcast! socket, "new:msg", %{user: msg["user"], body: msg["body"]}
{:reply, {:ok, %{msg: msg["body"]}}, assign(socket, :user, msg["user"])}
end
end
| 25.407407 | 76 | 0.652332 |
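# Illustrative sketch (editor addition, not part of the file above): exercising
# the channel from a Phoenix.ChannelTest case. The Chat.Endpoint module and the
# older socket/2 test helper are assumptions, not part of the original repo dump.
defmodule Chat.RoomChannelExampleTest do
  use ExUnit.Case
  use Phoenix.ChannelTest
  @endpoint Chat.Endpoint
  test "lobby broadcasts new messages" do
    {:ok, _reply, socket} =
      socket("user:1", %{})
      |> subscribe_and_join(Chat.RoomChannel, "rooms:lobby", %{"user" => "jane"})
    push(socket, "new:msg", %{"user" => "jane", "body" => "hi"})
    assert_broadcast "new:msg", %{user: "jane", body: "hi"}
  end
end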
9ef1d2eaca553c3e2424d9687d7ccf78851b9679 | 2,278 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v28/model/floodlight_activities_generate_tag_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v28/model/floodlight_activities_generate_tag_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v28/model/floodlight_activities_generate_tag_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.DFAReporting.V28.Model.FloodlightActivitiesGenerateTagResponse do
@moduledoc """
Floodlight Activity GenerateTag Response
## Attributes
- floodlightActivityTag (String.t): Generated tag for this Floodlight activity. For global site tags, this is the event snippet. Defaults to: `null`.
- globalSiteTagGlobalSnippet (String.t): The global snippet section of a global site tag. The global site tag sets new cookies on your domain, which will store a unique identifier for a user or the ad click that brought the user to your site. Learn more. Defaults to: `null`.
- kind (String.t): Identifies what kind of resource this is. Value: the fixed string \"dfareporting#floodlightActivitiesGenerateTagResponse\". Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:floodlightActivityTag => any(),
:globalSiteTagGlobalSnippet => any(),
:kind => any()
}
field(:floodlightActivityTag)
field(:globalSiteTagGlobalSnippet)
field(:kind)
end
defimpl Poison.Decoder,
for: GoogleApi.DFAReporting.V28.Model.FloodlightActivitiesGenerateTagResponse do
def decode(value, options) do
GoogleApi.DFAReporting.V28.Model.FloodlightActivitiesGenerateTagResponse.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.DFAReporting.V28.Model.FloodlightActivitiesGenerateTagResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 38.610169 | 277 | 0.755926 |
9ef1e3b1ea64f746b1fd44f92af4c9983d6bea1e | 1,831 | ex | Elixir | lib/assent/jwt_adapter.ex | lmeier/assent | 3f5a5d340eb1833cd9d6ada9d7e3056b7e3d4f41 | [
"MIT"
] | 160 | 2019-09-25T01:07:16.000Z | 2022-03-08T00:45:16.000Z | lib/assent/jwt_adapter.ex | lmeier/assent | 3f5a5d340eb1833cd9d6ada9d7e3056b7e3d4f41 | [
"MIT"
] | 47 | 2019-09-25T00:38:44.000Z | 2022-03-21T17:35:43.000Z | lib/assent/jwt_adapter.ex | lmeier/assent | 3f5a5d340eb1833cd9d6ada9d7e3056b7e3d4f41 | [
"MIT"
] | 30 | 2019-10-04T15:58:06.000Z | 2021-12-10T16:43:52.000Z | defmodule Assent.JWTAdapter do
@moduledoc """
JWT adapter helper module.
## Usage
defmodule MyApp.MyJWTAdapter do
@behaviour Assent.JWTAdapter
@impl true
def sign(claims, alg, secret, opts) do
# ...
end
@impl true
def verify(token, secret, opts) do
# ...
end
end
"""
alias Assent.Config
@callback sign(map(), binary(), binary(), Keyword.t()) :: {:ok, binary()} | {:error, any()}
@callback verify(binary(), binary() | map() | nil, Keyword.t()) :: {:ok, map()} | {:error, any()}
@doc """
Generates a signed JSON Web Token signature
"""
@spec sign(map(), binary(), binary(), Keyword.t()) :: {:ok, binary()} | {:error, term()}
def sign(claims, alg, secret, opts \\ []) do
{adapter, opts} = fetch_adapter(opts)
adapter.sign(claims, alg, secret, opts)
end
@doc """
Verifies the JSON Web Token signature
"""
@spec verify(binary(), binary() | map() | nil, Keyword.t()) :: {:ok, map()} | {:error, any()}
def verify(token, secret, opts \\ []) do
{adapter, opts} = fetch_adapter(opts)
adapter.verify(token, secret, opts)
end
defp fetch_adapter(opts) do
default_opts = Keyword.put(opts, :json_library, Config.json_library(opts))
case Keyword.get(opts, :jwt_adapter, Assent.JWTAdapter.AssentJWT) do
{adapter, opts} -> {adapter, Keyword.merge(default_opts, opts)}
adapter -> {adapter, default_opts}
end
end
@doc """
Loads a private key from the provided configuration
"""
@spec load_private_key(Config.t()) :: {:ok, binary()} | {:error, term()}
def load_private_key(config) do
case Config.fetch(config, :private_key_path) do
{:ok, path} -> File.read(path)
{:error, _any} -> Config.fetch(config, :private_key)
end
end
end
| 28.169231 | 99 | 0.600765 |
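# Illustrative sketch (editor addition, not part of the file above): calling the
# helper with the default adapter. The claims, algorithm, secret and the explicit
# :json_library option are made-up example values, assuming Jason is available;
# the exact shape of the verified result depends on the configured adapter.
claims = %{"sub" => "user-1", "exp" => 1_700_000_000}
{:ok, token} = Assent.JWTAdapter.sign(claims, "HS256", "example-secret", json_library: Jason)
{:ok, _verified} = Assent.JWTAdapter.verify(token, "example-secret", json_library: Jason)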
9ef22230fe9c825cf586f184b4c2b01dfb306fa7 | 6,860 | ex | Elixir | lib/validators/time.ex | tomciopp/ecto_commons | 75ca493739a54b2f73b753c3d2623dc61781d91d | [
"MIT"
] | null | null | null | lib/validators/time.ex | tomciopp/ecto_commons | 75ca493739a54b2f73b753c3d2623dc61781d91d | [
"MIT"
] | null | null | null | lib/validators/time.ex | tomciopp/ecto_commons | 75ca493739a54b2f73b753c3d2623dc61781d91d | [
"MIT"
] | null | null | null | defmodule EctoCommons.TimeValidator do
@moduledoc ~S"""
This module provides validators for `Time`s.
You can use the following checks:
* `:is` to check if a `Time` is exactly some `Time`. You can also provide a `:delta` option (in seconds)
to specify a delta around which the `Time` is still considered identical.
* `:before` to check if a `Time` is before some `Time`
* `:after` to check if a `Time` is after some `Time`
You can also combine the given checks for complex checks. Errors won't be stacked though, the first error
encountered will be returned and subsequent checks will be skipped.
If you want to check everything at once, you'll need to call this validator multiple times.
Also, instead of providing a `Time`, you can also provide some special atoms:
* `:utc_now` will compare the given `Time` with the `Time` at runtime, by calling `Time.utc_now()`.
## Example:
iex> types = %{meeting_start: :time}
iex> params = %{meeting_start: ~T[12:01:01]}
iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
...> |> validate_time(:meeting_start)
#Ecto.Changeset<action: nil, changes: %{meeting_start: ~T[12:01:01]}, errors: [], data: %{}, valid?: true>
# Using :is to ensure a time is identical to another time
iex> types = %{meeting_start: :time}
iex> params = %{meeting_start: ~T[12:01:01]}
iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
...> |> validate_time(:meeting_start, is: ~T[12:01:01])
#Ecto.Changeset<action: nil, changes: %{meeting_start: ~T[12:01:01]}, errors: [], data: %{}, valid?: true>
iex> types = %{meeting_start: :time}
iex> params = %{meeting_start: ~T[12:01:01]}
iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
...> |> validate_time(:meeting_start, is: ~T[13:01:01])
#Ecto.Changeset<action: nil, changes: %{meeting_start: ~T[12:01:01]}, errors: [meeting_start: {"should be %{is}.", [validation: :time, kind: :is]}], data: %{}, valid?: false>
# Using :is with :delta to ensure a time is near another another time
iex> types = %{meeting_start: :time}
iex> params = %{meeting_start: ~T[12:01:01]}
iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
...> |> validate_time(:meeting_start, is: ~T[12:15:01], delta: 900)
#Ecto.Changeset<action: nil, changes: %{meeting_start: ~T[12:01:01]}, errors: [], data: %{}, valid?: true>
iex> types = %{meeting_start: :time}
iex> params = %{meeting_start: ~T[13:01:01]}
iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
...> |> validate_time(:meeting_start, is: ~T[12:01:01], delta: 900)
#Ecto.Changeset<action: nil, changes: %{meeting_start: ~T[13:01:01]}, errors: [meeting_start: {"should be %{is}.", [validation: :time, kind: :is]}], data: %{}, valid?: false>
# Using :before to ensure time is before given time
iex> types = %{meeting_start: :time}
iex> params = %{meeting_start: ~T[12:01:01]}
iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
...> |> validate_time(:meeting_start, before: ~T[13:01:01])
#Ecto.Changeset<action: nil, changes: %{meeting_start: ~T[12:01:01]}, errors: [], data: %{}, valid?: true>
iex> types = %{meeting_start: :time}
iex> params = %{meeting_start: ~T[12:01:01]}
iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
...> |> validate_time(:meeting_start, before: ~T[11:01:01])
#Ecto.Changeset<action: nil, changes: %{meeting_start: ~T[12:01:01]}, errors: [meeting_start: {"should be before %{before}.", [validation: :time, kind: :before]}], data: %{}, valid?: false>
# Using :after to ensure time is after given time
iex> types = %{meeting_start: :time}
iex> params = %{meeting_start: ~T[12:01:01]}
iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
...> |> validate_time(:meeting_start, after: ~T[11:01:01])
#Ecto.Changeset<action: nil, changes: %{meeting_start: ~T[12:01:01]}, errors: [], data: %{}, valid?: true>
iex> types = %{meeting_start: :time}
iex> params = %{meeting_start: ~T[12:01:01]}
iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
...> |> validate_time(:meeting_start, after: ~T[13:01:01])
#Ecto.Changeset<action: nil, changes: %{meeting_start: ~T[12:01:01]}, errors: [meeting_start: {"should be after %{after}.", [validation: :time, kind: :after]}], data: %{}, valid?: false>
"""
import Ecto.Changeset
def validate_time(changeset, field, opts \\ []) do
validate_change(changeset, field, {:time, opts}, fn
_, value ->
is = get_validation_value(opts[:is])
afterr = get_validation_value(opts[:after])
before = get_validation_value(opts[:before])
error =
(is && wrong_time(value, is, opts[:delta], opts)) ||
(afterr && too_soon(value, afterr, opts)) ||
(before && too_late(value, before, opts))
if error, do: [{field, error}], else: []
end)
end
defp wrong_time(%Time{} = value, value, _delta, _opts), do: nil
defp wrong_time(%DateTime{} = value, is, delta, opts),
do: DateTime.to_time(value) |> wrong_time(is, delta, opts)
defp wrong_time(%Time{} = value, is, nil, opts) do
case Time.compare(value, is) do
:eq -> nil
_ -> {message(opts, "should be %{is}."), validation: :time, kind: :is}
end
end
defp wrong_time(%Time{} = value, is, delta, opts) do
case Time.compare(value, is) do
:eq ->
nil
_ ->
case abs(Time.diff(value, is)) do
val when val > delta ->
{message(opts, "should be %{is}."), validation: :time, kind: :is}
_ ->
nil
end
end
end
defp too_soon(%Time{} = value, value, _opts), do: nil
defp too_soon(%DateTime{} = value, afterr, opts),
do: DateTime.to_time(value) |> too_soon(afterr, opts)
defp too_soon(%Time{} = value, afterr, opts) do
case Time.compare(value, afterr) do
:gt -> nil
_ -> {message(opts, "should be after %{after}."), validation: :time, kind: :after}
end
end
defp too_late(%Time{} = value, value, _opts), do: nil
defp too_late(%DateTime{} = value, before, opts),
do: DateTime.to_time(value) |> too_late(before, opts)
defp too_late(%Time{} = value, before, opts) do
case Time.compare(value, before) do
:lt -> nil
_ -> {message(opts, "should be before %{before}."), validation: :time, kind: :before}
end
end
defp get_validation_value(nil), do: nil
defp get_validation_value(:utc_now), do: Time.utc_now()
defp get_validation_value(%Time{} = val), do: val
defp message(opts, key \\ :message, default) do
Keyword.get(opts, key, default)
end
end
| 42.875 | 195 | 0.616035 |
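# Illustrative sketch (editor addition, not part of the file above): errors are
# not stacked, so checking both a lower and an upper bound means calling the
# validator once per check.
types = %{meeting_start: :time}
params = %{meeting_start: ~T[12:01:01]}
{%{}, types}
|> Ecto.Changeset.cast(params, Map.keys(types))
|> EctoCommons.TimeValidator.validate_time(:meeting_start, after: ~T[09:00:00])
|> EctoCommons.TimeValidator.validate_time(:meeting_start, before: ~T[18:00:00])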
9ef23930963108e02442f49bc1e9f3147fa42f93 | 1,384 | ex | Elixir | kousa/lib/broth/message/room/update_scheduled.ex | asgarovf/dogehouse | 12b81d2008562373591fb55ab8179a3fa8a5db18 | [
"MIT"
] | 2 | 2021-05-01T16:57:50.000Z | 2021-07-07T22:01:14.000Z | kousa/lib/broth/message/room/update_scheduled.ex | asgarovf/dogehouse | 12b81d2008562373591fb55ab8179a3fa8a5db18 | [
"MIT"
] | null | null | null | kousa/lib/broth/message/room/update_scheduled.ex | asgarovf/dogehouse | 12b81d2008562373591fb55ab8179a3fa8a5db18 | [
"MIT"
] | null | null | null | defmodule Broth.Message.Room.UpdateScheduled do
use Broth.Message.Call,
reply: __MODULE__
alias Beef.Repo
@derive {Jason.Encoder, only: [:name, :scheduledFor, :description]}
@primary_key {:id, :binary_id, []}
schema "scheduled_rooms" do
field(:name, :string)
field(:scheduledFor, :utc_datetime_usec)
field(:description, :string, default: "")
end
import Broth.Message.Room.CreateScheduled, only: [validate_future: 1]
def changeset(initializer \\ %__MODULE__{}, data)
def changeset(_, data)
when not is_map_key(data, "id") or
is_nil(:erlang.map_get("id", data)) do
id_error("can't be blank")
end
def changeset(_, data) do
case Repo.get(__MODULE__, data["id"]) do
nil ->
id_error("room not found")
room ->
room
|> cast(data, [:name, :scheduledFor, :description])
|> validate_required([:name, :scheduledFor])
|> validate_future
end
end
def id_error(message) do
%__MODULE__{}
|> change
|> add_error(:id, message)
end
def execute(changeset, state) do
with {:ok, update} <- apply_action(changeset, :validate),
update_data = update |> Map.from_struct() |> Map.delete(:id),
:ok <- Kousa.ScheduledRoom.edit(state.user_id, update.id, update_data) do
{:reply, Repo.get(__MODULE__, update.id), state}
end
end
end
| 26.615385 | 82 | 0.638728 |
9ef28b084996b210346687a3472ef9b556c3d8d3 | 625 | ex | Elixir | lib/changelog_web/views/admin/episode_request_view.ex | wojtekmach/changelog.com | d4a8a7703c5f07a3da63bffd770f4642488cf8fd | [
"MIT"
] | 1 | 2020-05-20T16:58:17.000Z | 2020-05-20T16:58:17.000Z | lib/changelog_web/views/admin/episode_request_view.ex | type1fool/changelog.com | fbec3528cc3f5adfdc75b008bb92b17efc4f248f | [
"MIT"
] | null | null | null | lib/changelog_web/views/admin/episode_request_view.ex | type1fool/changelog.com | fbec3528cc3f5adfdc75b008bb92b17efc4f248f | [
"MIT"
] | null | null | null | defmodule ChangelogWeb.Admin.EpisodeRequestView do
use ChangelogWeb, :admin_view
alias ChangelogWeb.{PersonView}
def description(request) do
{:ok, date} = Timex.format(request.inserted_at, "{M}/{D}")
"##{request.id}" <>
" by " <>
request.submitter.handle <>
" (on #{date}) " <>
pitch_preview(request, 60)
end
def pitch_preview(%{pitch: pitch}, count \\ 80) do
pitch |> md_to_text() |> truncate(count)
end
def submitter_name(%{pronunciation: pronunciation}) do
case pronunciation do
nil -> "Anon"
"" -> "Anon"
_else -> pronunciation
end
end
end
| 23.148148 | 62 | 0.624 |
9ef28d5951172589dcfb785a5dc05c777b9b8390 | 851 | ex | Elixir | elixir/lib/homework/transactions/transaction.ex | phantomxc/web-homework | 28e31e604d9c19a07424b742e34e9fc45b1968de | [
"MIT"
] | null | null | null | elixir/lib/homework/transactions/transaction.ex | phantomxc/web-homework | 28e31e604d9c19a07424b742e34e9fc45b1968de | [
"MIT"
] | null | null | null | elixir/lib/homework/transactions/transaction.ex | phantomxc/web-homework | 28e31e604d9c19a07424b742e34e9fc45b1968de | [
"MIT"
] | null | null | null | defmodule Homework.Transactions.Transaction do
use Ecto.Schema
import Ecto.Changeset
alias Homework.Merchants.Merchant
alias Homework.Users.User
@primary_key {:id, :binary_id, autogenerate: true}
schema "transactions" do
field(:amount, :float)
field(:credit, :boolean, default: false)
field(:debit, :boolean, default: false)
field(:description, :string)
field(:date, :date)
belongs_to(:merchant, Merchant, type: :binary_id, foreign_key: :merchant_id)
belongs_to(:user, User, type: :binary_id, foreign_key: :user_id)
timestamps()
end
@doc false
def changeset(transaction, attrs) do
transaction
|> cast(attrs, [:user_id, :amount, :debit, :description, :merchant_id, :date, :credit])
|> validate_required([:user_id, :amount, :debit, :description, :merchant_id, :date, :credit])
end
end
| 30.392857 | 97 | 0.702703 |
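# Illustrative sketch (editor addition, not part of the file above): building a
# changeset for the schema above; every attribute value here is made up.
attrs = %{
  user_id: Ecto.UUID.generate(),
  merchant_id: Ecto.UUID.generate(),
  amount: 42.5,
  debit: true,
  credit: false,
  description: "Office supplies",
  date: ~D[2021-06-01]
}
changeset =
  Homework.Transactions.Transaction.changeset(%Homework.Transactions.Transaction{}, attrs)
true = changeset.valid?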
9ef29b9bc13ac3595aa603ce89958210c9b110a3 | 178 | exs | Elixir | test/fluxir/encoder_test.exs | nallwhy/fluxir | 5f380d01df67d4efcc1357e5d1b3f1ebfe6b65b9 | [
"Apache-2.0"
] | null | null | null | test/fluxir/encoder_test.exs | nallwhy/fluxir | 5f380d01df67d4efcc1357e5d1b3f1ebfe6b65b9 | [
"Apache-2.0"
] | 16 | 2021-11-27T18:13:13.000Z | 2021-11-29T23:02:28.000Z | test/fluxir/encoder_test.exs | nallwhy/fluxir | 5f380d01df67d4efcc1357e5d1b3f1ebfe6b65b9 | [
"Apache-2.0"
] | null | null | null | defmodule Fluxir.EncoderTest do
use ExUnit.Case, async: true
alias Fluxir.Encoder
test "encode/2" do
assert ~s("string") = Encoder.encode(:string, "string")
end
end
| 19.777778 | 59 | 0.702247 |
9ef2a65a1c9bc7b1b6ae75c75c46681d326b7a28 | 74 | exs | Elixir | .formatter.exs | ks1venberg/mailibex | 01115dba93560257c6641efea3baacaa9ac03a39 | [
"MIT"
] | 1 | 2019-01-24T10:43:29.000Z | 2019-01-24T10:43:29.000Z | .formatter.exs | ks1venberg/mailibex | 01115dba93560257c6641efea3baacaa9ac03a39 | [
"MIT"
] | 1 | 2019-06-24T14:24:46.000Z | 2019-06-24T14:24:46.000Z | .formatter.exs | ks1venberg/mailibex | 01115dba93560257c6641efea3baacaa9ac03a39 | [
"MIT"
] | 2 | 2019-06-14T13:27:17.000Z | 2020-02-17T12:19:38.000Z | # Used by "mix format"
[
inputs: ["mix.exs", "config/*.exs", "lib/*"]
]
| 14.8 | 46 | 0.527027 |
9ef304bb50a448906b85263c01c1e934fe83a931 | 135 | exs | Elixir | .formatter.exs | scottming/joi | 1c7546bb0473fa53325533c7ab4aec402bfba0d1 | [
"MIT"
] | 25 | 2020-12-03T08:14:51.000Z | 2021-09-01T15:34:30.000Z | .formatter.exs | scottming/joi | 1c7546bb0473fa53325533c7ab4aec402bfba0d1 | [
"MIT"
] | 5 | 2021-02-13T12:56:56.000Z | 2021-07-30T01:27:51.000Z | .formatter.exs | scottming/joi | 1c7546bb0473fa53325533c7ab4aec402bfba0d1 | [
"MIT"
] | 2 | 2021-03-15T00:37:13.000Z | 2021-07-26T15:21:55.000Z | # Used by "mix format"
[
import_deps: [:stream_data],
line_length: 120,
inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}"]
]
| 19.285714 | 56 | 0.6 |
9ef30588aae3e8c12cee80ccfdbd6b2dd2d62916 | 4,799 | ex | Elixir | lib/glimesh_web/live/user_live/components/report_button.ex | itsUnsmart/glimesh.tv | 22c532184bb5046f6c6d8232e8bd66ba534c01c1 | [
"MIT"
] | null | null | null | lib/glimesh_web/live/user_live/components/report_button.ex | itsUnsmart/glimesh.tv | 22c532184bb5046f6c6d8232e8bd66ba534c01c1 | [
"MIT"
] | null | null | null | lib/glimesh_web/live/user_live/components/report_button.ex | itsUnsmart/glimesh.tv | 22c532184bb5046f6c6d8232e8bd66ba534c01c1 | [
"MIT"
] | null | null | null | defmodule GlimeshWeb.UserLive.Components.ReportButton do
use GlimeshWeb, :live_view
@impl true
def render(assigns) do
~L"""
<%= if @user do %>
<div class="text-center">
<a href="#" phx-click="show_modal" class="text-danger">
<%= gettext("Report User") %> <i class="fas fa-flag"></i>
</a>
</div>
<%= if live_flash(@flash, :info) do %>
<p class="alert alert-info" role="alert"
phx-click="lv:clear-flash"
phx-value-key="info"><%= live_flash(@flash, :info) %></p>
<% end %>
<%= if @show_report do %>
<div id="reportModal" class="live-modal"
phx-capture-click="hide_modal"
phx-window-keydown="hide_modal"
phx-key="escape"
phx-target="#paymentModal2"
phx-page-loading>
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title">Report User</h5>
<button type="button" class="close" phx-click="hide_modal" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<div class="modal-body">
<p><%= gettext("What has this user done wrong?")%></p>
<%= form_for :user, "#", [phx_submit: :save] %>
<div class="form-check">
<input class="form-check-input" type="radio" name="report_reason" id="hateSpeech" value="hate-speech">
<label class="form-check-label" for="exampleRadios1">
<%= gettext("Hate Speech") %>
</label>
</div>
<div class="form-check">
<input class="form-check-input" type="radio" name="report_reason" id="reportInappropriateContent" value="inappropriate-content">
<label class="form-check-label" for="exampleRadios2">
<%= gettext("Inappropriate Content") %>
</label>
</div>
<div class="form-check">
<input class="form-check-input" type="radio" name="report_reason" id="reportCopyrightInfringementOrLawViolation" value="copyright-infringement-or-law-violation">
<label class="form-check-label" for="reportCopyrightInfringementOrLawViolation">
<%= gettext("Copyright Infringement / Law Violation") %>
</label>
</div>
<div class="form-check">
<input class="form-check-input" type="radio" name="report_reason" id="reportOther" value="other">
<label class="form-check-label" for="reportOther">
<%= gettext("Other") %>
</label>
</div>
<div class="form-group mt-4">
<label for="reportNotes"><%= gettext("Notes") %></label>
<input type="text" class="form-control" name="notes" id="reportNotes" placeholder="Another other details you'd like to share">
</div>
<button class="btn btn-danger btn-block mt-4"><%= gettext("Submit Report") %></button>
</form>
</div>
</div>
</div>
</div>
<% end %>
<% end %>
"""
end
@impl true
def mount(_params, %{"streamer" => streamer, "user" => nil}, socket) do
{:ok,
socket
|> assign(:streamer, streamer)
|> assign(:user, nil)
|> assign(:show_report, false)}
end
@impl true
def mount(_params, %{"streamer" => streamer, "user" => user}, socket) do
Gettext.put_locale(user.locale)
{:ok,
socket
|> assign(:streamer, streamer)
|> assign(:user, user)
|> assign(:show_report, false)}
end
@impl true
def handle_event("save", %{"report_reason" => report_reason, "notes" => notes}, socket) do
{:ok, _} =
Glimesh.Accounts.UserNotifier.deliver_user_report_alert(
socket.assigns.user,
socket.assigns.streamer,
report_reason,
notes
)
{:noreply,
socket |> assign(:show_report, false) |> put_flash(:info, "Report submitted, thank you!")}
end
@impl true
def handle_event("show_modal", _value, socket) do
{:noreply, socket |> assign(:show_report, true)}
end
@impl true
def handle_event("hide_modal", _value, socket) do
{:noreply, socket |> assign(:show_report, false)}
end
end
| 38.392 | 185 | 0.507189 |
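# Illustrative sketch (editor addition, not part of the file above): embedding
# the component from a parent template. The assigns are assumptions about the
# calling page, but the session keys must match what mount/3 pattern matches on.
#
#   <%= live_render(@conn, GlimeshWeb.UserLive.Components.ReportButton,
#         session: %{"streamer" => @streamer, "user" => @current_user}) %>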
9ef31034e2c6900f447b6495f0ddfd0de9c83078 | 2,436 | ex | Elixir | elixir/simple-linked-list/lib/linked_list.ex | yuriploc/exercism | 11d8629cabebd4b72ba55df0ffd5bffd8699ac5f | [
"MIT"
] | null | null | null | elixir/simple-linked-list/lib/linked_list.ex | yuriploc/exercism | 11d8629cabebd4b72ba55df0ffd5bffd8699ac5f | [
"MIT"
] | null | null | null | elixir/simple-linked-list/lib/linked_list.ex | yuriploc/exercism | 11d8629cabebd4b72ba55df0ffd5bffd8699ac5f | [
"MIT"
] | null | null | null | defmodule LinkedList do
@opaque t :: map()
@empty_list %{value: nil, next: nil}
@empty_list_error {:error, :empty_list}
@doc """
Construct a new LinkedList
"""
@spec new() :: t
def new(), do: @empty_list
@doc """
Push an item onto a LinkedList
"""
@spec push(t, any()) :: t
def push(list, elem), do: %{value: elem, next: list}
@doc """
Calculate the length of a LinkedList
"""
@spec length(t) :: non_neg_integer()
def length(list), do: length(list.next, 0)
defp length(nil, counter), do: counter
defp length(list, counter), do: length(list.next, counter + 1)
@doc """
Determine if a LinkedList is empty
"""
@spec empty?(t) :: boolean()
def empty?(list) do
case list.next do
nil -> true
_ -> false
end
end
@doc """
Get the value of a head of the LinkedList
"""
@spec peek(t) :: {:ok, any()} | {:error, :empty_list}
def peek(list) do
case empty?(list) do
true -> @empty_list_error
_ -> {:ok, list.value}
end
end
@doc """
Get tail of a LinkedList
"""
@spec tail(t) :: {:ok, t} | {:error, :empty_list}
def tail(list), do: tail(list.value, list.next)
defp tail(nil, _next), do: @empty_list_error
defp tail(_value, next), do: {:ok, next}
@doc """
Remove the head from a LinkedList
"""
@spec pop(t) :: {:ok, any(), t} | {:error, :empty_list}
def pop(list), do: pop(list.value, list.next, list)
  @spec pop(any, any, any) :: {:error, :empty_list} | {:ok, any, t}
defp pop(nil, _next, _list), do: @empty_list_error
defp pop(value, _next, list), do: {:ok, value, list.next}
@doc """
Construct a LinkedList from a stdlib List
"""
@spec from_list(list()) :: t
def from_list([]), do: new()
def from_list(list), do: Enum.reverse(list) |> from_list(new())
  defp from_list([head | tail], llist), do: from_list(tail, push(llist, head))
defp from_list([], llist), do: llist
@doc """
Construct a stdlib List LinkedList from a LinkedList
"""
@spec to_list(t) :: list()
def to_list(llist) do
to_list(llist.value, llist.next, []) |> Enum.reverse()
end
defp to_list(nil, _next, acc), do: acc
defp to_list(value, next, acc), do: to_list(next.value, next.next, [value | acc])
@doc """
Reverse a LinkedList
"""
@spec reverse(t) :: t
def reverse(list) do
to_list(list) |> Enum.reverse() |> from_list()
end
end
| 24.857143 | 90 | 0.602217 |
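# Illustrative sketch (editor addition, not part of the file above): round-tripping
# a few values through the list.
ll = LinkedList.from_list([1, 2, 3])
3 = LinkedList.length(ll)
{:ok, 1} = LinkedList.peek(ll)
{:ok, 1, rest} = LinkedList.pop(ll)
{:ok, 2} = LinkedList.peek(rest)
[3, 2, 1] = ll |> LinkedList.reverse() |> LinkedList.to_list()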
9ef330d739ef0ab1dd31aea72785ff08a1d97141 | 3,667 | ex | Elixir | clients/web_fonts/lib/google_api/web_fonts/v1/api/webfonts.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/web_fonts/lib/google_api/web_fonts/v1/api/webfonts.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/web_fonts/lib/google_api/web_fonts/v1/api/webfonts.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.WebFonts.V1.Api.Webfonts do
@moduledoc """
API calls for all endpoints tagged `Webfonts`.
"""
alias GoogleApi.WebFonts.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Retrieves the list of fonts currently served by the Google Fonts Developer API.
## Parameters
* `connection` (*type:* `GoogleApi.WebFonts.V1.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:sort` (*type:* `String.t`) - Enables sorting of the list.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.WebFonts.V1.Model.WebfontList{}}` on success
* `{:error, info}` on failure
"""
@spec webfonts_webfonts_list(Tesla.Env.client(), keyword(), keyword()) ::
{:ok, GoogleApi.WebFonts.V1.Model.WebfontList.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def webfonts_webfonts_list(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:sort => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/webfonts", %{})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.WebFonts.V1.Model.WebfontList{}])
end
end
| 42.149425 | 196 | 0.644669 |
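# Illustrative sketch (editor addition, not part of the file above): listing fonts
# sorted by popularity. The API key is a placeholder, and an unauthenticated
# connection via Connection.new/0 is assumed to be sufficient for this endpoint.
connection = GoogleApi.WebFonts.V1.Connection.new()
{:ok, %GoogleApi.WebFonts.V1.Model.WebfontList{} = fonts} =
  GoogleApi.WebFonts.V1.Api.Webfonts.webfonts_webfonts_list(connection,
    key: "YOUR_API_KEY",
    sort: "popularity"
  )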
9ef343ed4b15e91d2a3559a5f35619eb168dfe67 | 3,127 | exs | Elixir | lib/logger/test/logger/error_handler_test.exs | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 2 | 2018-11-15T06:38:14.000Z | 2018-11-17T18:03:14.000Z | lib/logger/test/logger/error_handler_test.exs | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 1 | 2018-09-10T23:36:45.000Z | 2018-09-10T23:36:45.000Z | lib/logger/test/logger/error_handler_test.exs | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 1 | 2018-09-10T23:32:56.000Z | 2018-09-10T23:32:56.000Z | defmodule Logger.ErrorHandlerTest do
use Logger.Case
@moduletag :error_logger
test "add_translator/1 and remove_translator/1" do
defmodule CustomTranslator do
def t(:debug, :info, :format, {'hello: ~p', [:ok]}) do
:skip
end
def t(:debug, :info, :format, {'world: ~p', [:ok]}) do
{:ok, "rewritten"}
end
def t(_, _, _, _) do
:none
end
end
assert Logger.add_translator({CustomTranslator, :t})
assert capture_log(fn ->
:error_logger.info_msg('hello: ~p', [:ok])
end) == ""
assert capture_log(fn ->
:error_logger.info_msg('world: ~p', [:ok])
end) =~ "\[info\] rewritten"
after
assert Logger.remove_translator({CustomTranslator, :t})
end
test "survives after crashes" do
expected_result =
"[error] :gen_event handler Logger.ErrorHandler installed in :error_logger terminating\n" <>
"** (exit) an exception was raised:"
assert capture_log(fn ->
:error_logger.info_msg("~p~n", [])
wait_for_handler(:error_logger, Logger.ErrorHandler)
end) =~ expected_result
assert error_log(:info_msg, "~p~n", [:hello]) =~ msg("[info] :hello")
end
test "survives after Logger exit" do
Process.whereis(Logger) |> Process.exit(:kill)
wait_for_logger()
wait_for_handler(:error_logger, Logger.ErrorHandler)
assert error_log(:info_msg, "~p~n", [:hello]) =~ msg("[info] :hello")
end
test "formats error_logger info message" do
assert error_log(:info_msg, "hello", []) =~ msg("[info] hello")
assert error_log(:info_msg, "~p~n", [:hello]) =~ msg("[info] :hello")
end
test "formats error_logger info report" do
assert error_log(:info_report, "hello") =~ msg("[info] \"hello\"")
assert error_log(:info_report, :hello) =~ msg("[info] :hello")
assert error_log(:info_report, :special, :hello) == ""
end
test "formats error_logger error message" do
assert error_log(:error_msg, "hello", []) =~ msg("[error] hello")
assert error_log(:error_msg, "~p~n", [:hello]) =~ msg("[error] :hello")
end
test "formats error_logger error report" do
assert error_log(:error_report, "hello") =~ msg("[error] \"hello\"")
assert error_log(:error_report, :hello) =~ msg("[error] :hello")
assert error_log(:error_report, :special, :hello) == ""
end
test "formats error_logger warning message" do
assert error_log(:warning_msg, "hello", []) =~ msg("[warn] hello")
assert error_log(:warning_msg, "~p~n", [:hello]) =~ msg("[warn] :hello")
end
test "formats error_logger warning report" do
assert error_log(:warning_report, "hello") =~ msg("[warn] \"hello\"")
assert error_log(:warning_report, :hello) =~ msg("[warn] :hello")
assert error_log(:warning_report, :special, :hello) == ""
end
defp error_log(fun, format) do
do_error_log(fun, [format])
end
defp error_log(fun, format, args) do
do_error_log(fun, [format, args])
end
defp do_error_log(fun, args) do
capture_log(fn -> apply(:error_logger, fun, args) end)
end
end
| 31.908163 | 98 | 0.626799 |
9ef34a44357154001ec0d5d0e4a0e4508041f06b | 668 | exs | Elixir | test/langue/java_properties/formatter_test.exs | charlesdemers/accent | eeea52feb30d16ada5023c05fef37c08c267eff0 | [
"BSD-3-Clause"
] | null | null | null | test/langue/java_properties/formatter_test.exs | charlesdemers/accent | eeea52feb30d16ada5023c05fef37c08c267eff0 | [
"BSD-3-Clause"
] | null | null | null | test/langue/java_properties/formatter_test.exs | charlesdemers/accent | eeea52feb30d16ada5023c05fef37c08c267eff0 | [
"BSD-3-Clause"
] | null | null | null | defmodule AccentTest.Formatter.JavaProperties.Formatter do
Code.require_file("expectation_test.exs", __DIR__)
use ExUnit.Case, async: true
alias AccentTest.Formatter.JavaProperties.Expectation.{Simple}
alias Langue.Formatter.JavaProperties.{Parser, Serializer}
@tests [
Simple
]
test "java properties" do
Enum.each(@tests, fn ex ->
{expected_parse, result_parse} = Accent.FormatterTestHelper.test_parse(ex, Parser)
{expected_serialize, result_serialize} = Accent.FormatterTestHelper.test_serialize(ex, Serializer)
assert expected_parse == result_parse
assert expected_serialize == result_serialize
end)
end
end
| 29.043478 | 104 | 0.755988 |
9ef370c330432d94c2552e65b37595217c828ebb | 2,954 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/disk_instantiation_config.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/compute/lib/google_api/compute/v1/model/disk_instantiation_config.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/compute/lib/google_api/compute/v1/model/disk_instantiation_config.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.DiskInstantiationConfig do
@moduledoc """
A specification of the desired way to instantiate a disk in the instance template when its created from a source instance.
## Attributes
* `autoDelete` (*type:* `boolean()`, *default:* `nil`) - Specifies whether the disk will be auto-deleted when the instance is deleted (but not when the disk is detached from the instance).
* `customImage` (*type:* `String.t`, *default:* `nil`) - The custom source image to be used to restore this disk when instantiating this instance template.
* `deviceName` (*type:* `String.t`, *default:* `nil`) - Specifies the device name of the disk to which the configurations apply to.
* `instantiateFrom` (*type:* `String.t`, *default:* `nil`) - Specifies whether to include the disk and what image to use. Possible values are: - source-image: to use the same image that was used to create the source instance's corresponding disk. Applicable to the boot disk and additional read-write disks. - source-image-family: to use the same image family that was used to create the source instance's corresponding disk. Applicable to the boot disk and additional read-write disks. - custom-image: to use a user-provided image url for disk creation. Applicable to the boot disk and additional read-write disks. - attach-read-only: to attach a read-only disk. Applicable to read-only disks. - do-not-include: to exclude a disk from the template. Applicable to additional read-write disks, local SSDs, and read-only disks.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:autoDelete => boolean() | nil,
:customImage => String.t() | nil,
:deviceName => String.t() | nil,
:instantiateFrom => String.t() | nil
}
field(:autoDelete)
field(:customImage)
field(:deviceName)
field(:instantiateFrom)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.DiskInstantiationConfig do
def decode(value, options) do
GoogleApi.Compute.V1.Model.DiskInstantiationConfig.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.DiskInstantiationConfig do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 52.75 | 830 | 0.738321 |
9ef3e2e5510956a138cedb26df47d4689e65e18b | 788 | ex | Elixir | test/support/state_machines/order.ex | baldmountain/beepbop | c361d451189801d1f084a14cda21c0b7fbedfa70 | [
"MIT"
] | null | null | null | test/support/state_machines/order.ex | baldmountain/beepbop | c361d451189801d1f084a14cda21c0b7fbedfa70 | [
"MIT"
] | null | null | null | test/support/state_machines/order.ex | baldmountain/beepbop | c361d451189801d1f084a14cda21c0b7fbedfa70 | [
"MIT"
] | null | null | null | defmodule BeepBop.Example.OrderMachine do
@moduledoc """
## Defined events
* `:foobar`
Has no "to_state", and the event callback has full freedom to set the
`to_state`. In fact, there is no validation on the `to_state`.
"""
use BeepBop, ecto_repo: BeepBop.TestRepo
state_machine(
BeepBop.Example.Order,
:state,
~w[cart address payment shipping shipped cancelled]a
) do
event(:foobar, %{from: [:cart]}, fn context ->
s = struct(context.struct, state: :foo)
context
end)
event(:will_fail, %{from: [:cart], to: :cancelled}, fn context ->
multi =
Ecto.Multi.new()
|> Ecto.Multi.run(:failure, fn _repo, _change ->
{:error, :failed}
end)
struct(context, multi: multi)
end)
end
end
| 25.419355 | 73 | 0.616751 |
9ef3fccc7958484d2227414c3cb0977f66d64290 | 48 | exs | Elixir | test/test_helper.exs | Selvaticus/loki | 58006dcf5752595ed7b875bee265423074895ae4 | [
"MIT"
] | 83 | 2016-11-27T11:15:58.000Z | 2021-12-24T21:04:04.000Z | test/test_helper.exs | Selvaticus/loki | 58006dcf5752595ed7b875bee265423074895ae4 | [
"MIT"
] | 28 | 2016-12-08T14:18:32.000Z | 2019-01-08T10:57:54.000Z | test/test_helper.exs | Selvaticus/loki | 58006dcf5752595ed7b875bee265423074895ae4 | [
"MIT"
] | 9 | 2017-02-08T21:56:18.000Z | 2019-06-09T02:14:07.000Z | Loki.TestHelpers.prepare_tests()
ExUnit.start()
| 16 | 32 | 0.8125 |
9ef43a6e89bb2367cb264d16c0f985192f21dc80 | 3,049 | ex | Elixir | deps/gettext/lib/gettext/fuzzy.ex | luishendrix92/exrez | c9a001ff32c2081449ce190d151e7df09a0a42ee | [
"MIT"
] | null | null | null | deps/gettext/lib/gettext/fuzzy.ex | luishendrix92/exrez | c9a001ff32c2081449ce190d151e7df09a0a42ee | [
"MIT"
] | 1 | 2020-07-17T10:07:44.000Z | 2020-07-17T10:07:44.000Z | teachme/deps/gettext/lib/gettext/fuzzy.ex | kodeklubben-sandefjord/kodeklubben-sandefjord.github.io | 89351bceb132f87b1d2a0bcb325fb278d7ad48a4 | [
"MIT"
] | null | null | null | defmodule Gettext.Fuzzy do
@moduledoc false
alias Gettext.PO
alias Gettext.PO.Translation
alias Gettext.PO.PluralTranslation
@type translation_key :: binary | {binary, binary}
@doc """
Returns a matcher function that takes two translation keys and checks if they
match.
`String.jaro_distance/2` (which calculates the Jaro distance) is used to
measure the distance between the two translations. `threshold` is the minimum
distance that means a match. `{:match, distance}` is returned in case of a
match, `:nomatch` otherwise.
"""
@spec matcher(float) :: (translation_key, translation_key -> {:match, float} | :nomatch)
def matcher(threshold) do
fn old_key, new_key ->
distance = jaro_distance(old_key, new_key)
if distance >= threshold, do: {:match, distance}, else: :nomatch
end
end
@doc """
Finds the Jaro distance between the msgids of two translations.
To mimic the behaviour of the `msgmerge` tool, this function only calculates
the Jaro distance of the msgids of the two translations, even if one (or both)
of them is a plural translation.
"""
@spec jaro_distance(translation_key, translation_key) :: float
def jaro_distance(key1, key2)
# Apparently, msgmerge only looks at the msgid when performing fuzzy
# matching. This means that if we have two plural translations with similar
# msgids but very different msgid_plurals, they'll still fuzzy match.
def jaro_distance(key1, key2) when is_binary(key1) and is_binary(key2),
do: String.jaro_distance(key1, key2)
def jaro_distance({key1, _}, key2) when is_binary(key2), do: String.jaro_distance(key1, key2)
def jaro_distance(key1, {key2, _}) when is_binary(key1), do: String.jaro_distance(key1, key2)
def jaro_distance({key1, _}, {key2, _}), do: String.jaro_distance(key1, key2)
@doc """
Merges a translation with the corresponding fuzzy match.
`new` is the newest translation and `existing` is the existing translation
that we use to populate the msgstr of the newest translation.
Note that if `new` is a regular translation, then the result will be a regular
translation; if `new` is a plural translation, then the result will be a
plural translation.
"""
@spec merge(PO.translation(), PO.translation()) :: PO.translation()
def merge(new, existing) do
# Everything comes from "new", except for the msgstr and the comments.
new
|> Map.put(:comments, existing.comments)
|> merge_msgstr(existing)
|> PO.Translations.mark_as_fuzzy()
end
defp merge_msgstr(%Translation{} = new, %Translation{} = existing),
do: %{new | msgstr: existing.msgstr}
defp merge_msgstr(%Translation{} = new, %PluralTranslation{} = existing),
do: %{new | msgstr: existing.msgstr[0]}
defp merge_msgstr(%PluralTranslation{} = new, %Translation{} = existing),
do: %{new | msgstr: Map.new(new.msgstr, fn {i, _} -> {i, existing.msgstr} end)}
defp merge_msgstr(%PluralTranslation{} = new, %PluralTranslation{} = existing),
do: %{new | msgstr: existing.msgstr}
end
| 39.089744 | 95 | 0.716956 |
9ef48ab5a093d8141c5fe4f52c9b25721bd94f6f | 771 | ex | Elixir | server/lib/project_web/views/comment_view.ex | lemartin19/cs4550-project | bd7baf279021543db33fe52beb3e0d5413dbe405 | [
"Xnet",
"X11"
] | null | null | null | server/lib/project_web/views/comment_view.ex | lemartin19/cs4550-project | bd7baf279021543db33fe52beb3e0d5413dbe405 | [
"Xnet",
"X11"
] | null | null | null | server/lib/project_web/views/comment_view.ex | lemartin19/cs4550-project | bd7baf279021543db33fe52beb3e0d5413dbe405 | [
"Xnet",
"X11"
] | null | null | null | defmodule ProjectWeb.CommentView do
use ProjectWeb, :view
alias ProjectWeb.UserView
alias ProjectWeb.CommentView
alias ProjectWeb.ChangesetView
def render("index.json", %{comments: comments}) do
%{data: render_many(comments, CommentView, "comment.json")}
end
def render("show.json", %{comment: comment}) do
%{data: render_one(comment, CommentView, "comment.json")}
end
def render("comment.json", %{comment: comment}) do
%{
id: comment.id,
body: comment.body,
user: render_one(comment.user, UserView, "user.json"),
location: comment.location,
inserted_at: comment.inserted_at
}
end
def render("error.json", %{changeset: changeset}) do
render_one(changeset, ChangesetView, "error.json")
end
end
| 26.586207 | 63 | 0.690013 |
9ef4c1f7ec868da089a22cf65069801d48ed7d3f | 1,927 | ex | Elixir | lib/stein/timer.ex | smartlogic/stein | 785dff2d0400925b896d78b0f092fc501b685d0c | [
"MIT"
] | 14 | 2019-03-07T22:06:51.000Z | 2021-06-05T19:27:22.000Z | lib/stein/timer.ex | smartlogic/stein | 785dff2d0400925b896d78b0f092fc501b685d0c | [
"MIT"
] | 1 | 2019-03-07T16:46:11.000Z | 2019-03-07T16:46:11.000Z | lib/stein/timer.ex | smartlogic/stein | 785dff2d0400925b896d78b0f092fc501b685d0c | [
"MIT"
] | null | null | null | defmodule Stein.Timer do
@moduledoc """
Functions around daily timer processes
"""
@type runs_at_opts() :: Keyword.t()
@type milliseconds :: integer()
alias Stein.Time
@doc """
Calculate the delay to the next daily cycle
"""
@spec calculate_daily_cycle_delay(DateTime.t(), runs_at_opts()) :: milliseconds()
def calculate_daily_cycle_delay(now, runs_at) do
hour = Keyword.get(runs_at, :hour, 0)
minute = Keyword.get(runs_at, :minute, 0)
second = Keyword.get(runs_at, :second, 0)
now
|> Timex.set(hour: hour, minute: minute, second: second)
|> maybe_shift_a_day(now)
|> Timex.diff(now, :milliseconds)
end
defp maybe_shift_a_day(next_run, now) do
case Time.before?(now, next_run) do
true ->
next_run
false ->
Timex.shift(next_run, days: 1)
end
end
@doc """
Calculate the delay to the next weekly cycle
"""
@spec calculate_weekly_cycle_delay(DateTime.t(), runs_at_opts()) :: milliseconds()
def calculate_weekly_cycle_delay(now, runs_at) do
day = Keyword.get(runs_at, :day, 0)
hour = Keyword.get(runs_at, :hour, 0)
minute = Keyword.get(runs_at, :minute, 0)
second = Keyword.get(runs_at, :second, 0)
now
|> Timex.set(hour: hour, minute: minute, second: second)
|> shift_days(day)
|> maybe_shift_a_week(now)
|> Timex.diff(now, :milliseconds)
end
defp shift_days(now, day) do
case Timex.weekday(now) > day do
true ->
# Shift into the next week
# time to end of the week plus the day
Timex.shift(now, days: day + (7 - Timex.weekday(now)))
false ->
# Shift ahead in this week
Timex.shift(now, days: day - Timex.weekday(now))
end
end
defp maybe_shift_a_week(next_run, now) do
case Time.before?(now, next_run) do
true ->
next_run
false ->
Timex.shift(next_run, days: 7)
end
end
end
| 25.025974 | 84 | 0.636222 |
9ef4cf53e72e9f05b6575a83650bc73bdad93c76 | 15,907 | exs | Elixir | lib/ex_unit/test/ex_unit/formatter_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/ex_unit/test/ex_unit/formatter_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/ex_unit/test/ex_unit/formatter_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z | Code.require_file("../test_helper.exs", __DIR__)
defmodule ExUnit.FormatterTest do
use ExUnit.Case
import ExUnit.Formatter
doctest ExUnit.Formatter
defmacrop catch_assertion(expr) do
quote do
try do
unquote(expr)
rescue
ex -> ex
end
end
end
defp test_module do
%ExUnit.TestModule{name: Hello}
end
defp test do
%ExUnit.Test{name: :world, module: Hello, tags: %{file: __ENV__.file, line: 1}}
end
def falsy() do
false
end
defp formatter(_key, value), do: value
defp diff_formatter(:diff_enabled?, _default), do: true
defp diff_formatter(_key, value), do: value
test "formats test case filters" do
filters = [run: true, slow: false]
assert format_filters(filters, :exclude) =~ "Excluding tags: [run: true, slow: false]"
assert format_filters(filters, :include) =~ "Including tags: [run: true, slow: false]"
end
test "formats test errors" do
failure = [{:error, catch_error(raise "oops"), []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (RuntimeError) oops
"""
end
test "formats test exits" do
failure = [{:exit, 1, []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) == """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (exit) 1
"""
end
test "formats test exits with mfa" do
failure = [{:exit, {:bye, {:mod, :fun, []}}, []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) == """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (exit) exited in: :mod.fun()
** (EXIT) :bye
"""
end
test "formats test exits with function clause mfa" do
{error, stack} =
try do
Access.fetch(:foo, :bar)
catch
:error, error -> {error, __STACKTRACE__}
end
failure = [{:exit, {{error, stack}, {:mod, :fun, []}}, []}]
assert trim_multiline_whitespace(format_test_failure(test(), failure, 1, 80, &formatter/2)) =~
"""
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (exit) exited in: :mod.fun()
** (EXIT) an exception was raised:
** (FunctionClauseError) no function clause matching in Access.fetch/2
The following arguments were given to Access.fetch/2:
# 1
:foo
# 2
:bar
Attempted function clauses (showing 5 out of 5):
def fetch(%module{} = container, key)
"""
end
test "formats test exits with assertion mfa" do
{error, stack} =
try do
assert 1 == 2
rescue
error -> {error, __STACKTRACE__}
end
failure = [{:exit, {{error, stack}, {:mod, :fun, []}}, []}]
assert trim_multiline_whitespace(format_test_failure(test(), failure, 1, 80, &formatter/2)) =~
"""
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (exit) exited in: :mod.fun()
** (EXIT) an exception was raised:
Assertion with == failed
code: assert 1 == 2
left: 1
right: 2
"""
end
test "formats test throws" do
failure = [{:throw, 1, []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) == """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (throw) 1
"""
end
test "formats test EXITs" do
failure = [{{:EXIT, self()}, 1, []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) == """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (EXIT from #{inspect(self())}) 1
"""
end
test "formats test EXITs with function clause errors" do
{error, stack} =
try do
Access.fetch(:foo, :bar)
catch
:error, error -> {error, __STACKTRACE__}
end
failure = [{{:EXIT, self()}, {error, stack}, []}]
format = trim_multiline_whitespace(format_test_failure(test(), failure, 1, 80, &formatter/2))
assert format =~
"""
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (EXIT from #{inspect(self())}) an exception was raised:
** (FunctionClauseError) no function clause matching in Access.fetch/2
The following arguments were given to Access.fetch/2:
# 1
:foo
# 2
:bar
Attempted function clauses (showing 5 out of 5):
def fetch(%module{} = container, key)
"""
assert format =~ ~r"lib/access.ex:\d+: Access.fetch/2"
end
test "formats test EXITs with assertion errors" do
{error, stack} =
try do
assert 1 == 2
rescue
error -> {error, __STACKTRACE__}
end
failure = [{{:EXIT, self()}, {error, stack}, []}]
assert trim_multiline_whitespace(format_test_failure(test(), failure, 1, 80, &formatter/2)) =~
"""
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (EXIT from #{inspect(self())}) an exception was raised:
Assertion with == failed
code: assert 1 == 2
left: 1
right: 2
"""
end
test "formats test errors with test_location_relative_path" do
Application.put_env(:ex_unit, :test_location_relative_path, "apps/sample")
failure = [{:error, catch_error(raise "oops"), []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
apps/sample/test/ex_unit/formatter_test.exs:1
** (RuntimeError) oops
"""
after
Application.delete_env(:ex_unit, :test_location_relative_path)
end
test "formats test errors with code snippets" do
stack = {Hello, :world, 1, [file: __ENV__.file, line: 3]}
failure = [{:error, catch_error(raise "oops"), [stack]}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (RuntimeError) oops
code: defmodule ExUnit.FormatterTest do
"""
end
test "formats stacktraces" do
stacktrace = [{Oops, :wrong, 1, [file: "formatter_test.exs", line: 1]}]
failure = [{:error, catch_error(raise "oops"), stacktrace}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (RuntimeError) oops
stacktrace:
formatter_test.exs:1: Oops.wrong/1
"""
end
test "formats assertions" do
failure = [{:error, catch_assertion(assert ExUnit.FormatterTest.falsy()), []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
Expected truthy, got false
code: assert ExUnit.FormatterTest.falsy()
"""
end
test "formats assertions with patterns and values" do
failure = [{:error, catch_assertion(assert {1, 2, 3} > {1, 2, 3}), []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
Assertion with > failed, both sides are exactly equal
code: assert {1, 2, 3} > {1, 2, 3}
left: {1, 2, 3}
"""
failure = [{:error, catch_assertion(assert {3, 2, 1} = {1, 2, 3}), []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
match (=) failed
code: assert {3, 2, 1} = {1, 2, 3}
left: {3, 2, 1}
right: {1, 2, 3}
"""
end
nfc_hello = String.normalize("héllo", :nfc)
nfd_hello = String.normalize("héllo", :nfd)
test "formats assertions with hints" do
failure = [{:error, catch_assertion(assert unquote(nfc_hello) == unquote(nfd_hello)), []}]
assert format_test_failure(test(), failure, 1, 80, &diff_formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
Assertion with == failed
code: assert "#{unquote(nfc_hello)}" == "#{unquote(nfd_hello)}"
left: "#{unquote(nfc_hello)}"
right: "#{unquote(nfd_hello)}"
hint: you are comparing strings that have the same visual representation but are made of different Unicode codepoints
"""
end
test "formats multiple assertions" do
failure = [
{:error, catch_assertion(assert ExUnit.FormatterTest.falsy()), []},
{:error, catch_assertion(assert 1 == 2), []}
]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
Failure #1
Expected truthy, got false
code: assert ExUnit.FormatterTest.falsy()
Failure #2
Assertion with == failed
code: assert 1 == 2
left: 1
right: 2
"""
end
defp trim_multiline_whitespace(string) do
String.replace(string, ~r"\n\s+\n", "\n\n")
end
test "blames function clause error" do
{error, stack} =
try do
Access.fetch(:foo, :bar)
rescue
exception -> {exception, __STACKTRACE__}
end
failure = format_test_failure(test(), [{:error, error, [hd(stack)]}], 1, 80, &formatter/2)
assert trim_multiline_whitespace(failure) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (FunctionClauseError) no function clause matching in Access.fetch/2
The following arguments were given to Access.fetch/2:
# 1
:foo
# 2
:bar
Attempted function clauses (showing 5 out of 5):
def fetch(%module{} = container, key)
"""
assert failure =~ ~r"\(elixir #{System.version()}\) lib/access\.ex:\d+: Access\.fetch/2"
end
test "formats setup_all errors" do
failure = [{:error, catch_error(raise "oops"), []}]
assert format_test_all_failure(test_module(), failure, 1, 80, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
** (RuntimeError) oops
"""
end
test "formats matches correctly" do
failure = [{:error, catch_assertion(assert %{a: :b} = %{a: :c}), []}]
assert format_test_all_failure(test_module(), failure, 1, :infinity, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
match (=) failed
code: assert %{a: :b} = %{a: :c}
left: %{a: :b}
right: %{a: :c}
"""
end
test "formats assertions with operators with no limit" do
failure = [{:error, catch_assertion(assert [1, 2, 3] == [4, 5, 6]), []}]
assert format_test_all_failure(test_module(), failure, 1, :infinity, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
Assertion with == failed
code: assert [1, 2, 3] == [4, 5, 6]
left: [1, 2, 3]
right: [4, 5, 6]
"""
end
test "formats assertions with operators with column limit" do
failure = [{:error, catch_assertion(assert [1, 2, 3] == [4, 5, 6]), []}]
assert format_test_all_failure(test_module(), failure, 1, 15, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
Assertion with == failed
code: assert [1, 2, 3] == [4, 5, 6]
left: [1,
2,
3]
right: [4,
5,
6]
"""
end
test "formats assertions with complex function call arguments" do
failure = [{:error, catch_assertion(assert is_list(List.to_tuple([1, 2, 3]))), []}]
assert format_test_all_failure(test_module(), failure, 1, 80, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
Expected truthy, got false
code: assert is_list(List.to_tuple([1, 2, 3]))
arguments:
# 1
{1, 2, 3}
"""
failure = [{:error, catch_assertion(assert is_list({1, 2})), []}]
assert format_test_all_failure(test_module(), failure, 1, 80, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
Expected truthy, got false
code: assert is_list({1, 2})
"""
end
test "formats assertions with message with multiple lines" do
message = "Some meaningful error:\nuseful info\nanother useful info"
failure = [{:error, catch_assertion(assert(false, message)), []}]
assert format_test_all_failure(test_module(), failure, 1, :infinity, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
Some meaningful error:
useful info
another useful info
"""
end
defmodule BadInspect do
defstruct key: 0
defimpl Inspect do
def inspect(struct, opts) when is_atom(opts) do
struct.unknown
end
end
end
test "inspect failure" do
failure = [{:error, catch_assertion(assert :will_fail == %BadInspect{}), []}]
message = ~S'''
got FunctionClauseError with message:
"""
no function clause matching in Inspect.ExUnit.FormatterTest.BadInspect.inspect/2
"""
while inspecting:
%{__struct__: ExUnit.FormatterTest.BadInspect, key: 0}
Stacktrace:
'''
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
Assertion with == failed
code: assert :will_fail == %BadInspect{}
left: :will_fail
right: #Inspect.Error<\n#{message}\
"""
end
defmodule BadMessage do
defexception key: 0
@impl true
def message(_message) do
raise "oops"
end
end
test "message failure" do
failure = [{:error, catch_error(raise BadMessage), []}]
message =
"got RuntimeError with message \"oops\" while retrieving Exception.message/1 " <>
"for %ExUnit.FormatterTest.BadMessage{key: 0}. Stacktrace:"
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (ExUnit.FormatterTest.BadMessage) #{message}
"""
end
end
| 32.135354 | 134 | 0.533664 |
9ef4d0496452be384611b254042aaa9c2511dfd0 | 84 | ex | Elixir | lib/ecto_schema_store/gettext.ex | onboardingsystems/ecto_schema_store | 120c929faecb686e3da685f411da66c80d7d0127 | [
"Apache-2.0"
] | 8 | 2016-10-27T15:53:18.000Z | 2022-01-03T23:47:45.000Z | lib/ecto_schema_store/gettext.ex | onboardingsystems/ecto_schema_store | 120c929faecb686e3da685f411da66c80d7d0127 | [
"Apache-2.0"
] | 5 | 2017-04-25T15:28:29.000Z | 2017-10-05T16:08:57.000Z | lib/ecto_schema_store/gettext.ex | onboardingsystems/ecto_schema_store | 120c929faecb686e3da685f411da66c80d7d0127 | [
"Apache-2.0"
] | 3 | 2017-08-28T18:51:24.000Z | 2020-01-16T22:30:33.000Z | defmodule EctoSchemaStore.Gettext do
use Gettext, otp_app: :ecto_schema_store
end
| 21 | 42 | 0.833333 |
9ef4ed71bde840504e025587ecc0c9581e9a9b8e | 4,167 | ex | Elixir | test/support/live_views/upload_live.ex | feliperenan/phoenix_live_view | af65bb51fe12ea88e7c66808d2b1118e1c491ddd | [
"MIT"
] | 4,659 | 2019-03-14T20:22:43.000Z | 2022-03-31T20:13:30.000Z | test/support/live_views/upload_live.ex | feliperenan/phoenix_live_view | af65bb51fe12ea88e7c66808d2b1118e1c491ddd | [
"MIT"
] | 1,745 | 2019-03-14T22:04:38.000Z | 2022-03-31T17:26:25.000Z | test/support/live_views/upload_live.ex | feliperenan/phoenix_live_view | af65bb51fe12ea88e7c66808d2b1118e1c491ddd | [
"MIT"
] | 744 | 2019-03-14T20:48:05.000Z | 2022-03-25T14:35:04.000Z | defmodule Phoenix.LiveViewTest.UploadLive do
use Phoenix.LiveView
def render(%{uploads: _} = assigns) do
~H"""
<%= for preflight <- @preflights do %>
preflight:<%= inspect(preflight) %>
<% end %>
<%= for name <- @consumed do %>
consumed:<%= name %>
<% end %>
<form phx-change="validate" phx-submit="save">
<%= for entry <- @uploads.avatar.entries do %>
lv:<%= entry.client_name %>:<%= entry.progress %>%
channel:<%= inspect(Phoenix.LiveView.UploadConfig.entry_pid(@uploads.avatar, entry)) %>
<%= for msg <- upload_errors(@uploads.avatar, entry) do %>
error:<%= inspect(msg) %>
<% end %>
<% end %>
<%= live_file_input @uploads.avatar %>
<button type="submit">save</button>
</form>
"""
end
def render(assigns) do
~H"""
<div>
loading...
</div>
"""
end
def mount(_params, _session, socket) do
{:ok, assign(socket, preflights: [], consumed: [])}
end
def handle_call({:setup, setup_func}, _from, socket) do
{:reply, :ok, setup_func.(socket)}
end
def handle_call({:run, func}, _from, socket), do: func.(socket)
def handle_event("validate", _params, socket) do
{:noreply, socket}
end
## test helpers
def inspect_html_safe(term) do
term
|> inspect()
|> Phoenix.HTML.html_escape()
|> Phoenix.HTML.safe_to_string()
end
def exits_with(lv, upload, kind, func) do
Process.unlink(proxy_pid(lv))
Process.unlink(upload.pid)
try do
func.()
raise "expected to exit with #{inspect(kind)}"
catch
:exit, {{%mod{message: msg}, _}, _} when mod == kind -> msg
end
end
def run(lv, func) do
GenServer.call(lv.pid, {:run, func})
end
def proxy_pid(%{proxy: {_ref, _topic, pid}}), do: pid
end
defmodule Phoenix.LiveViewTest.UploadComponent do
use Phoenix.LiveComponent
def render(%{uploads: _} = assigns) do
~H"""
<div>
<%= for preflight <- @preflights do %>
preflight:<%= inspect(preflight) %>
<% end %>
<%= for name <- @consumed do %>
consumed:<%= name %>
<% end %>
<form phx-change="validate" id={@id} phx-submit="save" phx-target={@myself}>
<%= for entry <- @uploads.avatar.entries do %>
component:<%= entry.client_name %>:<%= entry.progress %>%
channel:<%= inspect(Phoenix.LiveView.UploadConfig.entry_pid(@uploads.avatar, entry)) %>
<%= for msg <- upload_errors(@uploads.avatar, entry) do %>
error:<%= inspect(msg) %>
<% end %>
<% end %>
<%= live_file_input @uploads.avatar %>
<button type="submit">save</button>
</form>
</div>
"""
end
def render(assigns) do
~H"""
<div>
loading...
</div>
"""
end
def update(assigns, socket) do
new_socket =
case assigns[:run] do
{func, from} ->
{:reply, reply, new_socket} = func.(socket)
if from, do: GenServer.reply(from, reply)
new_socket
nil ->
socket
other -> {:other, other}
end
{:ok,
new_socket
|> assign(preflights: [])
|> assign(consumed: [])
|> assign(assigns)}
end
def handle_event("validate", _params, socket) do
{:noreply, socket}
end
end
defmodule Phoenix.LiveViewTest.UploadLiveWithComponent do
use Phoenix.LiveView
def render(assigns) do
~H"""
<div>
<%= if @uploads_count > 0 do %>
<%= for i <- 0..@uploads_count do %>
<%= live_component Phoenix.LiveViewTest.UploadComponent, id: "upload#{i}" %>
<% end %>
<% end %>
</div>
"""
end
def mount(_params, _session, socket) do
{:ok, assign(socket, uploads_count: 1)}
end
def handle_call({:setup, setup_func}, _from, socket) do
{:reply, :ok, setup_func.(socket)}
end
def handle_call({:uploads, count}, _from, socket) do
{:reply, :ok, assign(socket, :uploads_count, count)}
end
def handle_call({:run, func}, from, socket) do
send_update(Phoenix.LiveViewTest.UploadComponent, id: "upload0", run: {func, from})
{:noreply, socket}
end
end
| 24.656805 | 97 | 0.573314 |
9ef50057d0eefec1d20ea63face83bb8a53efe4c | 1,612 | exs | Elixir | test/20_run/steps/schema__check_against_given_fields_test.exs | marick/ecto_test_dsl | 6d460af093367098b7c78db709753deb45904d77 | [
"Unlicense"
] | 4 | 2021-02-09T17:26:34.000Z | 2021-08-08T01:42:52.000Z | test/20_run/steps/schema__check_against_given_fields_test.exs | marick/transformer_test_support | 6d460af093367098b7c78db709753deb45904d77 | [
"Unlicense"
] | null | null | null | test/20_run/steps/schema__check_against_given_fields_test.exs | marick/transformer_test_support | 6d460af093367098b7c78db709753deb45904d77 | [
"Unlicense"
] | null | null | null | defmodule Run.Steps.CheckAgainstGivenFieldsTest do
use EctoTestDSL.Case
use T.Drink.AndRun
alias Run.Steps
use Mockery
import T.RunningStubs
import T.Parse.InternalFunctions
setup do
stub(name: :example, neighborhood: %{}, usually_ignore: [])
stub(result_matches: :unused) # result_matches is checked in result_matches_test.exs
:ok
end
defp run([checks, value]) do
stub_history(inserted_value: value)
stub(result_fields: checks)
Steps.check_against_given_fields(:running, :inserted_value)
end
defp pass(setup), do: assert run(setup) == :uninteresting_result
test "expected values" do
[ %{name: "Bossie"}, %{name: "Bossie"}] |> pass()
end
test "expected change has wrong value" do
input = [ %{name: "Bossie"}, %{name: ""}]
assertion_fails(~r/Example `:example`/,
[message: ~r/Field `:name` has the wrong value/,
left: "",
right: "Bossie"],
fn ->
run(input)
end)
end
test "extra values are OK" do
[ %{name: "Bossie"}, %{name: "Bossie", age: 5}] |> pass
end
test "references to neighbors are supported" do
other_een = een(:other_example)
stub(neighborhood: %{other_een => Neighborhood.Value.inserted(%{id: 333})})
passes = [ %{other_id: id_of(:other_example)}, %{other_id: 333}]
fails = [ %{other_id: id_of(:other_example)}, %{other_id: "NOT"}]
passes |> pass()
assertion_fails(~r/Example `:example`/,
[message: ~r/Field `:other_id` has the wrong value/,
left: "NOT",
right: 333],
fn ->
run(fails)
end)
end
end
| 26.42623 | 89 | 0.628412 |
9ef5035c33e2f29f9ebe065cbd5c7ab08a572ecb | 1,127 | exs | Elixir | config/config.exs | kianmeng/logfmt-elixir | 1130de2e05d87f67788d3c37e673fd85762526dd | [
"MIT"
] | 21 | 2015-06-03T19:15:08.000Z | 2022-02-26T03:39:09.000Z | config/config.exs | kianmeng/logfmt-elixir | 1130de2e05d87f67788d3c37e673fd85762526dd | [
"MIT"
] | 14 | 2015-06-01T02:52:45.000Z | 2022-01-10T19:34:03.000Z | config/config.exs | kianmeng/logfmt-elixir | 1130de2e05d87f67788d3c37e673fd85762526dd | [
"MIT"
] | 11 | 2016-05-04T11:31:43.000Z | 2021-07-07T12:10:30.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# third-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :logfmt, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:logfmt, :key)
#
# You can also configure a third-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env()}.exs"
| 36.354839 | 73 | 0.750665 |
9ef508f0eb6bd484ba3e6b98645c32f4b016f488 | 197 | ex | Elixir | web/controllers/page_controller.ex | KazuCocoa/react_phoenix | 7cd2fe0bdcddf8a0fdd876517232893783bf21e5 | [
"MIT"
] | null | null | null | web/controllers/page_controller.ex | KazuCocoa/react_phoenix | 7cd2fe0bdcddf8a0fdd876517232893783bf21e5 | [
"MIT"
] | null | null | null | web/controllers/page_controller.ex | KazuCocoa/react_phoenix | 7cd2fe0bdcddf8a0fdd876517232893783bf21e5 | [
"MIT"
] | null | null | null | defmodule ReactPhoenix.PageController do
use ReactPhoenix.Web, :controller
plug :action
def index(conn, _params) do
# render conn, "index.html"
json conn, %{id: "sample"}
end
end
| 17.909091 | 40 | 0.695431 |
9ef580af400e17b3ccda7d9a2b910aa5a853a2d1 | 1,421 | ex | Elixir | lib/plug/fetch_component_access_token.ex | feng19/elixir_wechat | 5ae052476f27abf85da484b902347042c097b150 | [
"MIT"
] | 12 | 2019-03-07T01:34:12.000Z | 2021-09-11T06:40:29.000Z | lib/plug/fetch_component_access_token.ex | feng19/elixir_wechat | 5ae052476f27abf85da484b902347042c097b150 | [
"MIT"
] | 6 | 2019-10-12T10:48:07.000Z | 2021-03-03T07:06:34.000Z | lib/plug/fetch_component_access_token.ex | feng19/elixir_wechat | 5ae052476f27abf85da484b902347042c097b150 | [
"MIT"
] | 5 | 2019-03-07T01:34:31.000Z | 2021-01-06T08:01:51.000Z | if Code.ensure_loaded?(Plug) do
defmodule WeChat.Plug.FetchComponentAccessToken do
@moduledoc false
use Plug.Builder
require Logger
def call(conn, opts) do
conn = fetch_query_params(conn)
adapter_storage = opts[:adapter_storage]
query_params = conn.query_params
result =
try do
case fetch(query_params, adapter_storage) do
{:ok, token} ->
%{
"access_token" => token.access_token,
"expires_in" => token.expires_in,
"timestamp" => token.timestamp
}
{:error, %WeChat.Error{} = error} ->
Logger.error(
"fetch access token occurs an error: #{inspect(error)} with query params: #{
inspect(query_params)
}"
)
error
end
rescue
error in WeChat.Error ->
error
end
conn
|> put_resp_content_type("application/json")
|> send_resp(200, Jason.encode!(result))
|> halt()
end
defp fetch(%{"appid" => appid}, adapter_storage) do
comp_adapter_storage = adapter_storage[:component]
WeChat.Component.fetch_component_access_token(appid, comp_adapter_storage)
end
defp fetch(_, _) do
{
:error,
%WeChat.Error{reason: "invalid_request"}
}
end
end
end
| 24.929825 | 92 | 0.55102 |
9ef589fb25deffe08ba36509fed0a3dc1cfe7825 | 2,676 | ex | Elixir | web/views/helpers/view_helpers.ex | zhangsoledad/Doom | 37ddc696e7d71c742bfc90352d76e81f2c78f5b7 | [
"MIT"
] | 6 | 2016-03-17T08:45:34.000Z | 2016-10-10T01:20:37.000Z | web/views/helpers/view_helpers.ex | zhangsoledad/doom | 37ddc696e7d71c742bfc90352d76e81f2c78f5b7 | [
"MIT"
] | null | null | null | web/views/helpers/view_helpers.ex | zhangsoledad/doom | 37ddc696e7d71c742bfc90352d76e81f2c78f5b7 | [
"MIT"
] | 2 | 2016-04-01T06:28:56.000Z | 2016-04-28T09:35:07.000Z | defmodule Doom.ViewHelpers do
use Phoenix.HTML
def side_li_tag(conn, views) when is_list(views) do
info = conn.private
case (info[:phoenix_view] in views) do
true ->
tag(:li, class: "active")
_ ->
tag(:li)
end
end
def avatar_url(user_name) do
name = Pinyin.from_string(user_name, splitter: "")
filename = AlchemicAvatar.generate(name, 150)
filename |> String.split("priv/static", parts: 2) |> List.last
end
def pagination(paginator, path_fun, params) do
current_page = paginator.page_number
total_pages = paginator.total_pages
if total_pages > 1 do
content_tag :ul, class: "pagination pagination-sm no-margin" do
        page_tags = [raw_page_link(1, current_page, path_fun, params) | inside_window_page_link(total_pages, current_page, path_fun, params)]
        page_tags =
          if current_page > 1 do
            [page_link(path_fun, params, current_page - 1, "«") | page_tags]
          else
            page_tags
          end
        page_tags =
          if current_page < total_pages do
            page_tags ++ [page_link(path_fun, params, current_page + 1, "»")]
          else
            page_tags
          end
        page_tags
end
end
end
defp page_link(path_fun, params, page) do
content_tag :li do
link "#{page}", to: path_fun.( Map.merge(params, %{ "page" => page}))
end
end
defp page_link(path_fun, params, page, :active) do
content_tag :li, class: "active" do
link "#{page}", to: path_fun.( Map.merge(params, %{ "page" => page}))
end
end
defp page_link(path_fun, params, page, text) do
content_tag :li do
link "#{text}", to: path_fun.( Map.merge(params, %{ "page" => page}))
end
end
def inside_window_page_link(total_pages, current_page, path_fun, params) do
left_out = (current_page > 4) && (total_pages > 6)
right_out = total_pages > (current_page + 3)
start = case left_out do
true -> current_page - 2
_ -> 2
end
endp = case (start + 4) > total_pages do
true -> total_pages
_ -> start + 4
end
tags = Enum.map(start..endp, &raw_page_link(&1,current_page, path_fun, params))
case {left_out,right_out} do
      {true, true} -> [gap_page_link() | tags] ++ [gap_page_link()]
      {true, false} -> [gap_page_link() | tags]
      {false, true} -> tags ++ [gap_page_link()]
_ -> tags
end
end
defp gap_page_link do
content_tag :li do
content_tag :span, class: "disable" do
"..."
end
end
end
defp raw_page_link(page, current_page, path_fun, params) do
case current_page == page do
true -> page_link(path_fun, params, page, :active)
_ -> page_link(path_fun, params, page)
end
end
end
| 28.468085 | 140 | 0.62855 |
9ef5be689825ab032918b49013154d8c3eb56425 | 1,155 | exs | Elixir | clients/cloud_tasks/mix.exs | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/cloud_tasks/mix.exs | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/cloud_tasks/mix.exs | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | defmodule GoogleApi.CloudTasks.V2beta2.Mixfile do
use Mix.Project
@version "0.0.1"
def project do
[app: :google_api_cloud_tasks,
version: @version,
elixir: "~> 1.4",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/FIXME"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:google_gax, "~> 0.1"},
{:ex_doc, "~> 0.16", only: :dev},
{:dialyxir, "~> 0.5", only: [:dev], runtime: false}
]
end
defp description() do
"""
Manages the execution of large numbers of distributed requests.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/FIXME",
"Homepage" => "https://cloud.google.com/tasks/"
}
]
end
end
| 23.571429 | 105 | 0.592208 |
9ef60bbec9387d52b2a993db4bfdf021431ddc95 | 1,738 | ex | Elixir | apps/auth_engine/lib/api/create_user.ex | dottogame/overdot | 95aee9621e7036ab4857f49c6a745aaca4d645c5 | [
"MIT"
] | null | null | null | apps/auth_engine/lib/api/create_user.ex | dottogame/overdot | 95aee9621e7036ab4857f49c6a745aaca4d645c5 | [
"MIT"
] | null | null | null | apps/auth_engine/lib/api/create_user.ex | dottogame/overdot | 95aee9621e7036ab4857f49c6a745aaca4d645c5 | [
"MIT"
] | null | null | null | defmodule Api.CreateUser do
use Raxx.Server
@impl Raxx.Server
def handle_request(req, state) do
req_data = Poison.decode!(req.body)
# check for all needed params
has_all = ["email", "nick", "pass"] |> Enum.all?(&Map.has_key?(req_data, &1))
# check if email is taken
{status, _} = Couchdb.Connector.Reader.get(state.user_lookup, req_data["email"])
if status === :error do
if has_all do
# create new user
user_id = UUID.uuid4()
token = UUID.uuid4()
# create link from user email to account id
Couchdb.Connector.Writer.create(
state.user_lookup,
Poison.encode!(%{link: user_id}),
req_data["email"]
)
# assemble account data
data =
%{
"verify" => token,
"nick" => req_data["nick"],
"email" => req_data["email"],
"pass" => Bcrypt.hash_pwd_salt(req_data["pass"])
}
|> Poison.encode!()
# create entry in db
Couchdb.Connector.Writer.create(state.db, data, user_id)
# send email
AuthEngine.MailEngine.Emails.welcome_email(
req_data["email"],
req_data["nick"],
token
)
|> AuthEngine.MailEngine.Mailer.deliver_later()
response(:ok)
|> set_header("content-type", "application/json")
|> set_body("{\"s\": \"ok\"}")
else
response(:ok)
|> set_header("content-type", "application/json")
|> set_body("{\"s\": \"err\"}")
end
else
response(:ok)
|> set_header("content-type", "application/json")
|> set_body("{\"s\": \"ok\", \"c\":\"email already registered\"}")
end
end
end
| 27.587302 | 84 | 0.543728 |
9ef612835d73921ae20e78c5319d54e69c500f47 | 1,829 | ex | Elixir | lib/sftp_client/operations/download_file.ex | zoten/sftp_client | 814d06e36915f60bf98149d790051d03bee189f5 | [
"MIT"
] | 21 | 2019-08-06T01:39:15.000Z | 2021-04-15T20:08:04.000Z | lib/sftp_client/operations/download_file.ex | zoten/sftp_client | 814d06e36915f60bf98149d790051d03bee189f5 | [
"MIT"
] | 19 | 2019-07-10T17:10:34.000Z | 2021-04-18T11:16:58.000Z | lib/sftp_client/operations/download_file.ex | zoten/sftp_client | 814d06e36915f60bf98149d790051d03bee189f5 | [
"MIT"
] | 11 | 2019-07-04T14:56:37.000Z | 2021-04-26T19:09:22.000Z | defmodule SFTPClient.Operations.DownloadFile do
@moduledoc """
A module that provides functions to download files from an SFTP server.
"""
alias SFTPClient.Conn
alias SFTPClient.ConnError
alias SFTPClient.InvalidOptionError
alias SFTPClient.OperationError
alias SFTPClient.Operations.StreamFile
@doc """
Downloads a file from the remote path and stores it in the given local path.
When the local path is a directory, the file name of the local file is
inferred from the remote path.
"""
@spec download_file(Conn.t(), Path.t(), Path.t()) ::
{:ok, Path.t()} | {:error, SFTPClient.error()}
def download_file(%Conn{} = conn, remote_path, local_path) do
{:ok, download_file!(conn, remote_path, local_path)}
rescue
error in [ConnError, InvalidOptionError, OperationError] ->
{:error, error}
end
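  # Hypothetical usage sketch (the connection and paths below are illustrative and not
  # defined in this module):
  #
  #   # given an established %SFTPClient.Conn{} in `conn`
  #   {:ok, local} = SFTPClient.Operations.DownloadFile.download_file(conn, "/remote/report.csv", "/tmp")
  #   # `local` would be "/tmp/report.csv", since the local path is a directory.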
@doc """
Downloads a file from the remote path and stores it in the given local path.
When the local path is a directory, the file name of the local file is
inferred from the remote path. Raises when the operation fails.
"""
@spec download_file!(Conn.t(), Path.t(), Path.t()) :: Path.t() | no_return
def download_file!(%Conn{} = conn, remote_path, local_path) do
local_path = get_local_path(local_path, remote_path)
source_stream = StreamFile.stream_file!(conn, remote_path)
target_stream = File.stream!(local_path)
source_stream
|> Stream.into(target_stream)
|> Stream.run()
local_path
end
defp get_local_path(local_path, remote_path) do
if File.dir?(local_path) do
Path.join(local_path, Path.basename(remote_path))
else
ensure_store_path_exists!(local_path)
local_path
end
end
defp ensure_store_path_exists!(local_path) do
local_path
|> Path.dirname()
|> File.mkdir_p!()
end
end
| 31 | 78 | 0.706397 |
9ef632052b1296fef3011df601baaf7f983d36dd | 3,066 | ex | Elixir | lib/event_store/storage/stream.ex | davydog187/eventstore | 085602a8cfae7401e6d89472a053fd52f586832f | [
"MIT"
] | 576 | 2017-11-03T14:11:07.000Z | 2022-03-29T06:18:47.000Z | lib/event_store/storage/stream.ex | davydog187/eventstore | 085602a8cfae7401e6d89472a053fd52f586832f | [
"MIT"
] | 129 | 2017-11-08T06:10:20.000Z | 2021-09-15T16:18:14.000Z | lib/event_store/storage/stream.ex | davydog187/eventstore | 085602a8cfae7401e6d89472a053fd52f586832f | [
"MIT"
] | 118 | 2017-11-14T14:10:09.000Z | 2022-03-28T13:13:56.000Z | defmodule EventStore.Storage.Stream do
@moduledoc false
alias EventStore.Page
alias EventStore.Sql.Statements
alias EventStore.Streams.StreamInfo
def stream_info(conn, stream_uuid, opts) do
{schema, opts} = Keyword.pop(opts, :schema)
query = Statements.query_stream_info(schema)
case Postgrex.query(conn, query, [stream_uuid], opts) do
{:ok, %Postgrex.Result{num_rows: 0}} ->
{:ok, StreamInfo.new(stream_uuid)}
{:ok, %Postgrex.Result{rows: [row]}} ->
{:ok, to_stream_info(row)}
{:error, _error} = reply ->
reply
end
end
def paginate_streams(conn, opts) do
{page_number, opts} = Keyword.pop(opts, :page_number, 1)
{page_size, opts} = Keyword.pop(opts, :page_size, 50)
{schema, opts} = Keyword.pop(opts, :schema)
offset = page_size * (page_number - 1)
with {:ok, total_streams} <- count_streams(conn, schema, opts),
{:ok, streams} <- query_streams(conn, schema, page_size, offset, opts) do
page = %Page{
entries: streams,
page_number: page_number,
page_size: page_size,
total_entries: total_streams,
total_pages: Page.total_pages(total_streams, page_size)
}
{:ok, page}
end
end
defp count_streams(conn, schema, opts) do
query = Statements.count_streams(schema)
search = search_term(opts)
case Postgrex.query(conn, query, [search], opts) do
{:ok, %Postgrex.Result{rows: [[count]]}} ->
{:ok, count}
{:error, _error} = reply ->
reply
end
end
defp query_streams(conn, schema, limit, offset, opts) do
search = search_term(opts)
sort_by =
case Keyword.get(opts, :sort_by, :stream_id) do
:stream_uuid -> "stream_uuid"
:stream_id -> "stream_id"
:stream_version -> "stream_version"
:created_at -> "created_at"
:deleted_at -> "deleted_at"
:status -> "status"
end
sort_dir =
case Keyword.get(opts, :sort_dir, :asc) do
:asc -> "ASC"
:desc -> "DESC"
end
query = Statements.query_streams(schema, sort_by, sort_dir)
case Postgrex.query(conn, query, [search, limit, offset], opts) do
{:ok, %Postgrex.Result{num_rows: 0}} ->
{:ok, []}
{:ok, %Postgrex.Result{rows: rows}} ->
{:ok, Enum.map(rows, &to_stream_info/1)}
{:error, _error} = reply ->
reply
end
end
defp search_term(opts) do
case Keyword.get(opts, :search) do
nil -> nil
"" -> nil
search when is_binary(search) -> "%" <> search <> "%"
end
end
defp to_stream_info(row) do
[stream_id, stream_uuid, stream_version, created_at, deleted_at] = row
stream_version = if is_nil(stream_version), do: 0, else: stream_version
status = if is_nil(deleted_at), do: :created, else: :deleted
%StreamInfo{
stream_uuid: stream_uuid,
stream_id: stream_id,
stream_version: stream_version,
created_at: created_at,
deleted_at: deleted_at,
status: status
}
end
end
| 26.205128 | 82 | 0.616765 |
9ef699791adb47cf72ba23841c615b8dc5a7110d | 75 | exs | Elixir | impl/elixir/.formatter.exs | hunterwb/ascii-transliteration | 55afaa37b8af7159442e9f2aad13d32197e4d75e | [
"0BSD"
] | 5 | 2020-01-14T21:52:27.000Z | 2020-07-25T17:39:24.000Z | impl/elixir/.formatter.exs | hunterwb/ascii-transliteration | 55afaa37b8af7159442e9f2aad13d32197e4d75e | [
"0BSD"
] | null | null | null | impl/elixir/.formatter.exs | hunterwb/ascii-transliteration | 55afaa37b8af7159442e9f2aad13d32197e4d75e | [
"0BSD"
] | null | null | null | # Used by "mix format"
[
inputs: ["*.exs", "{lib,test}/**/*.{ex,exs}"]
]
| 15 | 47 | 0.48 |
9ef6a14af12c4a4fe4bbf60865d9e146f5ab605d | 2,029 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_intent_followup_intent_info.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2019-01-03T22:30:36.000Z | 2019-01-03T22:30:36.000Z | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_intent_followup_intent_info.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_intent_followup_intent_info.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2IntentFollowupIntentInfo do
@moduledoc """
Represents a single followup intent in the chain.
## Attributes
- followupIntentName (String.t): The unique identifier of the followup intent. Format: `projects/<Project ID>/agent/intents/<Intent ID>`. Defaults to: `null`.
- parentFollowupIntentName (String.t): The unique identifier of the followup intent parent. Format: `projects/<Project ID>/agent/intents/<Intent ID>`. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:followupIntentName => any(),
:parentFollowupIntentName => any()
}
field(:followupIntentName)
field(:parentFollowupIntentName)
end
defimpl Poison.Decoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2IntentFollowupIntentInfo do
def decode(value, options) do
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2IntentFollowupIntentInfo.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2IntentFollowupIntentInfo do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.232143 | 195 | 0.754066 |
9ef6a86cdd5e34a58b202254594f24308be80194 | 1,640 | ex | Elixir | lib/ex_neo4j/model/build_method.ex | raw1z/ex_neo4j | afb778f56ff65c63ceb848b8debe9c3e8b3a375e | [
"MIT"
] | 2 | 2015-07-28T16:12:56.000Z | 2015-10-09T21:21:16.000Z | lib/ex_neo4j/model/build_method.ex | raw1z/ex_neo4j | afb778f56ff65c63ceb848b8debe9c3e8b3a375e | [
"MIT"
] | 1 | 2015-11-01T16:14:34.000Z | 2015-11-02T10:35:48.000Z | lib/ex_neo4j/model/build_method.ex | raw1z/ex_neo4j | afb778f56ff65c63ceb848b8debe9c3e8b3a375e | [
"MIT"
] | null | null | null | defmodule ExNeo4j.Model.BuildMethod do
def generate(metadata) do
quote do
require ExNeo4j.Model.ModelBuilder
def build, do: ExNeo4j.Model.ModelBuilder.build(__MODULE__)
def build(params) when is_list(params) or is_map(params) do
ExNeo4j.Model.ModelBuilder.build(__MODULE__, params)
end
unquote generate_for_required_field(metadata.fields)
unquote generate_for_each_field(metadata.fields)
end
end
defp generate_for_each_field(fields) do
fields
|> Enum.filter(fn field -> field.required == true end)
|> Enum.map(fn field -> {Atom.to_string(field.name), Macro.var(field.name, __MODULE__)} end)
|> Enum.map fn field ->
function_args = {:%{}, [], [field]}
quote do
def build(unquote(function_args)=attributes) do
attributes = attributes
|> Enum.map(fn {k,v} -> {String.to_atom(k), v} end)
|> Enum.into(Map.new)
ExNeo4j.Model.ModelBuilder.build(__MODULE__, attributes)
end
end
end
end
defp generate_for_required_field(fields) do
required_fields = fields
|> Enum.filter(fn field -> field.required == true end)
|> Enum.map(fn field -> {Atom.to_string(field.name), Macro.var(field.name, __MODULE__)} end)
function_args = {:%{}, [], required_fields}
quote do
def build(unquote(function_args) = attributes) do
attributes = attributes
|> Enum.map(fn {k,v} -> {String.to_atom(k), v} end)
|> Enum.into(Map.new)
ExNeo4j.Model.ModelBuilder.build(__MODULE__, attributes)
end
end
end
end
| 31.538462 | 114 | 0.636585 |
9ef6b23f1f7c6e4839191049c950f6357d94f236 | 1,638 | ex | Elixir | lib/chat_api/emails/email.ex | rlanga/papercups | 358ca46c344908585cd0214a0de96e5676120c68 | [
"MIT"
] | 1 | 2020-08-13T15:11:12.000Z | 2020-08-13T15:11:12.000Z | lib/chat_api/emails/email.ex | rlanga/papercups | 358ca46c344908585cd0214a0de96e5676120c68 | [
"MIT"
] | null | null | null | lib/chat_api/emails/email.ex | rlanga/papercups | 358ca46c344908585cd0214a0de96e5676120c68 | [
"MIT"
] | null | null | null | defmodule ChatApi.Emails.Email do
import Swoosh.Email
import Ecto.Changeset
@from_address System.get_env("FROM_ADDRESS")
@backend_url System.get_env("BACKEND_URL") || ""
defstruct to_address: nil, message: nil
# TODO: Move conversation id out the mailer should only care about the message
def send(to_address, message, conversation_id) do
    # Using try/rescue here because, if someone is self-hosting and doesn't need the email service, sending would error out.
    # TODO: Find a better solution than try/rescue, e.g. configure a no-op mailer in config.exs instead.
try do
link =
"<a href=\"https://#{@backend_url}/conversations/#{conversation_id}\">View in dashboard</a>"
msg = "<b>#{message}</b>"
html = "A new message has arrived:<br />" <> msg <> "<br /><br />" <> link
text = "A new message has arrived: #{message}"
new()
|> to(to_address)
|> from({"hello", @from_address})
|> subject("A customer has sent you a message!")
|> html_body(html)
|> text_body(text)
|> ChatApi.Mailer.deliver()
rescue
e ->
IO.puts(
"Email config environment variable may not have been setup properly: #{e.message}"
)
end
end
@spec changeset(
{map, map} | %{:__struct__ => atom | %{__changeset__: map}, optional(atom) => any},
:invalid | %{optional(:__struct__) => none, optional(atom | binary) => any}
) :: Ecto.Changeset.t()
def changeset(email, attrs) do
email
|> cast(attrs, [:to_address, :message])
|> validate_required([:to_address, :message])
end
end
| 34.851064 | 122 | 0.632479 |
9ef6cb39422b038766a08ef22631e02696c726ad | 12,082 | ex | Elixir | lib/radius/packet.ex | bearice/elixir-radius | cbfb612957eec6afd3bf0c7839c2df1158fb2e0d | [
"MIT"
] | 11 | 2015-05-20T05:14:52.000Z | 2020-09-24T13:34:28.000Z | lib/radius/packet.ex | bearice/elixir-radius | cbfb612957eec6afd3bf0c7839c2df1158fb2e0d | [
"MIT"
] | 2 | 2015-10-23T05:00:19.000Z | 2021-01-16T19:04:19.000Z | lib/radius/packet.ex | bearice/elixir-radius | cbfb612957eec6afd3bf0c7839c2df1158fb2e0d | [
"MIT"
] | 3 | 2015-02-14T20:37:32.000Z | 2019-11-22T14:19:03.000Z | defmodule Radius.Packet do
require Logger
alias Radius.Dict.Attribute
alias Radius.Dict.Vendor
alias Radius.Dict.Value
alias Radius.Dict.EntryNotFoundError
defstruct [
code: nil,
id: nil,
length: nil,
auth: nil,
attrs: [],
raw: nil,
secret: nil,
]
@doc """
Decode radius packet
"""
def decode(data, secret) do
pkt = %{raw: data, secret: secret, attrs: nil}
|> decode_header
|> decode_payload
struct(__MODULE__, pkt)
end
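  # Illustrative only (the payload and secret below are invented): decoding a raw RADIUS
  # datagram yields a %Radius.Packet{} whose attrs are resolved against Radius.Dict.
  #
  #   packet = Radius.Packet.decode(udp_payload, "shared-secret")
  #   packet.code   #=> e.g. "Access-Request"
  #   packet.attrs  #=> e.g. [{"User-Name", "alice"}, ...]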
defp decode_header(%{raw: raw} = ctx) do
<<code, id, length::size(16), auth::binary-size(16), rest::binary>> = raw
if byte_size(rest) < length - 20 do
{:error, :packet_too_short}
else
if byte_size(ctx.raw) != length do
raise "Packet length not match."
end
ctx |> Map.merge(%{
code: decode_code(code),
id: id,
length: length,
auth: auth,
rest: rest
})
end
end
defp decode_code(1), do: "Access-Request"
defp decode_code(2), do: "Access-Accept"
defp decode_code(3), do: "Access-Reject"
defp decode_code(11), do: "Access-Challenge"
defp decode_code(4), do: "Accounting-Request"
defp decode_code(5), do: "Accounting-Response"
defp decode_code(12), do: "Status-Server"
defp decode_code(13), do: "Status-Client"
defp decode_code(x), do: x
defp decode_payload(ctx) do
decode_tlv(ctx.rest, [], {1, 1})
|> resolve_tlv(ctx)
end
defp decode_tlv(<<>>, acc, _), do: acc |> Enum.reverse()
  defp decode_tlv(bin, _, {_, 0}), do: bin # do not decode USR-style VSAs
defp decode_tlv(bin, acc, {tl, ll} = fmt) when byte_size(bin) > tl + ll do
tl = tl * 8
ll = ll * 8
<<type::integer-size(tl), length::integer-size(ll), rest::binary>> = bin
length = length - 2
<<value::binary-size(length), rest::binary>> = rest
decode_tlv(rest, [{type, value} | acc], fmt)
end
defp resolve_tlv(attrs, ctx) when is_list(attrs) do
attrs = attrs |> Enum.map(&resolve_tlv(&1, ctx, nil))
Map.put ctx, :attrs, attrs
end
#VSA Entry
defp resolve_tlv({26,value}, ctx, nil) do
type = "Vendor-Specific"
<<vid::size(32),rest::binary>>=value
try do
v = Vendor.by_id vid
value = case decode_tlv rest,[],v.format do
bin when is_binary(bin) -> bin
tlv when is_list(tlv) ->
Enum.map tlv, fn(x) ->
resolve_tlv(x,ctx,v.id)
end
end
{{type,v.name},value}
rescue _e in EntryNotFoundError ->
{type,value}
end
end
defp resolve_tlv({type, value} = tlv, ctx, vendor) do
try do
attr = Attribute.by_id vendor,type
type = attr.name
has_tag = Keyword.has_key? attr.opts, :has_tag
{tag, value} = case value do
<<0,rest::binary>> when has_tag==true ->
{nil, rest}
<<tag,rest::binary>> when tag in 1..0x1f and has_tag==true ->
{tag, rest}
_ ->
{nil, value}
end
value = value
|> decode_value(attr.type)
|> resolve_value(vendor,attr.id)
|> decrypt_value(Keyword.get(attr.opts, :encrypt), ctx.auth, ctx.secret)
if tag do
{type,{tag,value}}
else
{type,value}
end
rescue _e in EntryNotFoundError->
tlv
end
end
defp decode_value(<<val :: integer-size(8)>>,:byte), do: val
defp decode_value(<<val :: integer-size(16)>>,:short), do: val
defp decode_value(<<val :: integer-size(32)>>,:integer), do: val
defp decode_value(<<val :: integer-size(32)-signed>>,:signed), do: val
defp decode_value(<<val :: integer-size(32)>>,:date), do: val
defp decode_value(<<val :: integer-size(64)>>,:ifid), do: val
defp decode_value(<<a,b,c,d>>,:ipaddr), do: {a,b,c,d}
defp decode_value(<<bin :: binary-size(16)>>,:ipv6addr) do
(for <<x::integer-size(16) <-bin >>, do: x) |> :erlang.list_to_tuple
end
defp decode_value(bin,_t) do
bin
end
defp resolve_value(val,vid,aid) do
try do
v = Value.by_value vid,aid,val
v.name
rescue _e in EntryNotFoundError ->
val
end
end
defp decrypt_value(bin,nil,_,_), do: bin
defp decrypt_value(bin,1,auth,secret) do
Radius.Util.decrypt_rfc2865 bin,secret,auth
end
defp decrypt_value(bin,2,auth,secret) do
Radius.Util.decrypt_rfc2868 bin,secret,auth
end
defp decrypt_value(bin,a,_,_) do
Logger.error "Unknown encrypt type: #{inspect a}"
bin
end
@doc """
Return an iolist of encoded packet
for request packets, leave packet.auth == nil, then I will generate one from random bytes.
for reply packets, set packet.auth = request.auth, I will calc the reply hash with it.
packet.attrs :: [attr]
attr :: {type,value}
type :: String.t | integer | {"Vendor-Specific", vendor}
value :: integer | String.t | ipaddr
vendor :: String.t | integer
ipaddr :: {a,b,c,d} | {a,b,c,d,e,f,g,h}
"""
def encode(packet, options \\ []) do
sign? = options |> Keyword.get(:sign, false)
raw? = options |> Keyword.get(:raw, false)
{auth, reply?} = if packet.auth == nil do
{:crypto.strong_rand_bytes(16), false}
else
{packet.auth, true}
end
packet = %{packet | auth: auth}
packet = if sign? do
attrs =
packet.attrs ++ [
{"Message-Authenticator", <<0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0>>}
]
%{packet | attrs: attrs}
else
packet
end
attrs = encode_attrs(packet)
code = encode_code(packet.code)
length = 20 + :erlang.iolist_size(attrs)
header = <<code, packet.id, length::size(16), auth::binary>>
attrs = if sign? do
signature = :crypto.hmac(:md5, packet.secret, [header, attrs])
[last | attrs] = attrs |> Enum.reverse()
crop_len = byte_size(last) - 16
last = <<last::bytes-size(crop_len), signature::binary>>
[last | attrs] |> Enum.reverse()
else
attrs
end
header = if reply? and raw? == false do
resp_auth =
:crypto.hash_init(:md5)
|> :crypto.hash_update(header)
|> :crypto.hash_update(attrs)
|> :crypto.hash_update(packet.secret)
|> :crypto.hash_final()
<<header::bytes-size(4), resp_auth::binary>>
else
header
end
[header, attrs]
end
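  # A hedged usage sketch (values are invented; attribute names must resolve in Radius.Dict):
  #
  #   request = %Radius.Packet{
  #     code: "Access-Request",
  #     id: 1,
  #     secret: "shared-secret",
  #     attrs: [
  #       {"User-Name", "alice"},
  #       {"NAS-IP-Address", {127, 0, 0, 1}}
  #     ]
  #   }
  #   iodata = Radius.Packet.encode(request)             # auth is generated from random bytes
  #   signed = Radius.Packet.encode(request, sign: true) # appends Message-Authenticator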
defp encode_attrs(%{attrs: a}=ctx) do
Enum.map a, fn(x) ->
x |> resolve_attr(ctx) |> encode_attr
end
end
#back-door for VSAs, encode_vsa could retuen an iolist
defp encode_attr({26,value}), do: [26,:erlang.iolist_size(value)+2,value]
defp encode_attr({tag,value}) when is_binary(value) do
len = byte_size(value) + 2
if len > 0xff do
raise "value oversized: #{inspect {tag,value}}"
end
<<tag,len,value::binary>>
end
defp encode_attr({tag,value}) when is_integer(value) do
if value > 0xFFFFFFFF do
Logger.warn "value truncated: #{inspect {tag,value}}"
end
<<tag,6,value::integer-size(32)>>
end
defp encode_attr({type,value,attr}) do
{t,l}=attr.vendor.format
value = if Keyword.has_key? attr.opts, :has_tag do
{tag,value} = case value do
{tag,value} when tag in 0..0x1f -> {tag,value}
        {tag,_value} -> raise "Tag out of range, should be in [0-0x1f], got: #{tag}"
value -> {0,value}
end
value = encode_value(value,attr.type)
<<tag,value::binary>>
else
encode_value(value,attr.type)
end
length = byte_size(value) + t + l
ll = l*8
tl = t*8
<<type :: integer-size(tl), length :: integer-size(ll), value :: binary>>
end
defp encrypt_value({tag,bin},attr,ctx), do: {tag,encrypt_value(bin,attr,ctx)}
defp encrypt_value(bin,attr,ctx), do: encrypt_value(bin,Keyword.get(attr.opts,:encrypt),ctx.auth,ctx.secret)
defp encrypt_value(bin,nil,_,_), do: bin
defp encrypt_value(bin,1,auth,secret) do
Radius.Util.encrypt_rfc2865 bin,secret,auth
end
defp encrypt_value(bin,2,auth,secret) do
Radius.Util.encrypt_rfc2868 bin,secret,auth
end
defp encrypt_value(bin,a,_,_) do
Logger.error "Unknown encrypt type: #{inspect a}"
bin
end
defp encode_value(val,:byte) when is_integer(val), do: <<val::size(8)>>
defp encode_value(val,:short) when is_integer(val), do: <<val::size(16)>>
defp encode_value(val,:integer) when is_integer(val), do: <<val::size(32)>>
defp encode_value(val,:signed) when is_integer(val), do: <<val::size(32)-signed>>
defp encode_value(val,:date) when is_integer(val), do: <<val::size(32)>>
defp encode_value(val,:ifid) when is_integer(val), do: <<val::size(64)>>
defp encode_value({a,b,c,d},:ipaddr), do: <<a,b,c,d>>
defp encode_value(x,:ipaddr) when is_integer(x), do: <<x::size(32)>>
defp encode_value(x,:ipv6addr) when is_tuple(x) and tuple_size(x) == 8 do
for x <- :erlang.tuple_to_list(x), into: "", do: <<x::size(16)>>
end
defp encode_value(bin,_), do: bin
defp resolve_attr({{type,vid},value},ctx) when type=="Vendor-Specific" or type == 26 do
{26,encode_vsa(vid,value,ctx)}
end
defp resolve_attr(tlv,ctx) do
resolve_attr(tlv,ctx,%Vendor{})
end
defp resolve_attr({type,value},ctx,vendor) do
case lookup_attr(vendor,type) do
nil -> {type,value}
a -> {a.id,lookup_value(a,value)|>encrypt_value(a,ctx),a}
end
end
defp lookup_attr(vendor,type) when is_integer(type) do
try do
Attribute.by_id vendor.id,type
rescue
_e in EntryNotFoundError -> nil
end
end
#Raise an error if attr not defined
defp lookup_attr(_vendor,type) when is_binary(type) do
Attribute.by_name type
end
defp lookup_value(attr,{tag,val}) do
{tag,lookup_value(attr,val)}
end
defp lookup_value(%{type: :integer}=attr,val) when is_binary(val) do
try do
v = Value.by_name attr.vendor.name,attr.name,val
v.value
rescue _e in EntryNotFoundError->
#raise "Value can not be resolved: #{attr.name}: #{val}"
val
end
end
defp lookup_value(_,val), do: val
defp encode_vsa(vid,value,ctx) when is_binary(value) and is_binary(vid), do: encode_vsa(Vendor.by_name(vid).id,value,ctx)
defp encode_vsa(vid,value,_) when is_binary(value) and is_integer(vid), do: <<vid::size(32),value>>
defp encode_vsa(vid,vsa,ctx) when is_tuple(vsa), do: encode_vsa(vid, [vsa], ctx)
defp encode_vsa(vid,vsa,ctx) when is_binary(vid), do: encode_vsa(Vendor.by_name(vid), vsa, ctx)
defp encode_vsa(vid,vsa,ctx) when is_integer(vid), do: encode_vsa(Vendor.by_id(vid), vsa, ctx)
defp encode_vsa(vendor, vsa, ctx) do
val = Enum.map vsa, fn(x) ->
x|> resolve_attr(ctx,vendor) |> encode_attr
end
[<<vendor.id::size(32)>>|val]
end
defp encode_code(x) when is_integer(x), do: x
defp encode_code("Access-Request"), do: 1
defp encode_code("Access-Accept"), do: 2
defp encode_code("Access-Reject"), do: 3
defp encode_code("Access-Challenge"), do: 11
defp encode_code("Accounting-Request"), do: 4
defp encode_code("Accounting-Response"), do: 5
defp encode_code("Status-Server"), do: 12
defp encode_code("Status-Client"), do: 13
@doc """
Return the value of a given attribute, if found, or default otherwise.
"""
def get_attr(packet, attr_name, default \\ nil) do
result =
packet.attrs
|> Enum.find(default, fn
{^attr_name, _} -> true
_ -> false
end)
case result do
{_, value} -> value
      _ -> default
end
end
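  # e.g. (hypothetical packet): Radius.Packet.get_attr(packet, "User-Name", "anonymous")
  # returns the attribute's value when present, or "anonymous" otherwise.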
@doc """
Verify if the packet signature is valid.
"""
def verify(packet) do
# TODO: this code is going to fail when validating replies
sig1 =
packet
|> Radius.Packet.get_attr("Message-Authenticator")
if sig1 != nil do
attrs =
packet.attrs
|> Enum.filter(fn {k, _} -> k != "Message-Authenticator" end)
raw =
%{packet | attrs: attrs}
|> Radius.Packet.encode(raw: true, sign: true)
|> IO.iodata_to_binary
crop_len = byte_size(raw) - 16
<<_::bytes-size(crop_len), sig2::binary>> = raw
sig1 == sig2
else
false
end
end
end #defmodule Packet
| 29.980149 | 123 | 0.620096 |
9ef6cd557028d87b4bb1ad36472ba82f3bfb2d43 | 249 | ex | Elixir | lib/big_brother/utils/misc.ex | luismasuelli/elixir-websockets-auth | 845b826c4f2b612565930771497188081e6af71c | [
"MIT"
] | null | null | null | lib/big_brother/utils/misc.ex | luismasuelli/elixir-websockets-auth | 845b826c4f2b612565930771497188081e6af71c | [
"MIT"
] | null | null | null | lib/big_brother/utils/misc.ex | luismasuelli/elixir-websockets-auth | 845b826c4f2b612565930771497188081e6af71c | [
"MIT"
] | null | null | null | defmodule BigBrother.Utils.Misc do
@moduledoc """
Several utils functions for this app.
"""
def get_keywords(options, defaults) do
options = Keyword.merge(defaults, options)
Enum.map Keyword.keys(defaults), &(options[&1])
end
end
| 22.636364 | 51 | 0.706827 |
9ef6e5199706df4dce2322091728901995abcc8c | 2,299 | ex | Elixir | lib/calamity/event_store/list_event_store.ex | Cantido/calamity | 203c95eca8cbe6cf7eab8a8f88f14ece6246935f | [
"MIT"
] | null | null | null | lib/calamity/event_store/list_event_store.ex | Cantido/calamity | 203c95eca8cbe6cf7eab8a8f88f14ece6246935f | [
"MIT"
] | 1 | 2022-02-28T11:22:00.000Z | 2022-02-28T11:22:00.000Z | lib/calamity/event_store/list_event_store.ex | Cantido/calamity | 203c95eca8cbe6cf7eab8a8f88f14ece6246935f | [
"MIT"
] | null | null | null | # SPDX-FileCopyrightText: 2021 Rosa Richter
#
# SPDX-License-Identifier: MIT
defmodule Calamity.EventStore.ListEventStore do
@moduledoc """
A simple in-memory event store.
"""
alias Calamity.EventMetadata
defstruct streams: %{},
subscribers: %{}
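  # Hypothetical usage via the Calamity.EventStore protocol (the %AccountOpened{} event
  # struct is illustrative, not defined here):
  #
  #   store = %Calamity.EventStore.ListEventStore{}
  #   {:ok, store} = Calamity.EventStore.append(store, "account-1", [%AccountOpened{}], [])
  #   Calamity.EventStore.stream(store, "account-1", [])
  #   #=> [{%AccountOpened{}, %Calamity.EventMetadata{...}}]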
defimpl Calamity.EventStore do
def append(store, stream_id, events, opts) do
case assert_version(store, stream_id, Keyword.get(opts, :expected_version, :any)) do
:ok ->
new_events =
Enum.map(events, fn event ->
{event, %EventMetadata{created_at: DateTime.utc_now()}}
end)
updated_streams =
store.streams
|> Map.put_new(stream_id, [])
|> Map.update!(stream_id, fn previous_events -> previous_events ++ new_events end)
subs_to_version = Map.get(store.subscribers, stream_id, [])
subs_to_all = Map.get(store.subscribers, :all, [])
Enum.concat(subs_to_version, subs_to_all)
|> Enum.each(fn subscriber ->
Process.send(subscriber, {:events, events}, [])
end)
store = %{store | streams: updated_streams}
{:ok, store}
err ->
{:error, err}
end
end
defp assert_version(_store, _stream_id, :any) do
:ok
end
defp assert_version(store, stream_id, :no_stream) do
if Map.get(store.streams, stream_id, []) |> Enum.empty?() do
:ok
else
:stream_exists
end
end
defp assert_version(store, stream_id, version) do
current_version = Map.get(store.streams, stream_id, []) |> Enum.count()
if version == current_version do
:ok
else
:version_mismatch
end
end
def stream(store, :all, _opts) do
Map.values(store.streams)
|> Enum.concat()
      |> Enum.sort_by(fn {_event, metadata} -> metadata.created_at end, DateTime)
end
def stream(store, stream_id, opts) do
Map.get(store.streams, stream_id, [])
|> Enum.drop(Keyword.get(opts, :start_version, 0))
end
def subscribe(store, stream_id, pid) do
new_subscribers =
Map.update(store.subscribers, stream_id, [pid], &[pid | &1])
%{store | subscribers: new_subscribers}
end
end
end
| 26.125 | 94 | 0.595911 |
9ef6f19434c82c02f4395e29c7b94b0c299181a0 | 8,179 | exs | Elixir | integration_test/sql/sandbox.exs | hauleth/ecto_sql | 1d7f4b73bfa04e02a26bba8b3ea79a457850af0f | [
"Apache-2.0"
] | 1 | 2020-10-18T09:11:56.000Z | 2020-10-18T09:11:56.000Z | integration_test/sql/sandbox.exs | hauleth/ecto_sql | 1d7f4b73bfa04e02a26bba8b3ea79a457850af0f | [
"Apache-2.0"
] | 1 | 2020-06-29T20:59:38.000Z | 2020-06-29T20:59:38.000Z | integration_test/sql/sandbox.exs | hauleth/ecto_sql | 1d7f4b73bfa04e02a26bba8b3ea79a457850af0f | [
"Apache-2.0"
] | 1 | 2019-05-31T02:43:25.000Z | 2019-05-31T02:43:25.000Z | defmodule Ecto.Integration.SandboxTest do
use ExUnit.Case
alias Ecto.Adapters.SQL.Sandbox
alias Ecto.Integration.{PoolRepo, TestRepo}
alias Ecto.Integration.Post
import ExUnit.CaptureLog
Application.put_env(:ecto_sql, __MODULE__.DynamicRepo, Application.get_env(:ecto_sql, TestRepo))
defmodule DynamicRepo do
use Ecto.Repo, otp_app: :ecto_sql, adapter: TestRepo.__adapter__()
end
describe "errors" do
test "raises if repo doesn't exist" do
assert_raise UndefinedFunctionError, ~r"function UnknownRepo.get_dynamic_repo/0 is undefined", fn ->
Sandbox.mode(UnknownRepo, :manual)
end
end
test "raises if repo is not started" do
assert_raise RuntimeError, ~r"could not lookup Ecto repo #{inspect DynamicRepo} because it was not started", fn ->
Sandbox.mode(DynamicRepo, :manual)
end
end
test "raises if repo is not using sandbox" do
assert_raise RuntimeError, ~r"cannot invoke sandbox operation with pool DBConnection", fn ->
Sandbox.mode(PoolRepo, :manual)
end
assert_raise RuntimeError, ~r"cannot invoke sandbox operation with pool DBConnection", fn ->
Sandbox.checkout(PoolRepo)
end
end
test "includes link to SQL sandbox on ownership errors" do
assert_raise DBConnection.OwnershipError,
~r"See Ecto.Adapters.SQL.Sandbox docs for more information.", fn ->
TestRepo.all(Post)
end
end
end
describe "mode" do
test "uses the repository when checked out" do
assert_raise DBConnection.OwnershipError, ~r"cannot find ownership process", fn ->
TestRepo.all(Post)
end
Sandbox.checkout(TestRepo)
assert TestRepo.all(Post) == []
Sandbox.checkin(TestRepo)
assert_raise DBConnection.OwnershipError, ~r"cannot find ownership process", fn ->
TestRepo.all(Post)
end
end
test "uses the repository when allowed from another process" do
assert_raise DBConnection.OwnershipError, ~r"cannot find ownership process", fn ->
TestRepo.all(Post)
end
parent = self()
Task.start_link fn ->
Sandbox.checkout(TestRepo)
Sandbox.allow(TestRepo, self(), parent)
send(parent, :allowed)
Process.sleep(:infinity)
end
assert_receive :allowed
assert TestRepo.all(Post) == []
end
test "uses the repository when shared from another process" do
assert_raise DBConnection.OwnershipError, ~r"cannot find ownership process", fn ->
TestRepo.all(Post)
end
parent = self()
Task.start_link(fn ->
Sandbox.checkout(TestRepo)
Sandbox.mode(TestRepo, {:shared, self()})
send(parent, :shared)
Process.sleep(:infinity)
end)
assert_receive :shared
assert Task.async(fn -> TestRepo.all(Post) end) |> Task.await == []
after
Sandbox.mode(TestRepo, :manual)
end
test "works with a dynamic repo" do
repo_pid = start_supervised!({DynamicRepo, name: nil})
DynamicRepo.put_dynamic_repo(repo_pid)
assert Sandbox.mode(DynamicRepo, :manual) == :ok
assert_raise DBConnection.OwnershipError, ~r"cannot find ownership process", fn ->
DynamicRepo.all(Post)
end
Sandbox.checkout(DynamicRepo)
assert DynamicRepo.all(Post) == []
end
test "works with a repo pid" do
repo_pid = start_supervised!({DynamicRepo, name: nil})
DynamicRepo.put_dynamic_repo(repo_pid)
assert Sandbox.mode(repo_pid, :manual) == :ok
assert_raise DBConnection.OwnershipError, ~r"cannot find ownership process", fn ->
DynamicRepo.all(Post)
end
Sandbox.checkout(repo_pid)
assert DynamicRepo.all(Post) == []
end
end
describe "savepoints" do
test "runs inside a sandbox that is rolled back on checkin" do
Sandbox.checkout(TestRepo)
assert TestRepo.insert(%Post{})
assert TestRepo.all(Post) != []
Sandbox.checkin(TestRepo)
Sandbox.checkout(TestRepo)
assert TestRepo.all(Post) == []
Sandbox.checkin(TestRepo)
end
test "runs inside a sandbox that may be disabled" do
Sandbox.checkout(TestRepo, sandbox: false)
assert TestRepo.insert(%Post{})
assert TestRepo.all(Post) != []
Sandbox.checkin(TestRepo)
Sandbox.checkout(TestRepo)
assert {1, _} = TestRepo.delete_all(Post)
Sandbox.checkin(TestRepo)
Sandbox.checkout(TestRepo, sandbox: false)
assert {1, _} = TestRepo.delete_all(Post)
Sandbox.checkin(TestRepo)
end
test "runs inside a sandbox with caller data when preloading associations" do
Sandbox.checkout(TestRepo)
assert TestRepo.insert(%Post{})
parent = self()
Task.start_link fn ->
Sandbox.allow(TestRepo, parent, self())
assert [_] = TestRepo.all(Post) |> TestRepo.preload([:author, :comments])
send parent, :success
end
assert_receive :success
end
test "runs inside a sidebox with custom ownership timeout" do
:ok = Sandbox.checkout(TestRepo, ownership_timeout: 200)
parent = self()
assert capture_log(fn ->
{:ok, pid} =
Task.start(fn ->
Sandbox.allow(TestRepo, parent, self())
TestRepo.transaction(fn -> Process.sleep(500) end)
end)
ref = Process.monitor(pid)
assert_receive {:DOWN, ^ref, _, ^pid, _}, 1000
end) =~ "it owned the connection for longer than 200ms"
end
test "does not taint the sandbox on query errors" do
Sandbox.checkout(TestRepo)
{:ok, _} = TestRepo.insert(%Post{}, skip_transaction: true)
{:error, _} = TestRepo.query("INVALID")
{:ok, _} = TestRepo.insert(%Post{}, skip_transaction: true)
Sandbox.checkin(TestRepo)
end
end
describe "transactions" do
@tag :transaction_isolation
test "with custom isolation level" do
Sandbox.checkout(TestRepo, isolation: "READ UNCOMMITTED")
# Setting it to the same level later on works
TestRepo.query!("SET TRANSACTION ISOLATION LEVEL READ UNCOMMITTED")
# Even inside a transaction
TestRepo.transaction fn ->
TestRepo.query!("SET TRANSACTION ISOLATION LEVEL READ UNCOMMITTED")
end
end
test "disconnects on transaction timeouts" do
Sandbox.checkout(TestRepo)
assert capture_log(fn ->
{:error, :rollback} =
TestRepo.transaction(fn -> Process.sleep(1000) end, timeout: 100)
end) =~ "timed out"
Sandbox.checkin(TestRepo)
end
end
describe "checkouts" do
test "with transaction inside checkout" do
Sandbox.checkout(TestRepo)
TestRepo.checkout(fn ->
refute TestRepo.in_transaction?()
TestRepo.transaction(fn ->
assert TestRepo.in_transaction?()
end)
refute TestRepo.in_transaction?()
end)
end
test "with checkout inside transaction" do
Sandbox.checkout(TestRepo)
TestRepo.transaction(fn ->
assert TestRepo.in_transaction?()
TestRepo.checkout(fn ->
assert TestRepo.in_transaction?()
end)
assert TestRepo.in_transaction?()
end)
end
end
describe "start_owner!/2" do
test "checks out the connection" do
assert_raise DBConnection.OwnershipError, ~r"cannot find ownership process", fn ->
TestRepo.all(Post)
end
owner = Sandbox.start_owner!(TestRepo)
assert TestRepo.all(Post) == []
:ok = Sandbox.stop_owner(owner)
refute Process.alive?(owner)
end
test "can set shared mode" do
assert_raise DBConnection.OwnershipError, ~r"cannot find ownership process", fn ->
TestRepo.all(Post)
end
parent = self()
Task.start_link(fn ->
owner = Sandbox.start_owner!(TestRepo, shared: true)
send(parent, {:owner, owner})
Process.sleep(:infinity)
end)
assert_receive {:owner, owner}
assert TestRepo.all(Post) == []
:ok = Sandbox.stop_owner(owner)
after
Sandbox.mode(TestRepo, :manual)
end
end
end
| 29.210714 | 120 | 0.647879 |
9ef6fe6debf7993ce305cb167a004c22603f6b6b | 423 | ex | Elixir | lib/jsonapi/paginator.ex | snewcomer/jsonapi | 3ed41d5c133fce5554b75a983daeb20e4f471c95 | [
"MIT"
] | 1 | 2020-10-29T19:28:23.000Z | 2020-10-29T19:28:23.000Z | lib/jsonapi/paginator.ex | snewcomer/jsonapi | 3ed41d5c133fce5554b75a983daeb20e4f471c95 | [
"MIT"
] | 1 | 2020-12-01T21:30:00.000Z | 2020-12-01T21:30:00.000Z | lib/jsonapi/paginator.ex | snewcomer/jsonapi | 3ed41d5c133fce5554b75a983daeb20e4f471c95 | [
"MIT"
] | null | null | null | defmodule JSONAPI.Paginator do
@moduledoc """
Pagination strategy behaviour
"""
alias Plug.Conn
@type page :: map()
@type options :: Keyword.t()
@type links :: %{
first: String.t() | nil,
last: String.t() | nil,
next: String.t() | nil,
prev: String.t() | nil
}
@callback paginate(data :: term, view :: atom, conn :: Conn.t(), page, options) :: links
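  # A minimal sketch of an implementing module (illustrative only; a real paginator would
  # build full pagination URLs from the conn, view and page data):
  #
  #   defmodule MyApp.OffsetPaginator do
  #     @behaviour JSONAPI.Paginator
  #
  #     @impl true
  #     def paginate(_data, _view, _conn, _page, _options) do
  #       %{first: nil, last: nil, next: nil, prev: nil}
  #     end
  #   end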
end
| 20.142857 | 90 | 0.548463 |
9ef704fee20a97f6062ebe553d26fd25d4b2e47c | 562 | exs | Elixir | 17-Conway3d/test/conway4d_test.exs | dvrensk/advent-of-code-2020 | 237e80da9958f37e51c0ac84da74bec5fec1f185 | [
"Unlicense"
] | 1 | 2020-12-02T01:34:06.000Z | 2020-12-02T01:34:06.000Z | 17-Conway3d/test/conway4d_test.exs | dvrensk/advent-of-code-2020 | 237e80da9958f37e51c0ac84da74bec5fec1f185 | [
"Unlicense"
] | null | null | null | 17-Conway3d/test/conway4d_test.exs | dvrensk/advent-of-code-2020 | 237e80da9958f37e51c0ac84da74bec5fec1f185 | [
"Unlicense"
] | null | null | null | defmodule Conway4dTest do
use ExUnit.Case
doctest Conway4d
test "puzzle 1: count after boot" do
assert Conway4d.count_after_6(input()) == 2236
end
def input(path \\ "input.txt"), do: File.read!(path)
def input_lines(path \\ "input.txt"), do: input(path) |> String.split("\n", trim: true)
def input_paragraphs(path \\ "input.txt"), do: input(path) |> String.split("\n\n", trim: true)
def input_ints(path \\ "input.txt"),
do: input(path) |> String.split() |> Enum.map(&String.to_integer/1)
def assert_eq(a, b), do: assert(a == b)
end
| 31.222222 | 96 | 0.656584 |
9ef7055cbb512b7e992ea4ce78d7dec43e5ccfb7 | 310 | exs | Elixir | test/support/fixtures/dynamic/schema_with_duplicate_names.exs | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 4,101 | 2016-03-02T03:49:20.000Z | 2022-03-31T05:46:01.000Z | test/support/fixtures/dynamic/schema_with_duplicate_names.exs | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 889 | 2016-03-02T16:06:59.000Z | 2022-03-31T20:24:12.000Z | test/support/fixtures/dynamic/schema_with_duplicate_names.exs | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 564 | 2016-03-02T07:49:59.000Z | 2022-03-06T14:40:59.000Z | defmodule Absinthe.Fixtures.SchemaWithDuplicateNames do
use Absinthe.Schema
query do
# Query type must exist
end
object :person do
description "A person"
field :name, :string
end
object :another_person, name: "Person" do
description "A person"
field :type, :string
end
end
| 17.222222 | 55 | 0.7 |
9ef7071740d26ad0271daaf0927fa14ab4dd3a18 | 2,814 | exs | Elixir | test/glimesh_web/controllers/user_session_controller_test.exs | YFG-Online/glimesh.tv | 5d9bb6f4ab383897c383bf33bbfac783b09e294e | [
"MIT"
] | null | null | null | test/glimesh_web/controllers/user_session_controller_test.exs | YFG-Online/glimesh.tv | 5d9bb6f4ab383897c383bf33bbfac783b09e294e | [
"MIT"
] | null | null | null | test/glimesh_web/controllers/user_session_controller_test.exs | YFG-Online/glimesh.tv | 5d9bb6f4ab383897c383bf33bbfac783b09e294e | [
"MIT"
] | null | null | null | defmodule GlimeshWeb.UserSessionControllerTest do
use GlimeshWeb.ConnCase, async: true
import Glimesh.AccountsFixtures
setup do
%{user: user_fixture()}
end
describe "GET /users/log_in" do
test "renders log in page", %{conn: conn} do
conn = get(conn, Routes.user_session_path(conn, :new))
response = html_response(conn, 200)
assert response =~ "<h3>Login to our Alpha!</h3>"
assert response =~ "Login</button>"
assert response =~ "Register</a>"
end
test "redirects if already logged in", %{conn: conn, user: user} do
conn = conn |> log_in_user(user) |> get(Routes.user_session_path(conn, :new))
assert redirected_to(conn) == "/"
end
end
describe "POST /users/log_in" do
test "logs the user in", %{conn: conn, user: user} do
conn =
post(conn, Routes.user_session_path(conn, :create), %{
"user" => %{"email" => user.email, "password" => valid_user_password()}
})
assert get_session(conn, :user_token)
assert redirected_to(conn) =~ "/"
# Now do a logged in request and assert on the menu
conn = get(conn, "/")
response = html_response(conn, 200)
assert response =~ user.username
assert response =~ "My Profile\n</a>"
assert response =~ "Sign Out\n</a>"
end
test "logs the user in with remember me", %{conn: conn, user: user} do
conn =
post(conn, Routes.user_session_path(conn, :create), %{
"user" => %{
"email" => user.email,
"password" => valid_user_password(),
"remember_me" => "true"
}
})
assert conn.resp_cookies["user_remember_me"]
assert redirected_to(conn) =~ "/"
end
test "emits error message with invalid credentials", %{conn: conn, user: user} do
conn =
post(conn, Routes.user_session_path(conn, :create), %{
"user" => %{"email" => user.email, "password" => "invalid_password"}
})
response = html_response(conn, 200)
assert response =~ "<h3>Login to our Alpha!</h3>"
assert response =~ "Invalid e-mail or password"
end
end
describe "DELETE /users/log_out" do
test "logs the user out", %{conn: conn, user: user} do
conn = conn |> log_in_user(user) |> delete(Routes.user_session_path(conn, :delete))
assert redirected_to(conn) == "/"
refute get_session(conn, :user_token)
assert get_flash(conn, :info) =~ "Logged out successfully"
end
test "succeeds even if the user is not logged in", %{conn: conn} do
conn = delete(conn, Routes.user_session_path(conn, :delete))
assert redirected_to(conn) == "/"
refute get_session(conn, :user_token)
assert get_flash(conn, :info) =~ "Logged out successfully"
end
end
end
| 33.105882 | 89 | 0.614072 |
9ef70da1367f2cb374250ec407bed8f4c30fc1e9 | 261 | ex | Elixir | lib/edgedb/application.ex | f0lio/edgedb-elixir | b285bd8037b0b951aabfa1d1733889880f8bfd66 | [
"MIT"
] | null | null | null | lib/edgedb/application.ex | f0lio/edgedb-elixir | b285bd8037b0b951aabfa1d1733889880f8bfd66 | [
"MIT"
] | null | null | null | lib/edgedb/application.ex | f0lio/edgedb-elixir | b285bd8037b0b951aabfa1d1733889880f8bfd66 | [
"MIT"
] | null | null | null | defmodule EdgeDB.Application do
@moduledoc false
use Application
@impl Application
def start(_type, _args) do
children = [
EdgeDB.Borrower
]
Supervisor.start_link(children, strategy: :one_for_one, name: EdgeDB.Supervisor)
end
end
| 17.4 | 84 | 0.712644 |
9ef735f1907260a504f8935efbbcf46af6064f4f | 1,798 | exs | Elixir | test/wobserver/util/metrics_test.exs | szlend/wobserver | 6c3ea1ef772ddb4a0b35956d155f33fc46f71a8c | [
"MIT"
] | 984 | 2017-02-06T17:13:48.000Z | 2022-03-18T22:46:55.000Z | test/wobserver/util/metrics_test.exs | szlend/wobserver | 6c3ea1ef772ddb4a0b35956d155f33fc46f71a8c | [
"MIT"
] | 52 | 2017-02-13T16:12:10.000Z | 2021-04-08T22:31:28.000Z | test/wobserver/util/metrics_test.exs | szlend/wobserver | 6c3ea1ef772ddb4a0b35956d155f33fc46f71a8c | [
"MIT"
] | 74 | 2017-02-13T15:23:49.000Z | 2021-07-30T07:43:27.000Z | defmodule Wobserver.Util.MetricsTest do
use ExUnit.Case
alias Wobserver.Util.Metrics
describe "overview" do
test "returns a list" do
assert is_list(Metrics.overview)
end
test "returns a keyword list" do
assert Keyword.keyword?(Metrics.overview)
end
end
describe "register" do
test "registers a metric" do
assert Metrics.register [example: {fn -> [{5, []}] end, :gauge, "Description"}]
assert Keyword.has_key?(Metrics.overview, :example)
end
test "registers a metric generator" do
assert Metrics.register [
fn -> [generated: {fn -> [{5, []}] end, :gauge, "Description"}] end
]
assert Keyword.has_key?(Metrics.overview, :generated)
end
test "registers a string metric generator" do
assert Metrics.register [
"fn -> [generated_s: {fn -> [{5, []}] end, :gauge, \"Description\"}] end"
]
assert Keyword.has_key?(Metrics.overview, :generated_s)
end
end
describe "load_config" do
setup do
:meck.new Application, [:passthrough]
:meck.expect Application, :get_env, fn (:wobserver, option, _) ->
case option do
:metrics -> [
additional: [config_example: {fn -> [{5, []}] end, :gauge, "Description"}],
generators: [fn -> [config_generated: {fn -> [{5, []}] end, :gauge, "Description"}] end]
]
:discovery -> :none
:port -> 4001
end
end
on_exit(fn -> :meck.unload end)
Metrics.load_config
:ok
end
test "loads metric from config" do
assert Keyword.has_key?(Metrics.overview, :config_example)
end
test "loads generated metrics from config" do
assert Keyword.has_key?(Metrics.overview, :config_generated)
end
end
end
| 26.057971 | 100 | 0.607898 |
9ef779f1efdbb6983a529ac39a48834346bc041b | 2,357 | exs | Elixir | config/config.exs | m4hi2/crawly | b9e1bfffcc97e978023924e7aad53fc8a223aebf | [
"Apache-2.0"
] | 486 | 2019-05-30T09:19:59.000Z | 2021-04-28T07:51:31.000Z | config/config.exs | m4hi2/crawly | b9e1bfffcc97e978023924e7aad53fc8a223aebf | [
"Apache-2.0"
] | 131 | 2019-06-29T12:43:24.000Z | 2021-04-24T19:40:07.000Z | config/config.exs | m4hi2/crawly | b9e1bfffcc97e978023924e7aad53fc8a223aebf | [
"Apache-2.0"
] | 52 | 2019-06-24T10:13:41.000Z | 2021-03-28T07:36:42.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :crawly, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:crawly, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
config :crawly,
fetcher: {Crawly.Fetchers.HTTPoisonFetcher, []},
retry: [
retry_codes: [400],
max_retries: 3,
ignored_middlewares: [Crawly.Middlewares.UniqueRequest]
],
# Stop spider after scraping certain amount of items
closespider_itemcount: 500,
# Stop spider if it does crawl fast enough
closespider_timeout: 20,
concurrent_requests_per_domain: 5,
# TODO: this looks outdated
follow_redirect: true,
log_to_file: false,
# Request middlewares
middlewares: [
Crawly.Middlewares.DomainFilter,
Crawly.Middlewares.UniqueRequest,
Crawly.Middlewares.RobotsTxt,
{Crawly.Middlewares.UserAgent,
user_agents: [
"Mozilla/5.0 (Macintosh; Intel Mac OS X x.y; rv:42.0) Gecko/20100101 Firefox/42.0",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36 OPR/38.0.2220.41"
]}
],
pipelines: [
{Crawly.Pipelines.Validate, fields: [:title, :author, :time, :url]},
{Crawly.Pipelines.DuplicatesFilter, item_id: :title},
Crawly.Pipelines.JSONEncoder
]
import_config "#{Mix.env()}.exs"
| 34.15942 | 131 | 0.72465 |
9ef7805f5e36d0530f7ebe91f30b4957bef6b69b | 1,650 | ex | Elixir | lib/erlsom/model.ex | kim-company/soapex | 46ac23ee621ce1259cad5e8f64ed6480be8511c3 | [
"MIT"
] | null | null | null | lib/erlsom/model.ex | kim-company/soapex | 46ac23ee621ce1259cad5e8f64ed6480be8511c3 | [
"MIT"
] | null | null | null | lib/erlsom/model.ex | kim-company/soapex | 46ac23ee621ce1259cad5e8f64ed6480be8511c3 | [
"MIT"
] | 2 | 2019-05-28T15:10:28.000Z | 2021-03-17T10:13:26.000Z | defmodule Erlsom.Model do
#from_model = Erlsom.Model.extract_records(model, tag)
#from_hrl = Record.Extractor.extract(tag, from: hrl)
#^from_model = from_hrl
# Record.defrecord :model,
# Record.extract(:model, from_lib: "erlsom/include/../src/erlsom_parse.hrl")
## Record.extract(:model, from_lib: "erlsom/../../../../deps/erlsom/src/erlsom_parse.hrl")
# Record.defrecord :type,
# Record.extract(:type, from_lib: "erlsom/include/../src/erlsom_parse.hrl")
## Record.extract(:model, from_lib: "erlsom/../../../../deps/erlsom/src/erlsom_parse.hrl")
def extract_records(erlsom_model) do
erlsom_model
# |> model(:tps)
|> elem(1)
# |> Enum.map(&type(&1, :nm))
|> Enum.filter(&(elem(&1, 1) != :_document))
|> Enum.map(&{elem(&1, 1), to_record(&1)})
end
# {:type, :"tns:ProjectType", :sequence, [els], [atts], :undefined, :undefined, 9, 1, 1, :undefined, :undefined}
defp to_record({:type, _nm, _tp=:sequence, els, atts, _anyAttr, _nillable, _nr, _mn, _mx, _mxd, _typeName}) do
[anyAttribs: :undefined] ++
Enum.map(atts, &to_record/1) ++
Enum.map(els, &to_record/1)
end
# {:el, [alts], 1, 1, :undefined, 2},
defp to_record({:el, [alt], _mn, _mx, _nillable, _nr}) do
to_record(alt)
end
defp to_record({:el, [_|_] = _alts, _mn, _mx, _nillable, _nr}) do
{:choice, :undefined}
end
# [{:alt, :"tns:projectCode", {:"#PCDATA", :char}, [], 1, 1, true, :undefined}],
defp to_record({:alt, tag, _tp, _nxt, _mn, _mx, _rl, _anyInfo}) do
{tag, :undefined}
end
# {:att, :projectID, 1, true, :char}
defp to_record({:att, nm, _nr, _opt, _tp}) do
{nm, :undefined}
end
end
| 34.375 | 114 | 0.626061 |
9ef788b8e061f08c140912a0edf261fdd2c8ba6c | 666 | exs | Elixir | config/config.exs | GregMefford/vintage_net | fdbbd80d45f4b660e72e0150aa4b98a1155b071e | [
"Apache-2.0"
] | null | null | null | config/config.exs | GregMefford/vintage_net | fdbbd80d45f4b660e72e0150aa4b98a1155b071e | [
"Apache-2.0"
] | null | null | null | config/config.exs | GregMefford/vintage_net | fdbbd80d45f4b660e72e0150aa4b98a1155b071e | [
"Apache-2.0"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# Overrides for unit tests:
#
# * udhcpc_handler: capture whatever happens with udhcpc
# * udhcpd_handler: capture whatever happens with udhcpd
# * resolvconf: don't update the real resolv.conf
# * persistence_dir: use the current directory
# * bin_ip: just fail if anything calls ip rather that run it
config :vintage_net,
udhcpc_handler: VintageNetTest.CapturingUdhcpcHandler,
udhcpd_handler: VintageNetTest.CapturingUdhcpdHandler,
resolvconf: "/dev/null",
persistence_dir: "./test_tmp/persistence",
bin_ip: "false"
| 37 | 61 | 0.783784 |
9ef7bb8f5b708d92f7ee2167842c00181bea1758 | 2,649 | ex | Elixir | lib/shopify_api/router.ex | pixelunion/elixir-shopifyapi | d6c57bda6e59006d68ffb16a2a8b80bcc2f1cc96 | [
"Apache-2.0"
] | 18 | 2019-06-07T13:36:39.000Z | 2021-08-03T21:06:36.000Z | lib/shopify_api/router.ex | pixelunion/elixir-shopifyapi | d6c57bda6e59006d68ffb16a2a8b80bcc2f1cc96 | [
"Apache-2.0"
] | 158 | 2018-08-30T22:09:00.000Z | 2021-09-22T01:18:59.000Z | lib/shopify_api/router.ex | pixelunion/elixir-shopifyapi | d6c57bda6e59006d68ffb16a2a8b80bcc2f1cc96 | [
"Apache-2.0"
] | 4 | 2020-09-05T00:48:46.000Z | 2020-09-30T15:53:50.000Z | defmodule ShopifyAPI.Router do
use Plug.Router
require Logger
alias Plug.Conn
alias ShopifyAPI.{App, AuthToken, AuthTokenServer, ConnHelpers}
alias ShopifyAPI.Shop
plug(:match)
plug(:dispatch)
get "/install/:app" do
install_app(conn)
end
get "/install" do
install_app(conn)
end
# Shopify Callback on App authorization
get "/authorized/:app" do
Logger.info("Authorized #{ConnHelpers.shop_domain(conn)}")
if auth_code_present?(conn) do
with {:ok, app} <- ConnHelpers.fetch_shopify_app(conn),
true <- verify_nonce(app, conn.query_params),
true <- ConnHelpers.verify_params_with_hmac(app, conn.query_params),
{:ok, auth_token} <- request_auth_token(conn, app) do
Shop.post_install(auth_token)
AuthTokenServer.set(auth_token)
conn
|> Conn.resp(200, "Authenticated.")
|> Conn.halt()
else
res ->
Logger.info("#{__MODULE__} failed authorized with: #{inspect(res)}")
conn
|> Conn.resp(404, "Not Found.")
|> Conn.halt()
end
else
# No auth code given, redirect to shopify's app install page
install_app(conn)
end
end
defp verify_nonce(%_{nonce: nonce}, %{"state" => state}), do: nonce == state
# Shopify doesn't pass the nonce back if the install was initiated from the partners dashboard.
defp verify_nonce(_, _) do
Logger.info("No nonce passed to install most likely dev install, skipping check")
true
end
defp request_auth_token(conn, app) do
app
|> App.fetch_token(ConnHelpers.shop_domain(conn), ConnHelpers.auth_code(conn))
|> case do
{:ok, token} ->
{:ok,
%AuthToken{
app_name: ConnHelpers.app_name(conn),
shop_name: ConnHelpers.shop_domain(conn),
code: ConnHelpers.auth_code(conn),
timestamp: String.to_integer(conn.query_params["timestamp"]),
token: token
}}
_msg ->
{:error, "unable to fetch token"}
end
end
defp install_app(conn) do
conn
|> ConnHelpers.fetch_shopify_app()
|> case do
{:ok, app} ->
install_url = App.install_url(app, ConnHelpers.shop_domain(conn))
conn
|> Conn.put_resp_header("location", install_url)
|> Conn.resp(unquote(302), "You are being redirected.")
|> Conn.halt()
res ->
Logger.info("#{__MODULE__} failed install with: #{res}")
conn
|> Conn.resp(404, "Not Found.")
|> Conn.halt()
end
end
defp auth_code_present?(conn), do: ConnHelpers.auth_code(conn) != nil
end
| 26.757576 | 97 | 0.619102 |
9ef7dabae109d9d7e8d5269ad25207eed4f917b9 | 21,331 | ex | Elixir | lib/phoenix/channel.ex | shritesh/phoenix | 4bf53ecaae5a9057ea57c248964490dfdee312af | [
"MIT"
] | null | null | null | lib/phoenix/channel.ex | shritesh/phoenix | 4bf53ecaae5a9057ea57c248964490dfdee312af | [
"MIT"
] | null | null | null | lib/phoenix/channel.ex | shritesh/phoenix | 4bf53ecaae5a9057ea57c248964490dfdee312af | [
"MIT"
] | null | null | null | defmodule Phoenix.Channel do
@moduledoc ~S"""
Defines a Phoenix Channel.
Channels provide a means for bidirectional communication from clients that
integrate with the `Phoenix.PubSub` layer for soft-realtime functionality.
## Topics & Callbacks
Every time you join a channel, you need to choose which particular topic you
want to listen to. The topic is just an identifier, but by convention it is
often made of two parts: `"topic:subtopic"`. Using the `"topic:subtopic"`
approach pairs nicely with the `Phoenix.Socket.channel/3` allowing you to
match on all topics starting with a given prefix by using a splat (the `*`
character) as the last character in the topic pattern:
channel "room:*", MyApp.RoomChannel
Any topic coming into the router with the `"room:"` prefix would dispatch
to `MyApp.RoomChannel` in the above example. Topics can also be pattern
matched in your channels' `join/3` callback to pluck out the scoped pattern:
# handles the special `"lobby"` subtopic
def join("room:lobby", _payload, socket) do
{:ok, socket}
end
# handles any other subtopic as the room ID, for example `"room:12"`, `"room:34"`
def join("room:" <> room_id, _payload, socket) do
{:ok, socket}
end
## Authorization
Clients must join a channel to send and receive PubSub events on that channel.
Your channels must implement a `join/3` callback that authorizes the socket
for the given topic. For example, you could check if the user is allowed to
join that particular room.
To authorize a socket in `join/3`, return `{:ok, socket}`.
To refuse authorization in `join/3`, return `{:error, reply}`.
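  Such a check might look like the following sketch, where `Rooms.member?/2` is
  a hypothetical helper and `socket.assigns.user_id` is assumed to have been set
  when the socket connected:

      def join("room:" <> room_id, _payload, socket) do
        if Rooms.member?(room_id, socket.assigns.user_id) do
          {:ok, socket}
        else
          {:error, %{reason: "unauthorized"}}
        end
      end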
## Incoming Events
After a client has successfully joined a channel, incoming events from the
client are routed through the channel's `handle_in/3` callbacks. Within these
callbacks, you can perform any action. Typically you'll either forward a
message to all listeners with `broadcast!/3`, or push a message directly down
the socket with `push/3`. Incoming callbacks must return the `socket` to
maintain ephemeral state.
Here's an example of receiving an incoming `"new_msg"` event from one client,
and broadcasting the message to all topic subscribers for this socket.
def handle_in("new_msg", %{"uid" => uid, "body" => body}, socket) do
broadcast!(socket, "new_msg", %{uid: uid, body: body})
{:noreply, socket}
end
You can also push a message directly down the socket:
# client asks for their current rank, push sent directly as a new event.
def handle_in("current_rank", socket) do
push(socket, "current_rank", %{val: Game.get_rank(socket.assigns[:user])})
{:noreply, socket}
end
## Replies
In addition to pushing messages out when you receive a `handle_in` event,
you can also reply directly to a client event for request/response style
messaging. This is useful when a client must know the result of an operation
or to simply ack messages.
For example, imagine creating a resource and replying with the created record:
def handle_in("create:post", attrs, socket) do
changeset = Post.changeset(%Post{}, attrs)
if changeset.valid? do
post = Repo.insert!(changeset)
response = MyApp.PostView.render("show.json", %{post: post})
{:reply, {:ok, response}, socket}
else
response = MyApp.ChangesetView.render("errors.json", %{changeset: changeset})
{:reply, {:error, response}, socket}
end
end
Alternatively, you may just want to ack the status of the operation:
def handle_in("create:post", attrs, socket) do
changeset = Post.changeset(%Post{}, attrs)
if changeset.valid? do
Repo.insert!(changeset)
{:reply, :ok, socket}
else
{:reply, :error, socket}
end
end
## Intercepting Outgoing Events
When an event is broadcasted with `broadcast/3`, each channel subscriber can
choose to intercept the event and have their `handle_out/3` callback triggered.
This allows the event's payload to be customized on a socket by socket basis
to append extra information, or conditionally filter the message from being
delivered. If the event is not intercepted with `Phoenix.Channel.intercept/1`,
then the message is pushed directly to the client:
intercept ["new_msg", "user_joined"]
# for every socket subscribing to this topic, append an `is_editable`
# value for client metadata.
def handle_out("new_msg", msg, socket) do
push(socket, "new_msg", Map.merge(msg,
%{is_editable: User.can_edit_message?(socket.assigns[:user], msg)}
))
{:noreply, socket}
end
# do not send broadcasted `"user_joined"` events if this socket's user
# is ignoring the user who joined.
def handle_out("user_joined", msg, socket) do
unless User.ignoring?(socket.assigns[:user], msg.user_id) do
push(socket, "user_joined", msg)
end
{:noreply, socket}
end
## Broadcasting to an external topic
In some cases, you will want to broadcast messages without the context of
a `socket`. This could be for broadcasting from within your channel to an
external topic, or broadcasting from elsewhere in your application like a
controller or another process. Such can be done via your endpoint:
# within channel
def handle_in("new_msg", %{"uid" => uid, "body" => body}, socket) do
...
broadcast_from!(socket, "new_msg", %{uid: uid, body: body})
MyApp.Endpoint.broadcast_from!(self(), "room:superadmin",
"new_msg", %{uid: uid, body: body})
{:noreply, socket}
end
# within controller
def create(conn, params) do
...
MyApp.Endpoint.broadcast!("room:" <> rid, "new_msg", %{uid: uid, body: body})
MyApp.Endpoint.broadcast!("room:superadmin", "new_msg", %{uid: uid, body: body})
redirect(conn, to: "/")
end
## Terminate
On termination, the channel callback `terminate/2` will be invoked with
the error reason and the socket.
If we are terminating because the client left, the reason will be
`{:shutdown, :left}`. Similarly, if we are terminating because the
client connection was closed, the reason will be `{:shutdown, :closed}`.
If any of the callbacks return a `:stop` tuple, it will also
trigger terminate with the reason given in the tuple.
`terminate/2`, however, won't be invoked in case of errors nor in
case of exits. This is the same behaviour as you find in Elixir
  abstractions like `GenServer` and others. Generally speaking, if you
  want to clean something up, it is better to monitor your channel
  process and do the cleanup from another process. Similar to GenServer,
  it would also be possible to set `:trap_exit` to guarantee that `terminate/2`
  is invoked. This practice is not encouraged though.
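  If you do implement `terminate/2`, a minimal sketch could look like the
  following, where `track_departure/1` stands in for whatever cleanup you need
  (it is a hypothetical helper):

      def terminate({:shutdown, :left}, socket) do
        # the client explicitly left the channel
        track_departure(socket.assigns.user_id)
        :ok
      end

      def terminate(_reason, _socket), do: :ok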
## Exit reasons when stopping a channel
When the channel callbacks return a `:stop` tuple, such as:
{:stop, :shutdown, socket}
{:stop, {:error, :enoent}, socket}
the second argument is the exit reason, which follows the same behaviour as
standard `GenServer` exits.
You have three options to choose from when shutting down a channel:
* `:normal` - in such cases, the exit won't be logged and linked processes
do not exit
* `:shutdown` or `{:shutdown, term}` - in such cases, the exit won't be
logged and linked processes exit with the same reason unless they're
trapping exits
* any other term - in such cases, the exit will be logged and linked
processes exit with the same reason unless they're trapping exits
## Subscribing to external topics
Sometimes you may need to programmatically subscribe a socket to external
  topics in addition to the internal `socket.topic`. For example,
imagine you have a bidding system where a remote client dynamically sets
preferences on products they want to receive bidding notifications on.
Instead of requiring a unique channel process and topic per
preference, a more efficient and simple approach would be to subscribe a
single channel to relevant notifications via your endpoint. For example:
defmodule MyApp.Endpoint.NotificationChannel do
use Phoenix.Channel
def join("notification:" <> user_id, %{"ids" => ids}, socket) do
topics = for product_id <- ids, do: "product:#{product_id}"
{:ok, socket
|> assign(:topics, [])
|> put_new_topics(topics)}
end
def handle_in("watch", %{"product_id" => id}, socket) do
{:reply, :ok, put_new_topics(socket, ["product:#{id}"])}
end
def handle_in("unwatch", %{"product_id" => id}, socket) do
{:reply, :ok, MyApp.Endpoint.unsubscribe("product:#{id}")}
end
defp put_new_topics(socket, topics) do
Enum.reduce(topics, socket, fn topic, acc ->
topics = acc.assigns.topics
if topic in topics do
acc
else
:ok = MyApp.Endpoint.subscribe(topic)
assign(acc, :topics, [topic | topics])
end
end)
end
end
  Note: the caller is responsible for preventing duplicate subscriptions.
After calling `subscribe/1` from your endpoint, the same flow applies to
handling regular Elixir messages within your channel. Most often, you'll
simply relay the `%Phoenix.Socket.Broadcast{}` event and payload:
alias Phoenix.Socket.Broadcast
def handle_info(%Broadcast{topic: _, event: event, payload: payload}, socket) do
push(socket, event, payload)
{:noreply, socket}
end
## Hibernation
From Erlang/OTP 20, channels automatically hibernate to save memory
after 15_000 milliseconds of inactivity. This can be customized by
passing the `:hibernate_after` option to `use Phoenix.Channel`:
use Phoenix.Channel, hibernate_after: 60_000
You can also set it to `:infinity` to fully disable it.
## Shutdown
You can configure the shutdown of each channel used when your application
is shutting down by setting the `:shutdown` value on use:
use Phoenix.Channel, shutdown: 5_000
It defaults to 5_000.
## Logging
By default, channel `"join"` and `"handle_in"` events are logged, using
the level `:info` and `:debug`, respectively. Logs can be customized per
event type or disabled by setting the `:log_join` and `:log_handle_in`
options when using `Phoenix.Channel`. For example, the following
configuration logs join events as `:info`, but disables logging for
incoming events:
use Phoenix.Channel, log_join: :info, log_handle_in: false
"""
alias Phoenix.Socket
alias Phoenix.Channel.Server
@type reply :: status :: atom | {status :: atom, response :: map}
@type socket_ref ::
          {transport_pid :: pid, serializer :: module, topic :: binary, ref :: binary,
join_ref :: binary}
@doc """
Handle channel joins by `topic`.
To authorize a socket, return `{:ok, socket}` or `{:ok, reply, socket}`. To
refuse authorization, return `{:error, reason}`.
## Example
def join("room:lobby", payload, socket) do
if authorized?(payload) do
{:ok, socket}
else
{:error, %{reason: "unauthorized"}}
end
end
"""
@callback join(topic :: binary, payload :: map, socket :: Socket.t()) ::
{:ok, Socket.t()}
| {:ok, reply :: map, Socket.t()}
| {:error, reason :: map}
@doc """
Handle incoming `event`s.
## Example
def handle_in("ping", payload, socket) do
{:reply, {:ok, payload}, socket}
end
"""
@callback handle_in(event :: String.t(), payload :: map, socket :: Socket.t()) ::
{:noreply, Socket.t()}
| {:noreply, Socket.t(), timeout | :hibernate}
| {:reply, reply, Socket.t()}
| {:stop, reason :: term, Socket.t()}
| {:stop, reason :: term, reply, Socket.t()}
@doc """
Intercepts outgoing `event`s.
See `intercept/1`.
"""
@callback handle_out(event :: String.t(), payload :: map, socket :: Socket.t()) ::
{:noreply, Socket.t()}
| {:noreply, Socket.t(), timeout | :hibernate}
| {:stop, reason :: term, Socket.t()}
@doc """
Handle regular Elixir process messages.
See `GenServer.handle_info/2`.
"""
@callback handle_info(msg :: term, socket :: Socket.t()) ::
{:noreply, Socket.t()}
| {:stop, reason :: term, Socket.t()}
@doc """
Handle regular GenServer call messages.
See `GenServer.handle_call/3`.
"""
@callback handle_call(msg :: term, from :: {pid, tag :: term}, socket :: Socket.t()) ::
{:reply, response :: term, Socket.t()}
| {:noreply, Socket.t()}
| {:stop, reason :: term, Socket.t()}
@doc """
Handle regular GenServer cast messages.
See `GenServer.handle_cast/2`.
"""
@callback handle_cast(msg :: term, socket :: Socket.t()) ::
{:noreply, Socket.t()}
| {:stop, reason :: term, Socket.t()}
@doc false
@callback code_change(old_vsn, Socket.t(), extra :: term) ::
{:ok, Socket.t()}
| {:error, reason :: term}
when old_vsn: term | {:down, term}
@doc """
Invoked when the channel process is about to exit.
See `GenServer.terminate/2`.
"""
@callback terminate(
reason :: :normal | :shutdown | {:shutdown, :left | :closed | term},
Socket.t()
) ::
term
@optional_callbacks handle_in: 3,
handle_out: 3,
handle_info: 2,
handle_call: 3,
handle_cast: 2,
code_change: 3,
terminate: 2
defmacro __using__(opts \\ []) do
quote do
opts = unquote(opts)
@behaviour unquote(__MODULE__)
@on_definition unquote(__MODULE__)
@before_compile unquote(__MODULE__)
@phoenix_intercepts []
@phoenix_log_join Keyword.get(opts, :log_join, :info)
@phoenix_log_handle_in Keyword.get(opts, :log_handle_in, :debug)
@phoenix_hibernate_after Keyword.get(opts, :hibernate_after, 15_000)
@phoenix_shutdown Keyword.get(opts, :shutdown, 5000)
import unquote(__MODULE__)
import Phoenix.Socket, only: [assign: 3]
def child_spec(init_arg) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [init_arg]},
shutdown: @phoenix_shutdown,
restart: :temporary
}
end
def start_link(triplet) do
GenServer.start_link(Phoenix.Channel.Server, triplet,
hibernate_after: @phoenix_hibernate_after
)
end
def __socket__(:private) do
%{log_join: @phoenix_log_join, log_handle_in: @phoenix_log_handle_in}
end
end
end
defmacro __before_compile__(_) do
quote do
def __intercepts__, do: @phoenix_intercepts
end
end
@doc """
Defines which Channel events to intercept for `handle_out/3` callbacks.
By default, broadcasted events are pushed directly to the client, but
intercepting events gives your channel a chance to customize the event
for the client to append extra information or filter the message from being
delivered.
*Note*: intercepting events can introduce significantly more overhead if a
large number of subscribers must customize a message since the broadcast will
be encoded N times instead of a single shared encoding across all subscribers.
## Examples
intercept ["new_msg"]
def handle_out("new_msg", payload, socket) do
push(socket, "new_msg", Map.merge(payload,
is_editable: User.can_edit_message?(socket.assigns[:user], payload)
))
{:noreply, socket}
end
`handle_out/3` callbacks must return one of:
{:noreply, Socket.t} |
{:noreply, Socket.t, timeout | :hibernate} |
{:stop, reason :: term, Socket.t}
"""
defmacro intercept(events) do
quote do
@phoenix_intercepts unquote(events)
end
end
@doc false
def __on_definition__(env, :def, :handle_out, [event, _payload, _socket], _, _)
when is_binary(event) do
unless event in Module.get_attribute(env.module, :phoenix_intercepts) do
IO.write(
"#{Path.relative_to(env.file, File.cwd!())}:#{env.line}: [warning] " <>
"An intercept for event \"#{event}\" has not yet been defined in #{env.module}.handle_out/3. " <>
"Add \"#{event}\" to your list of intercepted events with intercept/1"
)
end
end
def __on_definition__(_env, _kind, _name, _args, _guards, _body) do
:ok
end
@doc """
Broadcast an event to all subscribers of the socket topic.
The event's message must be a serializable map.
## Examples
iex> broadcast(socket, "new_message", %{id: 1, content: "hello"})
:ok
"""
def broadcast(socket, event, message) do
%{pubsub_server: pubsub_server, topic: topic} = assert_joined!(socket)
Server.broadcast(pubsub_server, topic, event, message)
end
@doc """
Same as `broadcast/3`, but raises if broadcast fails.
"""
def broadcast!(socket, event, message) do
%{pubsub_server: pubsub_server, topic: topic} = assert_joined!(socket)
Server.broadcast!(pubsub_server, topic, event, message)
end
@doc """
Broadcast event from pid to all subscribers of the socket topic.
The channel that owns the socket will not receive the published
message. The event's message must be a serializable map.
## Examples
iex> broadcast_from(socket, "new_message", %{id: 1, content: "hello"})
:ok
"""
def broadcast_from(socket, event, message) do
%{pubsub_server: pubsub_server, topic: topic, channel_pid: channel_pid} =
assert_joined!(socket)
Server.broadcast_from(pubsub_server, channel_pid, topic, event, message)
end
@doc """
Same as `broadcast_from/3`, but raises if broadcast fails.
"""
def broadcast_from!(socket, event, message) do
%{pubsub_server: pubsub_server, topic: topic, channel_pid: channel_pid} =
assert_joined!(socket)
Server.broadcast_from!(pubsub_server, channel_pid, topic, event, message)
end
@doc """
Sends event to the socket.
The event's message must be a serializable map.
## Examples
iex> push(socket, "new_message", %{id: 1, content: "hello"})
:ok
"""
def push(socket, event, message) do
%{transport_pid: transport_pid, topic: topic} = assert_joined!(socket)
Server.push(transport_pid, topic, event, message, socket.serializer)
end
@doc """
Replies asynchronously to a socket push.
Useful when you need to reply to a push that can't otherwise be handled using
the `{:reply, {status, payload}, socket}` return from your `handle_in`
callbacks. `reply/2` will be used in the rare cases you need to perform work in
another process and reply when finished by generating a reference to the push
with `socket_ref/1`.
*Note*: In such cases, a `socket_ref` should be generated and
passed to the external process, so the `socket` itself is not leaked outside
the channel. The `socket` holds information such as assigns and transport
configuration, so it's important to not copy this information outside of the
channel that owns it.
## Examples
def handle_in("work", payload, socket) do
Worker.perform(payload, socket_ref(socket))
{:noreply, socket}
end
def handle_info({:work_complete, result, ref}, socket) do
reply(ref, {:ok, result})
{:noreply, socket}
end
"""
@spec reply(socket_ref, reply) :: :ok
def reply(socket_ref, status) when is_atom(status) do
reply(socket_ref, {status, %{}})
end
def reply({transport_pid, serializer, topic, ref, join_ref}, {status, payload}) do
Server.reply(transport_pid, join_ref, ref, topic, {status, payload}, serializer)
end
@doc """
Generates a `socket_ref` for an async reply.
See `reply/2` for example usage.
"""
@spec socket_ref(Socket.t()) :: socket_ref
def socket_ref(%Socket{joined: true, ref: ref} = socket) when not is_nil(ref) do
{socket.transport_pid, socket.serializer, socket.topic, ref, socket.join_ref}
end
def socket_ref(_socket) do
raise ArgumentError, """
socket refs can only be generated for a socket that has joined with a push ref
"""
end
defp assert_joined!(%Socket{joined: true} = socket) do
socket
end
defp assert_joined!(%Socket{joined: false}) do
raise """
push/3, reply/2, and broadcast/3 can only be called after the socket has finished joining.
To push a message on join, send to self and handle in handle_info/2. For example:
def join(topic, auth_msg, socket) do
...
          send(self(), :after_join)
{:ok, socket}
end
def handle_info(:after_join, socket) do
push(socket, "feed", %{list: feed_items(socket)})
{:noreply, socket}
end
"""
end
end
| 33.805071 | 107 | 0.653509 |
9ef7e0d0aeb97fb3fa7fb2d10c8092adb64e8054 | 1,282 | ex | Elixir | chapter_11/todo_web/lib/todo/web.ex | librity/elixir_in_action | d2df441ceb7e6a0d3f18bc3ab3c59570125fcdec | [
"MIT"
] | 3 | 2021-04-22T11:55:58.000Z | 2021-08-22T13:19:56.000Z | chapter_11/todo_web/lib/todo/web.ex | librity/elixir_in_action | d2df441ceb7e6a0d3f18bc3ab3c59570125fcdec | [
"MIT"
] | null | null | null | chapter_11/todo_web/lib/todo/web.ex | librity/elixir_in_action | d2df441ceb7e6a0d3f18bc3ab3c59570125fcdec | [
"MIT"
] | 3 | 2021-04-22T21:19:45.000Z | 2021-08-22T13:20:03.000Z | defmodule Todo.Web do
use Plug.Router
alias Todo.Cache.Client, as: CacheClient
alias Todo.Server.Client, as: ServerClient
plug(:match)
plug(:dispatch)
def child_spec(_) do
Plug.Adapters.Cowboy.child_spec(
scheme: :http,
options: [port: 5454],
plug: __MODULE__
)
end
get "/entries" do
conn = Plug.Conn.fetch_query_params(conn)
list_name = Map.fetch!(conn.params, "list")
date = conn.params |> Map.fetch!("date") |> Date.from_iso8601!()
entries =
list_name
|> CacheClient.server_process()
|> ServerClient.entries(date)
formatted_entries =
entries
|> Stream.map(&"#{&1.date} #{&1.title}")
|> Enum.join("\n")
conn
|> Plug.Conn.put_resp_content_type("text/plain")
|> Plug.Conn.send_resp(200, formatted_entries)
end
post "/add_entry" do
conn = Plug.Conn.fetch_query_params(conn)
list_name = Map.fetch!(conn.params, "list")
title = Map.fetch!(conn.params, "title")
date = conn.params |> Map.fetch!("date") |> Date.from_iso8601!()
list_name
|> CacheClient.server_process()
|> ServerClient.add_entry(%{title: title, date: date})
conn
|> Plug.Conn.put_resp_content_type("text/plain")
|> Plug.Conn.send_resp(200, "OK")
end
end
| 24.188679 | 68 | 0.637285 |
9ef7f24bcfad96071e386a632d0d829c19adae1f | 1,115 | exs | Elixir | elixir/day3/states/config/config.exs | hemslo/seven-in-seven | 5c772abf8e0887675e56a9478be663807c79c3a8 | [
"MIT"
] | null | null | null | elixir/day3/states/config/config.exs | hemslo/seven-in-seven | 5c772abf8e0887675e56a9478be663807c79c3a8 | [
"MIT"
] | null | null | null | elixir/day3/states/config/config.exs | hemslo/seven-in-seven | 5c772abf8e0887675e56a9478be663807c79c3a8 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :states, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:states, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 35.967742 | 73 | 0.750673 |
9ef8224be35507dfc5d915668b67d1e398f0b729 | 196 | ex | Elixir | exercises/practice/square-root/lib/square_root.ex | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/practice/square-root/lib/square_root.ex | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/practice/square-root/lib/square_root.ex | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule SquareRoot do
@doc """
Calculate the integer square root of a positive integer
"""
@spec calculate(radicand :: pos_integer) :: pos_integer
  def calculate(radicand) do
    # Integer Newton's method: refine the estimate until it stops decreasing.
    do_calculate(radicand, radicand)
  end

  defp do_calculate(radicand, guess) do
    next = div(guess + div(radicand, guess), 2)
    if next < guess, do: do_calculate(radicand, next), else: guess
  end
end
| 21.777778 | 57 | 0.72449 |
9ef840ece43aae1527d5827847d12c30b213150a | 1,304 | exs | Elixir | apps/activity_logger/priv/repo/migrations/20180907173648_add_audit.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/activity_logger/priv/repo/migrations/20180907173648_add_audit.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/activity_logger/priv/repo/migrations/20180907173648_add_audit.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule ActivityLogger.Repo.Migrations.AddAudit do
use Ecto.Migration
def change do
create table(:audit, primary_key: false) do
add :uuid, :uuid, primary_key: true
add :id, :string, null: false
add :action, :string, null: false
add :target_uuid, :uuid, null: false
add :target_type, :string, null: false
add :target_changes, :map, null: false
add :target_encrypted_metadata, :binary
add :originator_uuid, :uuid
add :originator_type, :string
add :metadata, :map
add :inserted_at, :naive_datetime_usec
end
create index(:audit, [:target_uuid, :target_type])
create index(:audit, [:originator_uuid, :originator_type])
end
end
| 31.047619 | 74 | 0.712423 |
9ef854b1cc393c280b05319f339959969fe9c1db | 88 | exs | Elixir | phoenix_webpack/test/views/page_view_test.exs | davejmac/phoenix-webpack | b7d0076069529311abfa6d4b6eda332bb970a24b | [
"MIT"
] | null | null | null | phoenix_webpack/test/views/page_view_test.exs | davejmac/phoenix-webpack | b7d0076069529311abfa6d4b6eda332bb970a24b | [
"MIT"
] | null | null | null | phoenix_webpack/test/views/page_view_test.exs | davejmac/phoenix-webpack | b7d0076069529311abfa6d4b6eda332bb970a24b | [
"MIT"
] | null | null | null | defmodule PhoenixWebpack.PageViewTest do
use PhoenixWebpack.ConnCase, async: true
end
| 22 | 42 | 0.840909 |
9ef89e3290a9cdbfeeff35d46426e7c8629b96a1 | 912 | exs | Elixir | mix.exs | shtanaka/elixir-transfer-control | 1b325a4c0c5ee10551515d7a440e503ed68d4ddf | [
"MIT"
] | null | null | null | mix.exs | shtanaka/elixir-transfer-control | 1b325a4c0c5ee10551515d7a440e503ed68d4ddf | [
"MIT"
] | 2 | 2021-03-10T02:00:57.000Z | 2021-05-10T21:22:39.000Z | mix.exs | shtanaka/elixir-transfer-control | 1b325a4c0c5ee10551515d7a440e503ed68d4ddf | [
"MIT"
] | null | null | null | defmodule Tfcon.Umbrella.MixProject do
use Mix.Project
def project do
[
apps_path: "apps",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options.
#
# Dependencies listed here are available only for this project
# and cannot be accessed from applications inside the apps folder
defp deps do
[
{:guardian, "~> 1.2"},
{:comeonin, "~> 5.1"},
{:bcrypt_elixir, "~> 2.0"},
{:mock, "~> 0.3.0", only: :test},
{:earmark, "~> 1.2", only: :dev},
{:ex_doc, "~> 0.19", only: :dev},
{:phoenix_swagger, "~> 0.8"},
{:ex_json_schema, "~> 0.5"},
{:cors_plug, "~> 2.0"}
]
end
end
| 24 | 77 | 0.547149 |
9ef8ae6cb9879abd23a5e3caccd09fbd330554c7 | 15,325 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/forwarding_rule.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/forwarding_rule.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/forwarding_rule.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.ForwardingRule do
@moduledoc """
Represents a Forwarding Rule resource.
Forwarding rule resources in GCP can be either regional or global in scope:
* [Global](/compute/docs/reference/rest/{$api_version}/globalForwardingRules) * [Regional](/compute/docs/reference/rest/{$api_version}/forwardingRules)
A forwarding rule and its corresponding IP address represent the frontend configuration of a Google Cloud Platform load balancer. Forwarding rules can also reference target instances and Cloud VPN Classic gateways (targetVpnGateway).
For more information, read Forwarding rule concepts and Using protocol forwarding.
(== resource_for {$api_version}.forwardingRules ==) (== resource_for {$api_version}.globalForwardingRules ==) (== resource_for {$api_version}.regionForwardingRules ==)
## Attributes
* `IPAddress` (*type:* `String.t`, *default:* `nil`) - IP address that this forwarding rule serves. When a client sends traffic to this IP address, the forwarding rule directs the traffic to the target that you specify in the forwarding rule.
If you don't specify a reserved IP address, an ephemeral IP address is assigned. Methods for specifying an IP address:
* IPv4 dotted decimal, as in `100.1.2.3` * Full URL, as in https://www.googleapis.com/compute/v1/projects/project_id/regions/region/addresses/address-name * Partial URL or by name, as in: * projects/project_id/regions/region/addresses/address-name * regions/region/addresses/address-name * global/addresses/address-name * address-name
The loadBalancingScheme and the forwarding rule's target determine the type of IP address that you can use. For detailed information, refer to [IP address specifications](/load-balancing/docs/forwarding-rule-concepts#ip_address_specifications).
Must be set to `0.0.0.0` when the target is targetGrpcProxy that has validateForProxyless field set to true.
* `IPProtocol` (*type:* `String.t`, *default:* `nil`) - The IP protocol to which this rule applies. For protocol forwarding, valid options are TCP, UDP, ESP, AH, SCTP or ICMP.
For Internal TCP/UDP Load Balancing, the load balancing scheme is INTERNAL, and one of TCP or UDP are valid. For Traffic Director, the load balancing scheme is INTERNAL_SELF_MANAGED, and only TCPis valid. For Internal HTTP(S) Load Balancing, the load balancing scheme is INTERNAL_MANAGED, and only TCP is valid. For HTTP(S), SSL Proxy, and TCP Proxy Load Balancing, the load balancing scheme is EXTERNAL and only TCP is valid. For Network TCP/UDP Load Balancing, the load balancing scheme is EXTERNAL, and one of TCP or UDP is valid.
* `allPorts` (*type:* `boolean()`, *default:* `nil`) - This field is used along with the backend_service field for internal load balancing or with the target field for internal TargetInstance. This field cannot be used with port or portRange fields.
When the load balancing scheme is INTERNAL and protocol is TCP/UDP, specify this field to allow packets addressed to any ports will be forwarded to the backends configured with this forwarding rule.
* `allowGlobalAccess` (*type:* `boolean()`, *default:* `nil`) - This field is used along with the backend_service field for internal load balancing or with the target field for internal TargetInstance. If the field is set to TRUE, clients can access ILB from all regions. Otherwise only allows access from clients in the same region as the internal load balancer.
* `backendService` (*type:* `String.t`, *default:* `nil`) - This field is only used for INTERNAL load balancing.
For internal load balancing, this field identifies the BackendService resource to receive the matched traffic.
* `creationTimestamp` (*type:* `String.t`, *default:* `nil`) - [Output Only] Creation timestamp in RFC3339 text format.
* `description` (*type:* `String.t`, *default:* `nil`) - An optional description of this resource. Provide this property when you create the resource.
* `fingerprint` (*type:* `String.t`, *default:* `nil`) - Fingerprint of this resource. A hash of the contents stored in this object. This field is used in optimistic locking. This field will be ignored when inserting a ForwardingRule. Include the fingerprint in patch request to ensure that you do not overwrite changes that were applied from another concurrent request.
To see the latest fingerprint, make a get() request to retrieve a ForwardingRule.
* `id` (*type:* `String.t`, *default:* `nil`) - [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* `ipVersion` (*type:* `String.t`, *default:* `nil`) - The IP Version that will be used by this forwarding rule. Valid options are IPV4 or IPV6. This can only be specified for an external global forwarding rule.
* `isMirroringCollector` (*type:* `boolean()`, *default:* `nil`) - Indicates whether or not this load balancer can be used as a collector for packet mirroring. To prevent mirroring loops, instances behind this load balancer will not have their traffic mirrored even if a PacketMirroring rule applies to them. This can only be set to true for load balancers that have their loadBalancingScheme set to INTERNAL.
* `kind` (*type:* `String.t`, *default:* `compute#forwardingRule`) - [Output Only] Type of the resource. Always compute#forwardingRule for Forwarding Rule resources.
* `loadBalancingScheme` (*type:* `String.t`, *default:* `nil`) - Specifies the forwarding rule type.
- EXTERNAL is used for:
- Classic Cloud VPN gateways
- Protocol forwarding to VMs from an external IP address
- The following load balancers: HTTP(S), SSL Proxy, TCP Proxy, and Network TCP/UDP
- INTERNAL is used for:
- Protocol forwarding to VMs from an internal IP address
- Internal TCP/UDP load balancers
- INTERNAL_MANAGED is used for:
- Internal HTTP(S) load balancers
- INTERNAL_SELF_MANAGED is used for:
- Traffic Director
For more information about forwarding rules, refer to Forwarding rule concepts.
* `metadataFilters` (*type:* `list(GoogleApi.Compute.V1.Model.MetadataFilter.t)`, *default:* `nil`) - Opaque filter criteria used by Loadbalancer to restrict routing configuration to a limited set of xDS compliant clients. In their xDS requests to Loadbalancer, xDS clients present node metadata. When there is a match, the relevant configuration is made available to those proxies. Otherwise, all the resources (e.g. TargetHttpProxy, UrlMap) referenced by the ForwardingRule will not be visible to those proxies.
For each metadataFilter in this list, if its filterMatchCriteria is set to MATCH_ANY, at least one of the filterLabels must match the corresponding label provided in the metadata. If its filterMatchCriteria is set to MATCH_ALL, then all of its filterLabels must match with corresponding labels provided in the metadata. If multiple metadataFilters are specified, all of them need to be satisfied in order to be considered a match.
      metadataFilters specified here will be applied before those specified in the UrlMap that this ForwardingRule references.
metadataFilters only applies to Loadbalancers that have their loadBalancingScheme set to INTERNAL_SELF_MANAGED.
* `name` (*type:* `String.t`, *default:* `nil`) - Name of the resource; provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
* `network` (*type:* `String.t`, *default:* `nil`) - This field is not used for external load balancing.
For INTERNAL and INTERNAL_SELF_MANAGED load balancing, this field identifies the network that the load balanced IP should belong to for this Forwarding Rule. If this field is not specified, the default network will be used.
* `networkTier` (*type:* `String.t`, *default:* `nil`) - This signifies the networking tier used for configuring this load balancer and can only take the following values: PREMIUM, STANDARD.
For regional ForwardingRule, the valid values are PREMIUM and STANDARD. For GlobalForwardingRule, the valid value is PREMIUM.
If this field is not specified, it is assumed to be PREMIUM. If IPAddress is specified, this value must be equal to the networkTier of the Address.
* `portRange` (*type:* `String.t`, *default:* `nil`) - When the load balancing scheme is EXTERNAL, INTERNAL_SELF_MANAGED and INTERNAL_MANAGED, you can specify a port_range. Use with a forwarding rule that points to a target proxy or a target pool. Do not use with a forwarding rule that points to a backend service. This field is used along with the target field for TargetHttpProxy, TargetHttpsProxy, TargetSslProxy, TargetTcpProxy, TargetGrpcProxy, TargetVpnGateway, TargetPool, TargetInstance.
Applicable only when IPProtocol is TCP, UDP, or SCTP, only packets addressed to ports in the specified range will be forwarded to target. Forwarding rules with the same [IPAddress, IPProtocol] pair must have disjoint port ranges.
Some types of forwarding target have constraints on the acceptable ports:
- TargetHttpProxy: 80, 8080
- TargetHttpsProxy: 443
- TargetGrpcProxy: Any ports
- TargetTcpProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995, 1688, 1883, 5222
- TargetSslProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995, 1688, 1883, 5222
- TargetVpnGateway: 500, 4500
* `ports` (*type:* `list(String.t)`, *default:* `nil`) - This field is used along with the backend_service field for internal load balancing.
When the load balancing scheme is INTERNAL, a list of ports can be configured, for example, ['80'], ['8000','9000']. Only packets addressed to these ports are forwarded to the backends configured with the forwarding rule.
If the forwarding rule's loadBalancingScheme is INTERNAL, you can specify ports in one of the following ways:
* A list of up to five ports, which can be non-contiguous * Keyword ALL, which causes the forwarding rule to forward traffic on any port of the forwarding rule's protocol.
* `region` (*type:* `String.t`, *default:* `nil`) - [Output Only] URL of the region where the regional forwarding rule resides. This field is not applicable to global forwarding rules. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body.
* `selfLink` (*type:* `String.t`, *default:* `nil`) - [Output Only] Server-defined URL for the resource.
* `serviceLabel` (*type:* `String.t`, *default:* `nil`) - An optional prefix to the service name for this Forwarding Rule. If specified, the prefix is the first label of the fully qualified service name.
The label must be 1-63 characters long, and comply with RFC1035. Specifically, the label must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
This field is only used for internal load balancing.
* `serviceName` (*type:* `String.t`, *default:* `nil`) - [Output Only] The internal fully qualified service name for this Forwarding Rule.
This field is only used for internal load balancing.
* `subnetwork` (*type:* `String.t`, *default:* `nil`) - This field is only used for INTERNAL load balancing.
For internal load balancing, this field identifies the subnetwork that the load balanced IP should belong to for this Forwarding Rule.
If the network specified is in auto subnet mode, this field is optional. However, if the network is in custom subnet mode, a subnetwork must be specified.
* `target` (*type:* `String.t`, *default:* `nil`) - The URL of the target resource to receive the matched traffic. For regional forwarding rules, this target must live in the same region as the forwarding rule. For global forwarding rules, this target must be a global load balancing resource. The forwarded traffic must be of a type appropriate to the target object. For INTERNAL_SELF_MANAGED load balancing, only targetHttpProxy and targetGrpcProxy are valid, not targetHttpsProxy.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:IPAddress => String.t(),
:IPProtocol => String.t(),
:allPorts => boolean(),
:allowGlobalAccess => boolean(),
:backendService => String.t(),
:creationTimestamp => String.t(),
:description => String.t(),
:fingerprint => String.t(),
:id => String.t(),
:ipVersion => String.t(),
:isMirroringCollector => boolean(),
:kind => String.t(),
:loadBalancingScheme => String.t(),
:metadataFilters => list(GoogleApi.Compute.V1.Model.MetadataFilter.t()),
:name => String.t(),
:network => String.t(),
:networkTier => String.t(),
:portRange => String.t(),
:ports => list(String.t()),
:region => String.t(),
:selfLink => String.t(),
:serviceLabel => String.t(),
:serviceName => String.t(),
:subnetwork => String.t(),
:target => String.t()
}
field(:IPAddress)
field(:IPProtocol)
field(:allPorts)
field(:allowGlobalAccess)
field(:backendService)
field(:creationTimestamp)
field(:description)
field(:fingerprint)
field(:id)
field(:ipVersion)
field(:isMirroringCollector)
field(:kind)
field(:loadBalancingScheme)
field(:metadataFilters, as: GoogleApi.Compute.V1.Model.MetadataFilter, type: :list)
field(:name)
field(:network)
field(:networkTier)
field(:portRange)
field(:ports, type: :list)
field(:region)
field(:selfLink)
field(:serviceLabel)
field(:serviceName)
field(:subnetwork)
field(:target)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.ForwardingRule do
def decode(value, options) do
GoogleApi.Compute.V1.Model.ForwardingRule.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.ForwardingRule do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 78.589744 | 539 | 0.732594 |
9ef8d05c66783dcfcbe66371fc55e48dfaf6053e | 1,152 | exs | Elixir | apps/kv/config/config.exs | rockerBOO/kv_umbrella | 1dbf121b48022c0aaaf1b75d29085d9d2130451d | [
"MIT"
] | null | null | null | apps/kv/config/config.exs | rockerBOO/kv_umbrella | 1dbf121b48022c0aaaf1b75d29085d9d2130451d | [
"MIT"
] | null | null | null | apps/kv/config/config.exs | rockerBOO/kv_umbrella | 1dbf121b48022c0aaaf1b75d29085d9d2130451d | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for third-
# party users, it should be done in your mix.exs file.
# Sample configuration:
#
# config :logger,
# level: :info
#
# config :logger, :console,
# format: "$date $time [$level] $metadata$message\n",
# metadata: [:user_id]
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
config :kv, :routing_table,
[{?a..?m, :"foo@Monolith"},
{?n..?z, :"food@Monolith"}] | 38.4 | 73 | 0.716146 |
9ef8fa00ef0f99b019a5b40e6858a44e66d5010f | 2,671 | exs | Elixir | mix.exs | bortzmeyer/ex_doc | dd10dd43c0ac135ef1d9c029e75a5da251f550af | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | mix.exs | bortzmeyer/ex_doc | dd10dd43c0ac135ef1d9c029e75a5da251f550af | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | mix.exs | bortzmeyer/ex_doc | dd10dd43c0ac135ef1d9c029e75a5da251f550af | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | defmodule ExDoc.Mixfile do
use Mix.Project
@source_url "https://github.com/elixir-lang/ex_doc"
@version "0.26.0"
def project do
[
app: :ex_doc,
version: @version,
elixir: "~> 1.10",
deps: deps(),
aliases: aliases(),
package: package(),
escript: escript(),
elixirc_paths: elixirc_paths(Mix.env()),
source_url: @source_url,
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [coveralls: :test],
description: "ExDoc is a documentation generation tool for Elixir",
docs: docs()
]
end
def application do
[
extra_applications: [:eex, :crypto],
mod: {ExDoc.Application, []}
]
end
defp deps do
[
{:earmark_parser, "~> 1.4.0"},
{:makeup_elixir, "~> 0.14"},
{:makeup_erlang, "~> 0.1"},
{:jason, "~> 1.2", only: :test}
]
end
defp aliases do
[
build: ["cmd npm run --prefix assets build", "compile --force", "docs"],
clean: [&clean_test_fixtures/1, "clean"],
fix: ["format", "cmd npm run --prefix assets lint:fix"],
lint: ["format --check-formatted", "cmd npm run --prefix assets lint"],
setup: ["deps.get", "cmd npm install --prefix assets"]
]
end
defp package do
[
licenses: ["Apache-2.0"],
maintainers: [
"José Valim",
"Eksperimental",
"Milton Mazzarri",
"Friedel Ziegelmayer",
"Dmitry",
"Wojtek Mach"
],
files: ["formatters", "lib", "mix.exs", "LICENSE", "CHANGELOG.md", "README.md"],
links: %{
"GitHub" => @source_url,
"Changelog" => "https://hexdocs.pm/ex_doc/changelog.html",
"Writing documentation" => "https://hexdocs.pm/elixir/writing-documentation.html"
}
]
end
defp escript do
[
main_module: ExDoc.CLI
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp docs do
[
main: "readme",
extras: [
"README.md",
"CHANGELOG.md",
"LICENSE"
],
source_ref: "v#{@version}",
source_url: @source_url,
groups_for_modules: [
Markdown: [
ExDoc.Markdown,
ExDoc.Markdown.Earmark
],
"Formatter API": [
ExDoc.Config,
ExDoc.Formatter.EPUB,
ExDoc.Formatter.HTML,
ExDoc.Formatter.HTML.Autolink,
ExDoc.FunctionNode,
ExDoc.ModuleNode,
ExDoc.TypeNode
]
],
skip_undefined_reference_warnings_on: ["CHANGELOG.md"]
]
end
defp clean_test_fixtures(_args) do
File.rm_rf("test/tmp")
end
end
| 23.637168 | 89 | 0.5541 |
9ef90913603882e68d65aef05518676e6cb8a670 | 3,567 | ex | Elixir | lib/restlax/client.ex | kianmeng/restlax | 6cded7cd34c49216d182efe4f71e62321545678d | [
"MIT"
] | 1 | 2021-02-09T09:25:37.000Z | 2021-02-09T09:25:37.000Z | lib/restlax/client.ex | kianmeng/restlax | 6cded7cd34c49216d182efe4f71e62321545678d | [
"MIT"
] | 3 | 2021-05-17T01:33:35.000Z | 2022-01-27T00:23:31.000Z | lib/restlax/client.ex | kianmeng/restlax | 6cded7cd34c49216d182efe4f71e62321545678d | [
"MIT"
] | 1 | 2022-01-27T00:06:03.000Z | 2022-01-27T00:06:03.000Z | defmodule Restlax.Client do
@moduledoc """
## Rest Client builder
### Options
  * `:adapter` - `module()` One of the Tesla adapters or your own customized adapter
* `:adapter_opts` - `keyword()` options for the adapter
* `:logger_opts` - `keyword()` options for `Tesla.Middleware.Logger`
* `:base_url` - `String.t()` Base URL, e.g. "https://api.cloudflare.com/client/v4"
  * `:encoding` - `encoding()` `:json`, `:form_url_encoded` or `:raw`
  * `:encoding_opts` - `keyword()` options for `Tesla.Middleware.JSON` or `Tesla.Middleware.FormUrlencoded`
* `:headers` - `[{String.t(), String.t()}]` Default headers, can be overridden per request
### Example
      defmodule MyClient do
        use Restlax.Client,
          base_url: "https://my-awesome.app/api/v1",
          adapter: Tesla.Adapter.Hackney
      end
*Note: You may pick an adapter directly like in the above code. However, it's preferred to not pick one
if your API client is a library. Leaving it out allows the users of your library to choose one
for themselves.*
For example, if your users already use Mint in their code base, they can use this configuration
config :tesla, Cloudflare.Client, adapter: Tesla.Adapter.Mint
to make the Cloudflare API client use the Mint adapter of Tesla and avoid adding another dependency
### Customization
Feel free to add more middlewares like so
defmodule MyApp.Auth do
@behaviour Tesla.Middleware
@impl Tesla.Middleware
def call(env, next, _) do
auth_token = env.opts[:auth_token] || Application.get_env(:my_app, :auth_token)
          headers = (auth_token && [{"authorization", "Bearer \#{auth_token}"}]) || []
Tesla.run(%{env | headers: headers ++ env.headers}, next)
end
end
      defmodule MyApp.MyClient do
        use Restlax.Client,
          base_url: "https://my-awesome.app/api/v1",
          adapter: Tesla.Adapter.Hackney

        plug MyApp.Auth
      end
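
  Once such a module is defined it behaves like any Tesla client, so requests
  can be issued directly. The paths below are placeholders for whatever your
  API actually exposes:

      MyApp.MyClient.get("/users")
      MyApp.MyClient.post("/users", %{name: "Ada"})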
"""
@type encoding :: :json | :form_url_encoded | :raw
@type option ::
{:adapter, module()}
| {:adapter_opts, keyword()}
| {:logger_opts, keyword()}
| {:base_url, String.t()}
| {:encoding, encoding()}
| {:encoding_opts, keyword()}
| {:headers, [{String.t(), String.t()}]}
@spec __using__(opts :: [option()]) :: Macro.t()
defmacro __using__(opts) do
adapter = Keyword.get(opts, :adapter)
adapter_opts = Keyword.get(opts, :adapter_opts, [])
logger_opts = Keyword.get(opts, :logger_opts, [])
base_url = Keyword.fetch!(opts, :base_url)
encoding = Keyword.get(opts, :encoding, :json)
encoding_opts = Keyword.get(opts, :encoding_opts, [])
headers = Keyword.get(opts, :headers)
quote do
use Tesla
if unquote(adapter) do
adapter unquote(adapter), unquote(adapter_opts)
end
plug Tesla.Middleware.Logger, unquote(logger_opts)
plug Tesla.Middleware.BaseUrl, unquote(base_url)
plug Tesla.Middleware.PathParams
case unquote(encoding) do
:json ->
plug Tesla.Middleware.JSON, unquote(encoding_opts)
:form_url_encoded ->
plug Tesla.Middleware.FormUrlencoded, unquote(encoding_opts)
:raw ->
require Logger
Logger.info("No encoding/decoding is configured for #{__MODULE__}")
unknown ->
raise "Unknown encoding: #{inspect(unknown)}"
end
if unquote(headers) do
plug Tesla.Middleware.Headers, unquote(headers)
end
end
end
end
| 30.487179 | 105 | 0.634987 |
9ef90fa63560c3e50480937aa31d6c693ee49a4c | 470 | ex | Elixir | lib/daily_meals_web/controllers/fallback_controller.ex | vinolivae/daily_meals | 8f375cbb7eaf54abfa6b683705bb8075067f9078 | [
"MIT"
] | null | null | null | lib/daily_meals_web/controllers/fallback_controller.ex | vinolivae/daily_meals | 8f375cbb7eaf54abfa6b683705bb8075067f9078 | [
"MIT"
] | null | null | null | lib/daily_meals_web/controllers/fallback_controller.ex | vinolivae/daily_meals | 8f375cbb7eaf54abfa6b683705bb8075067f9078 | [
"MIT"
] | null | null | null | defmodule DailyMealsWeb.FallbackController do
use DailyMealsWeb, :controller
alias DailyMeals.Error
alias DailyMealsWeb.ErrorView
def call(conn, {:error, %Error{status: status, result: result}}) do
conn
|> put_status(status)
|> put_view(ErrorView)
|> render("error.json", result: result)
end
def call(conn, {:error, error}) do
conn
|> put_status(500)
|> put_view(ErrorView)
|> render("error.json", result: error)
end
end
| 22.380952 | 69 | 0.676596 |
9ef91e11b3b267f65ffce37873acc000593b832c | 1,106 | exs | Elixir | mix.exs | tashirosota/dynamo_db_migrator | 5f517df225fb64c60880491856e528af021cbafe | [
"Apache-2.0"
] | 7 | 2022-02-03T15:21:42.000Z | 2022-03-28T02:50:49.000Z | mix.exs | tashirosota/dynamo_db_migrator | 5f517df225fb64c60880491856e528af021cbafe | [
"Apache-2.0"
] | 1 | 2022-03-16T00:38:40.000Z | 2022-03-25T13:12:22.000Z | mix.exs | tashirosota/dynamo_db_migrator | 5f517df225fb64c60880491856e528af021cbafe | [
"Apache-2.0"
] | 1 | 2022-03-28T02:50:52.000Z | 2022-03-28T02:50:52.000Z | defmodule DynamoMigration.MixProject do
use Mix.Project
@source_url "https://github.com/tashirosota/dynamo_migration"
  @description "Version management tool for DynamoDB migration files."
def project do
[
app: :dynamo_migration,
version: "0.2.0",
elixir: "~> 1.10",
description: @description,
name: "DynamoMigration",
start_permanent: Mix.env() == :prod,
package: package(),
docs: docs(),
deps: deps(),
dialyzer: [plt_add_apps: [:mix, :eex]]
]
end
def application do
[
extra_applications: [:logger, :hackney, :eex]
]
end
defp package() do
[
licenses: ["Apache-2.0"],
maintainers: ["Sota Tashiro"],
links: %{"GitHub" => @source_url}
]
end
defp docs do
[
main: "readme",
extras: ["README.md"]
]
end
defp deps do
[
{:ex_doc, ">= 0.0.0", only: :dev, runtime: false},
{:dialyxir, "~> 1.0", only: [:dev, :test], runtime: false},
{:ex_aws_dynamo, ">= 3.0.0"},
{:hackney, ">= 0.0.0"},
{:jason, ">= 0.0.0"}
]
end
end
| 21.269231 | 72 | 0.550633 |
9ef93b0f07dc26773a4a0d73b041c412ee47d576 | 267 | ex | Elixir | test/support/test_helpers.ex | girishramnani/timber-elixir | 7fda5c3cb5e765a34524d2ec21cfbd5b30240bd5 | [
"ISC"
] | null | null | null | test/support/test_helpers.ex | girishramnani/timber-elixir | 7fda5c3cb5e765a34524d2ec21cfbd5b30240bd5 | [
"ISC"
] | null | null | null | test/support/test_helpers.ex | girishramnani/timber-elixir | 7fda5c3cb5e765a34524d2ec21cfbd5b30240bd5 | [
"ISC"
] | null | null | null | defmodule Timber.TestHelpers do
  def parse_log_line(line) do
    IO.puts("Splitting #{line}")
    # Split on the first "@metadata" marker only, in case the JSON payload
    # itself happens to contain that substring.
    [message, metadata] = String.split(line, "@metadata", parts: 2)
    metadata_map = Poison.decode!(metadata)
    {message, metadata_map}
  end
end
| 26.7 | 50 | 0.707865 |
9ef9608035022c2ede3def717c80deb4c7abbc33 | 2,302 | ex | Elixir | lib/ws/models/birds.ex | batwicket/phoenix-rest-server | fe3d39b123f6940ec696607e76f47427d872e6ce | [
"Apache-2.0"
] | 3 | 2015-02-26T22:42:32.000Z | 2017-06-01T10:56:17.000Z | lib/ws/models/birds.ex | batwicket/phoenix-rest-server | fe3d39b123f6940ec696607e76f47427d872e6ce | [
"Apache-2.0"
] | null | null | null | lib/ws/models/birds.ex | batwicket/phoenix-rest-server | fe3d39b123f6940ec696607e76f47427d872e6ce | [
"Apache-2.0"
] | null | null | null | defmodule Ws.Models.Bird do
@moduledoc """
A model entry. A struct is currently the preferred data structure vs, say, a record.
"""
defstruct name: "<new>", type: "<type>", age: 0
end
defmodule Ws.Models.Birds do
require Ws.Models.Bird
use GenServer
@moduledoc """
Demonstrates a simple Model consisting of a list of structs. In practice the data
would be stored using ecto, Riak, or some other persistent store. The standard
Ws.Supervisor has been extended to add this process as a child.
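
  A client process might interact with the running server like this (an
  illustrative sketch; the registered name comes from `start_link/0` below):

      :gen_server.call(:bird_data_server_pid, :list_entries)
      :gen_server.cast(:bird_data_server_pid, {:add_entry, %Ws.Models.Bird{name: "Robin", type: "thrush", age: 1}})
      :gen_server.cast(:bird_data_server_pid, {:del_all_entries})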
"""
@doc """
Called by Ws.Supervisor, which also supervises the Phoenix application process
"""
def start_link do
# create an initial entry
tim = %Ws.Models.Bird{name: "Phyre", type: "phoenix", age: 17}
initial_entries = [tim]
# delegate to OTP. initial_entries will be passed to Ws.Models.Birds.init/1
:gen_server.start_link({:local, :bird_data_server_pid}, __MODULE__, initial_entries, [])
end
@doc """
Called by OTP with the initial list of birds
"""
def init(initial_entries) do
{:ok, initial_entries}
end
# Private convenience methods. Alternatively you can expose these to clients and have them
# accept a PID argument so that handle_call/handle_cast can be called. This would expose
# two functions to the global namespace for each call so on general principles it is not
# done here.
  # Add a bird. Convenience function.
  # (A comment rather than @doc, since @doc is discarded for private functions.)
defp add(entries, new_entry) do
[new_entry | entries]
end
  # Delete a bird. Convenience function.
defp del(entries, ex_entry) do
if Enum.any?(entries, fn(x) -> x == ex_entry end) do
List.delete(entries, ex_entry)
else
entries
end
end
# GenServer API. The following functions are called by OTP in response to a client request.
@doc """
List birds
"""
def handle_call(:list_entries, _from, entries) do
{:reply, entries, entries}
end
@doc """
Add a bird
"""
def handle_cast({:add_entry, new_entry}, entries) do
{:noreply, add(entries, new_entry)}
end
  # Delete a bird.
def handle_cast({:del_entry, ex_entry}, entries) do
{:noreply, del(entries, ex_entry)}
end
  # Delete all birds.
  def handle_cast({:del_all_entries}, _entries) do
{:noreply, []}
end
end
| 24.752688 | 93 | 0.678975 |
9ef97201fe5c6f4084ab11ebdab8b6b08c822bcb | 4,384 | ex | Elixir | lib/hexpm/accounts/user.ex | lau/hexpm | beee80f5358a356530debfea35ee65c3a0aa9b25 | [
"Apache-2.0"
] | null | null | null | lib/hexpm/accounts/user.ex | lau/hexpm | beee80f5358a356530debfea35ee65c3a0aa9b25 | [
"Apache-2.0"
] | null | null | null | lib/hexpm/accounts/user.ex | lau/hexpm | beee80f5358a356530debfea35ee65c3a0aa9b25 | [
"Apache-2.0"
] | null | null | null | defmodule Hexpm.Accounts.User do
use Hexpm.Web, :schema
@derive {Hexpm.Web.Stale, assocs: [:emails, :owned_packages, :organizations, :keys]}
@derive {Phoenix.Param, key: :username}
schema "users" do
field :username, :string
field :full_name, :string
field :password, :string
field :service, :boolean, default: false
timestamps()
embeds_one :handles, UserHandles, on_replace: :delete
has_many :emails, Email
has_many :package_owners, PackageOwner
has_many :owned_packages, through: [:package_owners, :package]
has_many :organization_users, OrganizationUser
has_many :organizations, through: [:organization_users, :organization]
has_many :keys, Key
has_many :audit_logs, AuditLog
has_many :password_resets, PasswordReset
end
@username_regex ~r"^[a-z0-9_\-\.]+$"
@reserved_names ~w(me hex hexpm elixir erlang otp)
defp changeset(user, :create, params, confirmed?) do
cast(user, params, ~w(username full_name password)a)
|> validate_required(~w(username password)a)
|> cast_assoc(:emails, required: true, with: &Email.changeset(&1, :first, &2, confirmed?))
|> update_change(:username, &String.downcase/1)
|> validate_length(:username, min: 3)
|> validate_format(:username, @username_regex)
|> validate_exclusion(:username, @reserved_names)
|> unique_constraint(:username, name: "users_username_idx")
|> validate_length(:password, min: 7)
|> validate_confirmation(:password, message: "does not match password")
|> update_change(:password, &Auth.gen_password/1)
end
def build(params, confirmed? \\ not Application.get_env(:hexpm, :user_confirm)) do
changeset(%Hexpm.Accounts.User{}, :create, params, confirmed?)
end
def update_profile(user, params) do
cast(user, params, ~w(full_name)a)
|> cast_embed(:handles)
end
def update_password_no_check(user, params) do
cast(user, params, ~w(password)a)
|> validate_required(~w(password)a)
|> validate_length(:password, min: 7)
|> validate_confirmation(:password, message: "does not match password")
|> update_change(:password, &Auth.gen_password/1)
end
def update_password(user, params) do
password = user.password
user = %{user | password: nil}
cast(user, params, ~w(password)a)
|> validate_required(~w(password)a)
|> validate_length(:password, min: 7)
|> validate_password(:password, password)
|> validate_confirmation(:password, message: "does not match password")
|> update_change(:password, &Auth.gen_password/1)
end
def can_reset_password?(user, key) do
primary_email = email(user, :primary)
Enum.any?(user.password_resets, fn reset ->
PasswordReset.can_reset?(reset, primary_email, key)
end)
end
def email(user, :primary), do: user.emails |> Enum.find(& &1.primary) |> email()
def email(user, :public), do: user.emails |> Enum.find(& &1.public) |> email()
def email(user, :gravatar), do: user.emails |> Enum.find(& &1.gravatar) |> email()
defp email(nil), do: nil
defp email(email), do: email.email
def get(username_or_email, preload \\ []) do
from(
u in Hexpm.Accounts.User,
where:
u.username == ^username_or_email or
^username_or_email in fragment(
"SELECT emails.email FROM emails WHERE emails.user_id = ? and emails.verified",
u.id
),
preload: ^preload
)
end
def public_get(username_or_email, preload \\ []) do
from(
u in Hexpm.Accounts.User,
where:
u.username == ^username_or_email or
^username_or_email in fragment(
"SELECT emails.email FROM emails WHERE emails.user_id = ? and emails.verified and emails.public", u.id
),
preload: ^preload
)
end
def verify_permissions(%User{}, "api", _resource) do
{:ok, nil}
end
def verify_permissions(%User{}, "repositories", nil) do
{:ok, nil}
end
def verify_permissions(%User{}, "repository", nil) do
:error
end
def verify_permissions(%User{} = user, domain, name) when domain in ["repository", "docs"] do
organization = Organizations.get(name)
if organization && Organizations.access?(organization, user, "read") do
{:ok, organization}
else
:error
end
end
def verify_permissions(%User{}, _domain, _resource) do
:error
end
end
| 31.539568 | 114 | 0.671077 |
9ef97b15171033f40794857665eef44bbbbdfc26 | 7,405 | ex | Elixir | lib/playwright/config.ex | dbrody/playwright-elixir | 48611c08dbdb8e36aa4dd8aa2d97a4014b753815 | [
"MIT"
] | 30 | 2021-06-01T16:59:35.000Z | 2022-03-25T16:56:19.000Z | lib/playwright/config.ex | dbrody/playwright-elixir | 48611c08dbdb8e36aa4dd8aa2d97a4014b753815 | [
"MIT"
] | 35 | 2021-06-10T17:05:31.000Z | 2022-02-11T22:30:36.000Z | lib/playwright/config.ex | dbrody/playwright-elixir | 48611c08dbdb8e36aa4dd8aa2d97a4014b753815 | [
"MIT"
] | 4 | 2021-08-13T20:38:18.000Z | 2022-01-31T04:32:35.000Z | defmodule Playwright.Config do
@moduledoc """
Configuration for Playwright.
## Overview
config :playwright, ConnectOptions,
[...]
config :playwright, LaunchOptions,
[...]
config :playwright, PlaywrightTest,
[...]
## Details for `ConnectOptions`
Configuration for connecting to a running Playwright browser server over a
WebSocket.
### `ws_endpoint` (required)
A browser websocket endpoint to which the runner will connect.
  This option is required when using the `:websocket` transport to communicate with
a Playwright browser server.
e.g.,
config :playwright, ConnectOptions,
ws_endpoint: "ws://localhost:3000/playwright"
## Details for `LaunchOptions`
Configuration for Playwright browser server launch commands.
### `args` (optional)
Additional arguments to pass to the browser instance. The list of Chromium
flags may be found [online](http://peter.sh/experiments/chromium-command-line-switches/).
e.g.,
config :playwright, LaunchOptions,
args: [
"--use-fake-ui-for-media-stream",
"--use-fake-device-for-media-stream"
]
### `channel` (optional)
Browser distribution channel for Chromium. Supported values are:
- `chrome`
- `chrome-beta`
- `chrome-dev`
- `chrome-canary`
- `msedge`
- `msedge-beta`
- `msedge-dev`
- `msedge-canary`
Read more about using Google Chrome and Microsoft Edge
[online](https://playwright.dev/docs/browsers#google-chrome--microsoft-edge).
e.g.,
config :playwright, LaunchOptions,
channel: "chrome"
### `chromium_sandbox` (optional)
Enable Chromium sandboxing. Defaults to `false`.
e.g.,
config :playwright, LaunchOptions,
chromium_sandbox: true
### `devtools` (optional)
With Chromium, specifies whether to auto-open a "Developer Tools" panel for
each tab. If this option is `true`, the `headless` option will be set to
`false`.
Defaults to `false`.
e.g.,
config :playwright, LaunchOptions,
devtools: true
### `headless` (optional)
Specifies whether to run the browser in "headless" mode. See:
- [headless Chromium](https://developers.google.com/web/updates/2017/04/headless-chrome)
- [headless Firefox](https://developer.mozilla.org/en-US/docs/Mozilla/Firefox/Headless_mode)
Defaults to `true` unless the `devtools` option is `true`.
e.g.,
config :playwright, LaunchOptions,
headless: false # e.g., see a browser window pop up in "dev".
### `downloads_path` (optional)
**WARNING: not yet implemented**
If specified, accepted downloads are written to this directory. Otherwise, a
temporary directory is created and is removed when the browser is closed.
e.g.,
config :playwright, LaunchOptions,
downloads_path: "./doc/downloads"
### `env` (optional)
**WARNING: not yet implemented**
Environment variables that will be made visible to the browser. Defaults to
`System.get_env/0`.
e.g.,
config :playwright, LaunchOptions,
env: ["DEBUG", "true"]
### `executable_path` (optional)
A filesystem path to a browser executable to run instead of the bundled
browser. If `executable_path` is a relative path, then it is resolved relative
to the current working directory.
**Chromium-only**
Playwright can also be used to control the Google Chrome or Microsoft Edge
browsers, but it works best with the bundled version of Chromium. There is no
guarantee that it will work with any other version.
**Use `executable_path` option with extreme caution.**
e.g.,
config :playwright, LaunchOptions,
executable_path: "/Applications/..."
### `playwright_cli_path` (optional)
A filesystem path to the playwright cli.js file to use instead of the default
assets path.
**Chromium-only**
This can be helpful for packaged releases or systems where the node_module may
be located elsewhere on the filesystem.
**Use `playwright_cli_path` option with extreme caution.**
e.g.,
      config :playwright, LaunchOptions,
playwright_cli_path: "/Cache/.../playwright/cli.js"
## Details for `PlaywrightTest`
Configuration for usage of `PlaywrightTest.Case`.
### `transport` (optional)
One of `:driver` or `:websocket`, defaults to `:driver`.
Additional configuration may be required depending on the transport
configuration:
- `LaunchOptions` for the `:driver` transport
- `ConnectOptions` for the `:websocket` transport
e.g.,
config :playwright, PlaywrightTest,
transport: :websocket
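
  At runtime these values are read back through the helpers in this module,
  for example (a sketch; the exact keys returned depend on what you have
  configured):

      Playwright.Config.launch_options()
      # => %{headless: false} given the `headless: false` example above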
"""
alias Playwright.Config.Types
alias Playwright.Extra
@typedoc false
@type connect_options :: %{
ws_endpoint: String.t()
}
@typedoc false
@type launch_options :: %{
args: [String.t()],
channel: String.t(),
chromium_sandbox: boolean(),
devtools: boolean(),
downloads_path: String.t(),
env: any(),
executable_path: String.t(),
headless: boolean(),
playwright_cli_path: String.t()
}
@typedoc false
@type playwright_test :: %{
transport: atom()
}
defmodule Types do
@moduledoc false
defmodule ConnectOptions do
@moduledoc false
defstruct [:ws_endpoint, :playwright_cli_path]
end
defmodule LaunchOptions do
@moduledoc false
defstruct [
:args,
:channel,
:chromium_sandbox,
:devtools,
:downloads_path,
:executable_path,
:headless,
:playwright_cli_path
]
end
defmodule PlaywrightTest do
@moduledoc false
defstruct transport: :driver
end
end
@doc false
@spec connect_options(boolean()) :: connect_options
def connect_options(camelcase \\ false) do
config_for(ConnectOptions, %Types.ConnectOptions{}, camelcase) || %{}
end
@doc false
@spec launch_options(boolean()) :: map()
def launch_options(camelcase \\ false) do
config_for(LaunchOptions, %Types.LaunchOptions{}, camelcase) || %{}
# |> clean()
end
@doc false
@spec playwright_test(boolean()) :: Types.PlaywrightTest
def playwright_test(camelcase \\ false) do
config_for(PlaywrightTest, %Types.PlaywrightTest{}, camelcase)
# |> Map.from_struct()
end
@doc false
def config_for(key, mod, camelcase \\ false) do
configured =
Application.get_env(:playwright, key, %{})
|> Enum.into(%{})
result = build(configured, mod) |> clean()
if camelcase, do: camelize(result), else: result
end
# private
# ----------------------------------------------------------------------------
defp build(source, mod) do
result =
for key <- Map.keys(mod) |> Enum.reject(fn key -> key == :__struct__ end),
into: %{} do
case Map.get(source, key) do
nil ->
{key, Map.get(mod, key)}
value ->
{key, value}
end
end
Map.merge(mod, result)
end
defp clean(source) do
Map.from_struct(source)
|> Enum.reject(fn
{_key, value} when is_nil(value) -> true
{_key, value} when is_list(value) -> value == []
_otherwise_ -> false
end)
|> Enum.into(%{})
end
defp camelize(source) do
Extra.Map.deep_camelize_keys(source)
end
end
| 24.358553 | 96 | 0.644564 |
9ef982e778787fbe866bd64919bc22ccbd511841 | 2,311 | ex | Elixir | clients/os_config/lib/google_api/os_config/v1/model/week_day_of_month.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/os_config/lib/google_api/os_config/v1/model/week_day_of_month.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/os_config/lib/google_api/os_config/v1/model/week_day_of_month.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.OSConfig.V1.Model.WeekDayOfMonth do
@moduledoc """
Represents one week day in a month. An example is "the 4th Sunday".
## Attributes
* `dayOfWeek` (*type:* `String.t`, *default:* `nil`) - Required. A day of the week.
  * `dayOffset` (*type:* `integer()`, *default:* `nil`) - Optional. Represents the number of days before or after the given week day of month that the patch deployment is scheduled for. For example, if `week_ordinal` and `day_of_week` values point to the second Tuesday of the month and this `day_offset` value is set to `3`, the patch deployment takes place three days after the second Tuesday of the month. If this value is negative, for example -5, the patches are deployed five days before the second Tuesday of the month. Allowed values are in range [-30, 30].
* `weekOrdinal` (*type:* `integer()`, *default:* `nil`) - Required. Week number in a month. 1-4 indicates the 1st to 4th week of the month. -1 indicates the last week of the month.
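
  As an illustrative sketch, "the 4th Sunday" from the summary above could be represented as:

      %GoogleApi.OSConfig.V1.Model.WeekDayOfMonth{dayOfWeek: "SUNDAY", weekOrdinal: 4}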
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:dayOfWeek => String.t() | nil,
:dayOffset => integer() | nil,
:weekOrdinal => integer() | nil
}
field(:dayOfWeek)
field(:dayOffset)
field(:weekOrdinal)
end
defimpl Poison.Decoder, for: GoogleApi.OSConfig.V1.Model.WeekDayOfMonth do
def decode(value, options) do
GoogleApi.OSConfig.V1.Model.WeekDayOfMonth.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.OSConfig.V1.Model.WeekDayOfMonth do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 43.603774 | 569 | 0.723064 |
9ef9bc25943b89e025d31396a5d73fecebc801cd | 1,863 | ex | Elixir | lib/fun_with_flags/store/persistent/ecto/record.ex | elpikel/fun_with_flags | 1c409e8872eb7e370f827a0d471082aeff09cf33 | [
"MIT"
] | 736 | 2017-02-16T12:55:57.000Z | 2022-03-31T19:50:43.000Z | lib/fun_with_flags/store/persistent/ecto/record.ex | elpikel/fun_with_flags | 1c409e8872eb7e370f827a0d471082aeff09cf33 | [
"MIT"
] | 86 | 2017-04-02T16:42:33.000Z | 2022-03-25T01:07:13.000Z | lib/fun_with_flags/store/persistent/ecto/record.ex | elpikel/fun_with_flags | 1c409e8872eb7e370f827a0d471082aeff09cf33 | [
"MIT"
] | 63 | 2017-04-28T23:38:15.000Z | 2022-03-28T12:49:02.000Z | if Code.ensure_loaded?(Ecto) do
defmodule FunWithFlags.Store.Persistent.Ecto.Record do
@moduledoc false
use Ecto.Schema
import Ecto.Changeset
alias FunWithFlags.{Gate, Config}
@primary_key {:id, :id, autogenerate: true}
schema Config.ecto_table_name() do
field :flag_name, :string
field :gate_type, :string
field :target, :string
field :enabled, :boolean
end
@fields [:flag_name, :gate_type, :target, :enabled]
def changeset(struct, params \\ %{}) do
struct
|> cast(params, @fields)
|> validate_required(@fields)
|> unique_constraint(
:gate_type,
name: "fwf_flag_name_gate_target_idx",
message: "Can't store a duplicated gate."
)
end
def build(flag_name, gate) do
{type, target} = get_type_and_target(gate)
data = %{
flag_name: to_string(flag_name),
gate_type: type,
target: target,
enabled: gate.enabled
}
changeset(%__MODULE__{}, data)
end
def update_target(record = %__MODULE__{gate_type: "percentage"}, gate) do
{"percentage", target} = get_type_and_target(gate)
change(record, target: target)
end
# Do not just store NULL for `target: nil`, because the unique
# index in the table does not see NULL values as equal.
#
def serialize_target(nil), do: "_fwf_none"
def serialize_target(str) when is_binary(str), do: str
def serialize_target(atm) when is_atom(atm), do: to_string(atm)
defp get_type_and_target(%Gate{type: :percentage_of_time, for: target}) do
{"percentage", "time/#{to_string(target)}"}
end
defp get_type_and_target(%Gate{type: :percentage_of_actors, for: target}) do
{"percentage", "actors/#{to_string(target)}"}
end
defp get_type_and_target(%Gate{type: type, for: target}) do
{to_string(type), serialize_target(target)}
end
end
end # Code.ensure_loaded?
| 25.875 | 78 | 0.683843 |
9ef9d25db1c4d11d79ecbfb4ca9776d65e718523 | 727 | ex | Elixir | lib/transhook_web/live/live_helpers.ex | linjunpop/transhook | 59000e5a346c6c059d95c5a1f48190f698b4e7a3 | [
"0BSD"
] | null | null | null | lib/transhook_web/live/live_helpers.ex | linjunpop/transhook | 59000e5a346c6c059d95c5a1f48190f698b4e7a3 | [
"0BSD"
] | null | null | null | lib/transhook_web/live/live_helpers.ex | linjunpop/transhook | 59000e5a346c6c059d95c5a1f48190f698b4e7a3 | [
"0BSD"
] | null | null | null | defmodule TranshookWeb.LiveHelpers do
import Phoenix.LiveView.Helpers
@doc """
Renders a component inside the `TranshookWeb.ModalComponent` component.
The rendered modal receives a `:return_to` option to properly update
the URL when the modal is closed.
## Examples
<%= live_modal TranshookWeb.HookLive.FormComponent,
id: @hook.id || :new,
action: @live_action,
hook: @hook,
return_to: Routes.admin_hook_index_path(@socket, :index) %>
"""
def live_modal(component, opts) do
path = Keyword.fetch!(opts, :return_to)
modal_opts = [id: :modal, return_to: path, component: component, opts: opts]
live_component(TranshookWeb.ModalComponent, modal_opts)
end
end
| 30.291667 | 80 | 0.702889 |
9ef9e37f14d08aad7727aa608c46406773170ae5 | 1,000 | ex | Elixir | test/support/conn_case.ex | ohyecloudy/phoenix-prometheus-grafana | e956f27a34c794e85d1e89fc61af79be18a0ce35 | [
"MIT"
] | 1 | 2020-04-23T08:18:08.000Z | 2020-04-23T08:18:08.000Z | test/support/conn_case.ex | ohyecloudy/phoenix_prometheus_grafana | e956f27a34c794e85d1e89fc61af79be18a0ce35 | [
"MIT"
] | null | null | null | test/support/conn_case.ex | ohyecloudy/phoenix_prometheus_grafana | e956f27a34c794e85d1e89fc61af79be18a0ce35 | [
"MIT"
] | null | null | null | defmodule PpgWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use PpgWeb.ConnCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
alias PpgWeb.Router.Helpers, as: Routes
# The default endpoint for testing
@endpoint PpgWeb.Endpoint
end
end
setup _tags do
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 28.571429 | 60 | 0.733 |
9ef9f1462f39b88da3b53539f46536d822be5f30 | 2,895 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2beta1_event_input.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2beta1_event_input.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2beta1_event_input.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1EventInput do
@moduledoc """
  Events allow for matching intents by event name instead of the natural language input. For instance, an input event named `welcome_event` can trigger a personalized welcome response. The parameter `name` may be used by the agent in the response: `"Hello #welcome_event.name! What can I do for you today?"`.
## Attributes
* `languageCode` (*type:* `String.t`, *default:* `nil`) - Required. The language of this query. See [Language Support](https://cloud.google.com/dialogflow/docs/reference/language) for a list of the currently supported language codes. Note that queries in the same session do not necessarily need to specify the same language.
* `name` (*type:* `String.t`, *default:* `nil`) - Required. The unique identifier of the event.
* `parameters` (*type:* `map()`, *default:* `nil`) - The collection of parameters associated with the event. Depending on your protocol or client library language, this is a map, associative array, symbol table, dictionary, or JSON object composed of a collection of (MapKey, MapValue) pairs: - MapKey type: string - MapKey value: parameter name - MapValue type: - If parameter's entity type is a composite entity: map - Else: depending on parameter value type, could be one of string, number, boolean, null, list or map - MapValue value: - If parameter's entity type is a composite entity: map from composite entity property names to property values - Else: parameter value
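
  As an illustrative sketch (the values are hypothetical), an event input for the `welcome_event` example above could be built as:

      %GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1EventInput{
        name: "welcome_event",
        languageCode: "en-US",
        parameters: %{"name" => "Sam"}
      }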
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:languageCode => String.t() | nil,
:name => String.t() | nil,
:parameters => map() | nil
}
field(:languageCode)
field(:name)
field(:parameters, type: :map)
end
defimpl Poison.Decoder, for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1EventInput do
def decode(value, options) do
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1EventInput.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1EventInput do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 54.622642 | 678 | 0.745423 |
9ef9f83604103a9cb872df0e1574516a6a75de00 | 1,127 | exs | Elixir | couchdb_docs/config/config.exs | sbezugliy/couchdb_elixir | 2fdcb1617918324315a2217b2d1567e9a2c68558 | [
"Apache-2.0"
] | null | null | null | couchdb_docs/config/config.exs | sbezugliy/couchdb_elixir | 2fdcb1617918324315a2217b2d1567e9a2c68558 | [
"Apache-2.0"
] | null | null | null | couchdb_docs/config/config.exs | sbezugliy/couchdb_elixir | 2fdcb1617918324315a2217b2d1567e9a2c68558 | [
"Apache-2.0"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :couchdb_docs, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:couchdb_docs, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.354839 | 73 | 0.753327 |
9efa0d44384b5f5341de3bca19d4b70cb2e9aa6d | 500 | ex | Elixir | lib/receipt_verifier/error.ex | kianmeng/receipt_verifier | 7111c95ba23ab57c4ea78e88f8816a05536c9e50 | [
"MIT"
] | 18 | 2016-09-05T15:04:27.000Z | 2022-02-11T12:27:24.000Z | lib/receipt_verifier/error.ex | W-Games/receipt_verifier | c1230578e0c9b72a5988574d8ff0e82686aa3d94 | [
"MIT"
] | 13 | 2017-08-25T19:58:20.000Z | 2022-01-16T19:20:34.000Z | lib/receipt_verifier/error.ex | W-Games/receipt_verifier | c1230578e0c9b72a5988574d8ff0e82686aa3d94 | [
"MIT"
] | 8 | 2017-10-27T11:31:20.000Z | 2021-04-21T05:12:34.000Z | defmodule ReceiptVerifier.Error do
@moduledoc """
  The error struct.
  The `code` is the status code returned from Apple's server,
  and the `message` is the detailed description of the error.
## Example
```elixir
%ReceiptVerifier.Error{code: 21002, message: "The data in the receipt-data property was malformed or missing."}
```
"""
@type t :: %__MODULE__{
code: integer,
message: any,
meta: keyword
}
defstruct code: nil, message: "", meta: []
end
| 20.833333 | 113 | 0.636 |
9efa11d64e58224e15acf22ecdb1391f94e16b16 | 4,826 | ex | Elixir | lib/eqrcode/svg.ex | dcdourado/eqrcode | 825590149941f7c98bbf01481da738b70c109092 | [
"MIT"
] | null | null | null | lib/eqrcode/svg.ex | dcdourado/eqrcode | 825590149941f7c98bbf01481da738b70c109092 | [
"MIT"
] | null | null | null | lib/eqrcode/svg.ex | dcdourado/eqrcode | 825590149941f7c98bbf01481da738b70c109092 | [
"MIT"
] | null | null | null | defmodule EQRCode.SVG do
@moduledoc """
Render the QR Code matrix in SVG format
```elixir
qr_code_content
|> EQRCode.encode()
|> EQRCode.svg(color: "#cc6600", shape: "circle", width: 300)
```
You can specify the following attributes of the QR code:
* `background_color`: In hexadecimal format or `:transparent`. The default is `#FFF`
* `color`: In hexadecimal format. The default is `#000`
* `shape`: Only `square` or `circle`. The default is `square`
  * `width`: The width of the QR code in pixels. Without the width attribute, the QR code size will be dynamically generated based on the input string.
* `viewbox`: When set to `true`, the SVG element will specify its height and width using `viewBox`, instead of explicit `height` and `width` tags.
Default options are `[color: "#000", shape: "square"]`.
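
  For example, a scalable SVG with a transparent background might be produced
  with (using only options documented above):

  ```elixir
  qr_code_content
  |> EQRCode.encode()
  |> EQRCode.svg(viewbox: true, background_color: :transparent)
  ```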
"""
alias EQRCode.Matrix
@doc """
Return the SVG format of the QR Code
"""
@spec svg(Matrix.t(), map() | Keyword.t()) :: String.t()
def svg(%Matrix{matrix: matrix} = m, options \\ []) do
options = options |> Enum.map(& &1)
matrix_size = Matrix.size(m)
svg_options = options |> Map.new() |> set_svg_options(matrix_size)
dimension = matrix_size * svg_options[:module_size]
xml_tag = ~s(<?xml version="1.0" standalone="yes"?>)
viewbox_attr = ~s(viewBox="0 0 #{matrix_size} #{matrix_size}")
dimension_attrs =
if Keyword.get(options, :viewbox, false) do
viewbox_attr
else
~s(width="#{dimension}" height="#{dimension}" #{viewbox_attr})
end
open_tag =
~s(<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ev="http://www.w3.org/2001/xml-events" #{
dimension_attrs
}
shape-rendering="crispEdges" style="background-color: #{svg_options[:background_color]}">)
close_tag = ~s(</svg>)
result =
Tuple.to_list(matrix)
|> Stream.with_index()
|> Stream.map(fn {row, row_num} ->
Tuple.to_list(row)
|> format_row_as_svg(row_num, svg_options)
end)
|> Enum.to_list()
Enum.join([xml_tag, open_tag, result, close_tag], "\n")
end
defp set_svg_options(options, matrix_size) do
options
|> Map.put_new(:background_color, "#FFF")
|> Map.put_new(:color, "#000")
|> set_module_size(matrix_size)
|> Map.put_new(:shape, "rectangle")
|> Map.put_new(:size, matrix_size)
end
defp set_module_size(%{width: width} = options, matrix_size) when is_integer(width) do
options
|> Map.put_new(:module_size, width / matrix_size)
end
defp set_module_size(%{width: width} = options, matrix_size) when is_binary(width) do
options
|> Map.put_new(:module_size, String.to_integer(width) / matrix_size)
end
defp set_module_size(options, _matrix_size) do
options
|> Map.put_new(:module_size, 11)
end
defp format_row_as_svg(row_matrix, row_num, svg_options) do
row_matrix
|> Stream.with_index()
|> Stream.map(fn {col, col_num} ->
substitute(col, row_num, col_num, svg_options)
end)
|> Enum.to_list()
end
defp substitute(data, row_num, col_num, %{})
when is_nil(data) or data == 0 do
%{}
|> Map.put(:height, 1)
|> Map.put(:style, "fill: transparent;")
|> Map.put(:width, 1)
|> Map.put(:x, col_num)
|> Map.put(:y, row_num)
|> draw_rect
end
# This pattern match ensures that the QR Codes positional markers are drawn
# as rectangles, regardless of the shape
defp substitute(1, row_num, col_num, %{color: color, size: size})
when (row_num <= 8 and col_num <= 8) or
(row_num >= size - 9 and col_num <= 8) or
(row_num <= 8 and col_num >= size - 9) do
%{}
|> Map.put(:height, 1)
|> Map.put(:style, "fill:#{color};")
|> Map.put(:width, 1)
|> Map.put(:x, col_num)
|> Map.put(:y, row_num)
|> draw_rect
end
defp substitute(1, row_num, col_num, %{color: color, shape: "circle"}) do
radius = 0.5
%{}
|> Map.put(:cx, col_num + radius)
|> Map.put(:cy, row_num + radius)
|> Map.put(:r, radius)
|> Map.put(:style, "fill:#{color};")
|> draw_circle
end
defp substitute(1, row_num, col_num, %{color: color}) do
%{}
|> Map.put(:height, 1)
|> Map.put(:style, "fill:#{color};")
|> Map.put(:width, 1)
|> Map.put(:x, col_num)
|> Map.put(:y, row_num)
|> draw_rect
end
defp draw_rect(attribute_map) do
attributes = get_attributes(attribute_map)
~s(<rect #{attributes}/>)
end
defp draw_circle(attribute_map) do
attributes = get_attributes(attribute_map)
~s(<circle #{attributes}/>)
end
defp get_attributes(attribute_map) do
attribute_map
|> Enum.map(fn {key, value} -> ~s(#{key}="#{value}") end)
|> Enum.join(" ")
end
end
| 29.975155 | 153 | 0.62557 |
9efa26f319cc6a677ac19d041a574be55ac399de | 915 | exs | Elixir | test/configuration/config_before_test.exs | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | 807 | 2015-03-25T14:00:19.000Z | 2022-03-24T08:08:15.000Z | test/configuration/config_before_test.exs | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | 254 | 2015-03-27T10:12:25.000Z | 2021-07-12T01:40:15.000Z | test/configuration/config_before_test.exs | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | 85 | 2015-04-02T10:25:19.000Z | 2021-01-30T21:30:43.000Z | defmodule ConfigBeforeTest do
use ExUnit.Case, async: true
ESpec.configure(fn c ->
c.before(fn tags ->
{:shared, %{answer: 42, tags: tags}}
end)
end)
test "set before in config" do
assert(is_function(ESpec.Configuration.get(:before)))
end
defmodule SomeSpec do
use ESpec
it do: "answer is #{shared[:answer]}"
context "with before and let" do
before do: {:ok, answer: shared[:answer] + 1}
let :answer, do: shared[:answer] + 1
it do: "answer is #{answer()}"
end
end
setup_all do
{:ok, ex1: Enum.at(SomeSpec.examples(), 0), ex2: Enum.at(SomeSpec.examples(), 1)}
end
test "run ex1", context do
example = ESpec.ExampleRunner.run(context[:ex1])
assert(example.result == "answer is 42")
end
test "run ex2", context do
example = ESpec.ExampleRunner.run(context[:ex2])
assert(example.result == "answer is 44")
end
end
| 22.875 | 85 | 0.632787 |
9efa353cb2e94352bbc78410806696be38812afb | 1,406 | exs | Elixir | config/config.exs | smpallen99/elixir_survey_tutorial | 24aa40d73cd3254d510e484693c7249ebcf93605 | [
"MIT"
] | 48 | 2015-10-02T19:16:02.000Z | 2022-01-18T20:50:00.000Z | config/config.exs | smpallen99/elixir_survey_tutorial | 24aa40d73cd3254d510e484693c7249ebcf93605 | [
"MIT"
] | 1 | 2017-01-29T21:46:11.000Z | 2017-05-09T02:47:01.000Z | config/config.exs | smpallen99/elixir_survey_tutorial | 24aa40d73cd3254d510e484693c7249ebcf93605 | [
"MIT"
] | 8 | 2015-10-02T19:18:37.000Z | 2019-09-11T06:25:28.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# Configures the endpoint
config :survey, Survey.Endpoint,
url: [host: "localhost"],
root: Path.dirname(__DIR__),
secret_key_base: "KidRAQbxAff4inlAjVzvwWgeJRadqLJ45dWVMumvyuef4/bVW+pLPFE4sf+ML/Tt",
render_errors: [accepts: ~w(html json)],
pubsub: [name: Survey.PubSub,
adapter: Phoenix.PubSub.PG2]
config :survey,
ecto_repos: [Survey.Repo]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
config :ex_admin,
repo: Survey.Repo,
module: Survey,
modules: [
Survey.ExAdmin.Dashboard,
Survey.ExAdmin.Survey,
Survey.ExAdmin.Question,
Survey.ExAdmin.Choice,
Survey.ExAdmin.Seating,
]
# Configure phoenix generators
config :phoenix, :generators,
migration: true,
binary_id: false
config :phoenix, :template_engines,
haml: PhoenixHaml.Engine,
eex: Phoenix.Template.EExEngine
config :xain, :after_callback, {Phoenix.HTML, :raw}
config :speak_ex, :renderer, :swift
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 27.038462 | 86 | 0.738265 |
9efa381c39d95a408bf9d6afbee2d1b59c4bc72a | 1,418 | ex | Elixir | test/support/data_case.ex | CMcDonald82/phoenix-starter | 757e91326f7c82813dfb22a6eaa24147689b6dc8 | [
"MIT"
] | 1 | 2018-05-29T11:50:17.000Z | 2018-05-29T11:50:17.000Z | test/support/data_case.ex | CMcDonald82/phoenix-starter | 757e91326f7c82813dfb22a6eaa24147689b6dc8 | [
"MIT"
] | null | null | null | test/support/data_case.ex | CMcDonald82/phoenix-starter | 757e91326f7c82813dfb22a6eaa24147689b6dc8 | [
"MIT"
] | null | null | null | defmodule PhoenixStarter.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias PhoenixStarter.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import PhoenixStarter.DataCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(PhoenixStarter.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(PhoenixStarter.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transform changeset errors to a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Enum.reduce(opts, message, fn {key, value}, acc ->
String.replace(acc, "%{#{key}}", to_string(value))
end)
end)
end
end
| 26.259259 | 77 | 0.687588 |
9efa3b40661579959df8ccab8f8feb1c4e669ff1 | 4,118 | ex | Elixir | lib/ueberauth/strategy/google.ex | andykent/ueberauth_google | b696c6d188790c5f6a848de828963bd3ff57d508 | [
"MIT"
] | null | null | null | lib/ueberauth/strategy/google.ex | andykent/ueberauth_google | b696c6d188790c5f6a848de828963bd3ff57d508 | [
"MIT"
] | null | null | null | lib/ueberauth/strategy/google.ex | andykent/ueberauth_google | b696c6d188790c5f6a848de828963bd3ff57d508 | [
"MIT"
] | null | null | null | defmodule Ueberauth.Strategy.Google do
@moduledoc """
Google Strategy for Überauth.
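
  A typical provider entry in your Überauth configuration might look like the
  sketch below (the `default_scope` option mirrors the default declared via
  `use Ueberauth.Strategy` in this module):

      config :ueberauth, Ueberauth,
        providers: [
          google: {Ueberauth.Strategy.Google, [default_scope: "email profile"]}
        ]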
"""
use Ueberauth.Strategy, uid_field: :sub, default_scope: "email", hd: nil
alias Ueberauth.Auth.Info
alias Ueberauth.Auth.Credentials
alias Ueberauth.Auth.Extra
@doc """
Handles initial request for Google authentication.
"""
def handle_request!(conn) do
scopes = conn.params["scope"] || option(conn, :default_scope)
params =
[scope: scopes]
|> with_optional(:hd, conn)
|> with_optional(:prompt, conn)
|> with_optional(:access_type, conn)
|> with_param(:access_type, conn)
|> with_param(:prompt, conn)
|> with_param(:state, conn)
opts = [redirect_uri: callback_url(conn)]
redirect!(conn, Ueberauth.Strategy.Google.OAuth.authorize_url!(params, opts))
end
@doc """
Handles the callback from Google.
"""
def handle_callback!(%Plug.Conn{params: %{"code" => code}} = conn) do
params = [code: code]
opts = [redirect_uri: callback_url(conn)]
case Ueberauth.Strategy.Google.OAuth.get_access_token(params, opts) do
{:ok, token} ->
fetch_user(conn, token)
{:error, {error_code, error_description}} ->
set_errors!(conn, [error(error_code, error_description)])
end
end
@doc false
def handle_callback!(conn) do
set_errors!(conn, [error("missing_code", "No code received")])
end
@doc false
def handle_cleanup!(conn) do
conn
|> put_private(:google_user, nil)
|> put_private(:google_token, nil)
end
@doc """
Fetches the uid field from the response.
"""
def uid(conn) do
uid_field =
conn
|> option(:uid_field)
|> to_string
conn.private.google_user[uid_field]
end
@doc """
Includes the credentials from the google response.
"""
def credentials(conn) do
token = conn.private.google_token
scope_string = (token.other_params["scope"] || "")
scopes = String.split(scope_string, ",")
%Credentials{
expires: !!token.expires_at,
expires_at: token.expires_at,
scopes: scopes,
token_type: Map.get(token, :token_type),
refresh_token: token.refresh_token,
token: token.access_token
}
end
@doc """
Fetches the fields to populate the info section of the `Ueberauth.Auth` struct.
"""
def info(conn) do
user = conn.private.google_user
%Info{
email: user["email"],
first_name: user["given_name"],
image: user["picture"],
last_name: user["family_name"],
name: user["name"],
urls: %{
profile: user["profile"],
website: user["hd"]
}
}
end
@doc """
Stores the raw information (including the token) obtained from the google callback.
"""
def extra(conn) do
%Extra{
raw_info: %{
token: conn.private.google_token,
user: conn.private.google_user
}
}
end
defp fetch_user(conn, token) do
conn = put_private(conn, :google_token, token)
# userinfo_endpoint from https://accounts.google.com/.well-known/openid-configuration
path = "https://www.googleapis.com/oauth2/v3/userinfo"
resp = Ueberauth.Strategy.Google.OAuth.get(token, path)
case resp do
{:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
set_errors!(conn, [error("token", "unauthorized")])
{:ok, %OAuth2.Response{status_code: status_code, body: user}} when status_code in 200..399 ->
put_private(conn, :google_user, user)
{:error, %OAuth2.Response{status_code: status_code}} ->
set_errors!(conn, [error("OAuth2", status_code)])
{:error, %OAuth2.Error{reason: reason}} ->
set_errors!(conn, [error("OAuth2", reason)])
end
end
defp with_param(opts, key, conn) do
if value = conn.params[to_string(key)], do: Keyword.put(opts, key, value), else: opts
end
defp with_optional(opts, key, conn) do
if option(conn, key), do: Keyword.put(opts, key, option(conn, key)), else: opts
end
defp option(conn, key) do
Keyword.get(options(conn), key, Keyword.get(default_options(), key))
end
end
| 27.271523 | 99 | 0.643031 |
9efa464d9a2e3a4a8da2e76fbc4ceb2d315cbdad | 5,895 | exs | Elixir | test/oli_web/live/system_message_live_test.exs | candert1/oli-torus | b7408f7d7c04cc3e9cf537873d98c3a586ec3a66 | [
"MIT"
] | null | null | null | test/oli_web/live/system_message_live_test.exs | candert1/oli-torus | b7408f7d7c04cc3e9cf537873d98c3a586ec3a66 | [
"MIT"
] | null | null | null | test/oli_web/live/system_message_live_test.exs | candert1/oli-torus | b7408f7d7c04cc3e9cf537873d98c3a586ec3a66 | [
"MIT"
] | null | null | null | defmodule OliWeb.SystemMessageLiveTest do
use ExUnit.Case
use OliWeb.ConnCase
import Phoenix.LiveViewTest
import Oli.Factory
alias Oli.Notifications
alias Oli.Notifications.SystemMessage
@live_view_index_route Routes.live_path(OliWeb.Endpoint, OliWeb.SystemMessageLive.IndexView)
describe "user cannot access when is not logged in" do
test "redirects to new session when accessing the index view", %{conn: conn} do
{:error,
{:redirect, %{to: "/authoring/session/new?request_path=%2Fadmin%2Fsystem_messages"}}} =
live(conn, @live_view_index_route)
end
end
describe "user cannot access when is logged in as an author but is not a system admin" do
setup [:author_conn]
test "returns forbidden when accessing the index view", %{conn: conn} do
conn = get(conn, @live_view_index_route)
assert response(conn, 403)
end
end
describe "index" do
setup [:admin_conn, :set_timezone]
test "loads correctly when there are no system messages", %{conn: conn} do
{:ok, view, _html} = live(conn, @live_view_index_route)
refute has_element?(view, "#system_message_active")
assert render(view) =~ "Create"
end
test "lists all existing system messages", %{conn: conn} do
system_message = insert(:system_message)
{:ok, view, _html} = live(conn, @live_view_index_route)
assert view
|> element("form[phx-submit=\"save\"] textarea[id=\"system_message_message\"]")
|> render() =~ system_message.message
end
test "creates new system message when data is valid", %{conn: conn} do
{:ok, view, _html} = live(conn, @live_view_index_route)
params = params_for(:system_message)
view
|> element("form[phx-submit=\"create\"")
|> render_submit(%{
system_message: params
})
assert view
|> element("div.alert.alert-info")
|> render() =~
"System message successfully created."
[%SystemMessage{message: message} | _tail] = Notifications.list_system_messages()
assert ^message = params.message
end
test "displays error message when data is invalid", %{conn: conn} do
{:ok, view, _html} = live(conn, @live_view_index_route)
view
|> element("form[phx-submit=\"create\"")
|> render_submit(%{system_message: %{message: ""}})
assert view
|> element("div.alert.alert-danger")
|> render() =~
"System message couldn't be created: message can't be blank."
assert [] = Notifications.list_system_messages()
end
test "updates system message correctly when data is valid", %{
conn: conn
} do
system_message = insert(:system_message)
{:ok, view, _html} = live(conn, @live_view_index_route)
new_attributes = params_for(:system_message)
view
|> element("form[phx-submit=\"save\"")
|> render_submit(%{system_message: new_attributes})
assert view
|> element("div.alert.alert-info")
|> render() =~
"System message successfully updated."
%SystemMessage{message: new_message} = Notifications.get_system_message(system_message.id)
assert new_attributes.message == new_message
end
test "displays confirmation modal when updating a message status", %{
conn: conn
} do
insert(:active_system_message)
{:ok, view, _html} = live(conn, @live_view_index_route)
new_attributes = params_for(:system_message, active: false)
view
|> element("form[phx-submit=\"save\"")
|> render_submit(%{system_message: new_attributes})
assert has_element?(view, "#dialog")
assert view
|> element("#dialog")
|> render() =~
"Are you sure that you wish to <b>hide</b> this message to all users in the system?"
end
test "deletes the system message successfully", %{
conn: conn
} do
%SystemMessage{id: id} = insert(:system_message)
{:ok, view, _html} = live(conn, @live_view_index_route)
view
|> element("button[phx-click=\"delete\"]")
|> render_click()
assert view
|> element("div.alert.alert-info")
|> render() =~
"System message successfully deleted."
assert nil == Notifications.get_system_message(id)
end
end
describe "show" do
setup [:create_active_system_message]
test "displays system message when user is not logged in", %{
conn: conn,
system_message: system_message
} do
conn = get(conn, Routes.static_page_path(conn, :index))
assert html_response(conn, 200) =~ system_message.message
end
test "displays system message when user is logged in as an author", context do
{:ok, conn: conn, author: _} = author_conn(context)
conn = get(conn, Routes.static_page_path(conn, :index))
assert html_response(conn, 200) =~ context.system_message.message
end
test "displays system message when user is logged in as an instructor", context do
{:ok, conn: conn, user: _} = user_conn(context)
conn = get(conn, Routes.static_page_path(conn, :index))
assert html_response(conn, 200) =~ context.system_message.message
end
test "displays more than one system message if exist", %{
conn: conn,
system_message: system_message
} do
other_system_message = insert(:active_system_message)
conn = get(conn, Routes.static_page_path(conn, :index))
assert html_response(conn, 200) =~ system_message.message
assert html_response(conn, 200) =~ other_system_message.message
end
end
def create_active_system_message(_context) do
system_message = insert(:active_system_message)
{:ok, system_message: system_message}
end
end
| 30.544041 | 99 | 0.64665 |
9efa794eb2616555e2fa74bae3429d9a9bf1186b | 230 | ex | Elixir | lib/battle_city/tank/level3.ex | clszzyh/battle_city_core | 7a3aca9000b186382f1faf526ebce837a76ef86e | [
"MIT"
] | null | null | null | lib/battle_city/tank/level3.ex | clszzyh/battle_city_core | 7a3aca9000b186382f1faf526ebce837a76ef86e | [
"MIT"
] | 38 | 2020-12-17T12:40:44.000Z | 2022-03-31T02:05:59.000Z | lib/battle_city/tank/level3.ex | clszzyh/battle_city | 7a3aca9000b186382f1faf526ebce837a76ef86e | [
"MIT"
] | null | null | null | defmodule BattleCity.Tank.Level3 do
@moduledoc false
use BattleCity.Tank.Base,
points: 4000,
health: 1,
move_speed: 2,
bullet_speed: 2,
level: 3
@impl true
def handle_level_up(_), do: Tank.Level4
end
| 16.428571 | 41 | 0.673913 |
9efa944de498a5a104a9e4cdc3babfeca2e0fe7a | 14,004 | exs | Elixir | apps/neo_node/test/neo_node/neo_node_test.exs | decentralisedkev/neo-scan | c8a35a0952e8c46d40365e0ac76bce361ac5e558 | [
"MIT"
] | null | null | null | apps/neo_node/test/neo_node/neo_node_test.exs | decentralisedkev/neo-scan | c8a35a0952e8c46d40365e0ac76bce361ac5e558 | [
"MIT"
] | null | null | null | apps/neo_node/test/neo_node/neo_node_test.exs | decentralisedkev/neo-scan | c8a35a0952e8c46d40365e0ac76bce361ac5e558 | [
"MIT"
] | null | null | null | defmodule NeoNodeTest do
use ExUnit.Case
@fake_node_url "http://fakenode"
test "post/3" do
assert {:ok, _} = NeoNode.post(@fake_node_url, "getblock", [0, 1])
assert {:error, _} = NeoNode.post(@fake_node_url, "getblockerror", [0, 1])
end
test "get_block_by_height/2" do
assert {
:ok,
%{
confirmations: 2_326_310,
hash:
<<212, 37, 97, 227, 211, 14, 21, 190, 100, 0, 182, 223, 47, 50, 142, 2, 210, 191,
99, 84, 196, 29, 206, 67, 59, 197, 118, 135, 200, 33, 68, 191>>,
index: 0,
merkle_root:
<<128, 63, 244, 171, 227, 234, 101, 51, 188, 192, 190, 87, 78, 250, 2, 248, 58,
232, 253, 198, 81, 200, 121, 5, 107, 13, 155, 227, 54, 192, 27, 244>>,
next_block_hash:
<<215, 130, 219, 138, 56, 176, 238, 160, 215, 57, 78, 15, 0, 124, 97, 199, 23,
152, 134, 117, 120, 199, 124, 56, 124, 8, 17, 57, 3, 148, 108, 201>>,
next_consensus:
<<23, 89, 231, 93, 101, 43, 93, 56, 39, 191, 4, 193, 101, 187, 233, 239, 149,
204, 164, 191, 85, 165, 95, 119, 20>>,
nonce: <<0, 0, 0, 0, 124, 43, 172, 29>>,
previous_block_hash:
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0>>,
script: %{
"invocation" => "",
"verification" => "51"
},
size: 401,
time: DateTime.from_unix!(1_468_595_301),
tx: [
%{
asset: nil,
attributes: [],
block_hash:
<<212, 37, 97, 227, 211, 14, 21, 190, 100, 0, 182, 223, 47, 50, 142, 2, 210,
191, 99, 84, 196, 29, 206, 67, 59, 197, 118, 135, 200, 33, 68, 191>>,
block_time: DateTime.from_unix!(1_468_595_301),
claims: [],
hash:
<<251, 91, 215, 43, 45, 103, 146, 215, 93, 194, 241, 8, 79, 250, 158, 159,
112, 202, 133, 84, 60, 113, 122, 107, 19, 217, 149, 155, 69, 42, 87, 214>>,
net_fee: Decimal.new(0),
nonce: 2_083_236_893,
scripts: [],
size: 10,
sys_fee: Decimal.new(0),
type: :miner_transaction,
version: 0,
vins: [],
vouts: []
},
%{
asset: %{
admin:
<<23, 218, 23, 69, 233, 181, 73, 189, 11, 250, 26, 86, 153, 113, 199, 126,
186, 48, 205, 90, 75, 177, 213, 88, 117>>,
amount: Decimal.new(100_000_000),
available: nil,
expiration: nil,
frozen: nil,
issuer:
<<23, 218, 23, 69, 233, 181, 73, 189, 11, 250, 26, 86, 153, 113, 199, 126,
186, 48, 205, 90, 75, 177, 213, 88, 117>>,
name: [
%{"lang" => "zh-CN", "name" => "小蚁股"},
%{"lang" => "en", "name" => "AntShare"}
],
owner: "00",
precision: 0,
transaction_hash:
<<197, 111, 51, 252, 110, 207, 205, 12, 34, 92, 74, 179, 86, 254, 229, 147,
144, 175, 133, 96, 190, 14, 147, 15, 174, 190, 116, 166, 218, 255, 124,
155>>,
type: :governing_token,
version: nil
},
attributes: [],
block_hash:
<<212, 37, 97, 227, 211, 14, 21, 190, 100, 0, 182, 223, 47, 50, 142, 2, 210,
191, 99, 84, 196, 29, 206, 67, 59, 197, 118, 135, 200, 33, 68, 191>>,
block_time: DateTime.from_unix!(1_468_595_301),
claims: [],
hash:
<<197, 111, 51, 252, 110, 207, 205, 12, 34, 92, 74, 179, 86, 254, 229, 147,
144, 175, 133, 96, 190, 14, 147, 15, 174, 190, 116, 166, 218, 255, 124,
155>>,
net_fee: Decimal.new(0),
nonce: nil,
scripts: [],
size: 107,
sys_fee: Decimal.new(0),
type: :register_transaction,
version: 0,
vins: [],
vouts: []
},
%{
asset: %{
admin:
<<23, 159, 127, 208, 150, 211, 126, 210, 192, 227, 247, 240, 207, 201, 36,
190, 239, 79, 252, 235, 104, 117, 247, 96, 242>>,
amount: Decimal.new(100_000_000),
available: nil,
expiration: nil,
frozen: nil,
issuer:
<<23, 159, 127, 208, 150, 211, 126, 210, 192, 227, 247, 240, 207, 201, 36,
190, 239, 79, 252, 235, 104, 117, 247, 96, 242>>,
name: [
%{"lang" => "zh-CN", "name" => "小蚁币"},
%{"lang" => "en", "name" => "AntCoin"}
],
owner: "00",
precision: 8,
transaction_hash:
<<96, 44, 121, 113, 139, 22, 228, 66, 222, 88, 119, 142, 20, 141, 11, 16,
132, 227, 178, 223, 253, 93, 230, 183, 177, 108, 238, 121, 105, 40, 45,
231>>,
type: :utility_token,
version: nil
},
attributes: [],
block_hash:
<<212, 37, 97, 227, 211, 14, 21, 190, 100, 0, 182, 223, 47, 50, 142, 2, 210,
191, 99, 84, 196, 29, 206, 67, 59, 197, 118, 135, 200, 33, 68, 191>>,
block_time: DateTime.from_unix!(1_468_595_301),
claims: [],
hash:
<<96, 44, 121, 113, 139, 22, 228, 66, 222, 88, 119, 142, 20, 141, 11, 16,
132, 227, 178, 223, 253, 93, 230, 183, 177, 108, 238, 121, 105, 40, 45,
231>>,
net_fee: Decimal.new(0),
nonce: nil,
scripts: [],
size: 106,
sys_fee: Decimal.new(0),
type: :register_transaction,
version: 0,
vins: [],
vouts: []
},
%{
asset: nil,
attributes: [],
block_hash:
<<212, 37, 97, 227, 211, 14, 21, 190, 100, 0, 182, 223, 47, 50, 142, 2, 210,
191, 99, 84, 196, 29, 206, 67, 59, 197, 118, 135, 200, 33, 68, 191>>,
block_time: DateTime.from_unix!(1_468_595_301),
claims: [],
hash:
<<54, 49, 246, 96, 36, 202, 111, 91, 3, 61, 126, 8, 9, 235, 153, 52, 67, 55,
72, 48, 2, 90, 249, 4, 251, 81, 176, 51, 79, 18, 124, 218>>,
net_fee: Decimal.new(0),
nonce: nil,
scripts: [%{"invocation" => "", "verification" => "51"}],
size: 69,
sys_fee: Decimal.new(0),
type: :issue_transaction,
version: 0,
vins: [],
vouts: [
%{
address:
<<23, 95, 169, 157, 147, 48, 55, 117, 254, 80, 202, 17, 156, 50, 119, 89,
49, 62, 204, 250, 28, 241, 166, 178, 163>>,
asset:
<<197, 111, 51, 252, 110, 207, 205, 12, 34, 92, 74, 179, 86, 254, 229,
147, 144, 175, 133, 96, 190, 14, 147, 15, 174, 190, 116, 166, 218, 255,
124, 155>>,
n: 0,
value: Decimal.new(100_000_000)
}
]
}
],
version: 0
}
} == NeoNode.get_block_by_height(@fake_node_url, 0)
assert {:ok, %{index: 2_120_069}} = NeoNode.get_block_by_height(@fake_node_url, 2_120_069)
assert {:error, "error"} == NeoNode.get_block_by_height(@fake_node_url, 123_456)
assert {:error, ":timeout #{@fake_node_url}"} ==
NeoNode.get_block_by_height(@fake_node_url, 123_457)
end
test "get_block_by_hash/2" do
block_0_hash = "d42561e3d30e15be6400b6df2f328e02d2bf6354c41dce433bc57687c82144bf"
block_1_hash = "d782db8a38b0eea0d7394e0f007c61c71798867578c77c387c08113903946cc9"
assert {:ok, %{index: 0}} = NeoNode.get_block_by_hash(@fake_node_url, block_0_hash)
assert {:ok, %{index: 1}} = NeoNode.get_block_by_hash(@fake_node_url, block_1_hash)
assert {:error, %{"code" => -100, "message" => _message}} =
NeoNode.get_block_by_hash(
@fake_node_url,
"0000000000000000000000000000000000000000000000000000000000000000"
)
end
test "get_block_count/1" do
{:ok, count} = NeoNode.get_block_count(@fake_node_url)
assert 2_400_000 == count
end
test "get_transaction/2" do
txid = "0x9e9526615ee7d460ed445c873c4af91bf7bfcc67e6e43feaf051b962a6df0a98"
assert {
:ok,
%{
asset: nil,
attributes: [],
block_hash:
<<213, 0, 206, 91, 206, 23, 53, 30, 158, 90, 36, 131, 52, 153, 207, 128, 242,
217, 136, 130, 121, 60, 19, 183, 187, 242, 33, 255, 50, 164, 44, 205>>,
block_time: DateTime.from_unix!(1_476_647_836),
hash:
<<158, 149, 38, 97, 94, 231, 212, 96, 237, 68, 92, 135, 60, 74, 249, 27, 247,
191, 204, 103, 230, 228, 63, 234, 240, 81, 185, 98, 166, 223, 10, 152>>,
net_fee: Decimal.new(0),
nonce: 3_576_443_283,
scripts: [],
size: 10,
sys_fee: Decimal.new(0),
type: :miner_transaction,
version: 0,
vins: [],
vouts: [],
claims: []
}
} == NeoNode.get_transaction(@fake_node_url, txid)
assert {:ok, %{scripts: [%{}, %{"contract" => _}]}} =
NeoNode.get_transaction(
@fake_node_url,
"fd161ccd87deab812daa433cbc0f8f6468de24f1d708187beef5ab9ada7050f3"
)
assert {:ok, %{scripts: [%{}, %{}, %{"script" => _}]}} =
NeoNode.get_transaction(
@fake_node_url,
"45ced268026de0fcaf7035e4960e860b98fe1ae5122e716d9daac1163f13e534"
)
assert {:error, %{"code" => -100, "message" => _message}} =
NeoNode.get_transaction(
@fake_node_url,
"0000000000000000000000000000000000000000000000000000000000000000"
)
end
test "get_asset/2" do
txid = "0xc56f33fc6ecfcd0c225c4ab356fee59390af8560be0e930faebe74a6daff7c9b"
assert {
:ok,
%{
admin:
<<23, 218, 23, 69, 233, 181, 73, 189, 11, 250, 26, 86, 153, 113, 199, 126, 186,
48, 205, 90, 75, 177, 213, 88, 117>>,
amount: Decimal.new(100_000_000),
available: Decimal.new(100_000_000),
expiration: 4_000_000,
frozen: false,
issuer:
<<23, 218, 23, 69, 233, 181, 73, 189, 11, 250, 26, 86, 153, 113, 199, 126, 186,
48, 205, 90, 75, 177, 213, 88, 117>>,
name: [
%{"lang" => "zh-CN", "name" => "\\u5C0F\\u8681\\u80A1"},
%{"lang" => "en", "name" => "AntShare"}
],
owner: "00",
precision: 0,
transaction_hash:
<<197, 111, 51, 252, 110, 207, 205, 12, 34, 92, 74, 179, 86, 254, 229, 147, 144,
175, 133, 96, 190, 14, 147, 15, 174, 190, 116, 166, 218, 255, 124, 155>>,
type: :governing_token,
version: 0
}
} == NeoNode.get_asset(@fake_node_url, txid)
assert {:error, %{"code" => -100, "message" => _message}} =
NeoNode.get_asset(
@fake_node_url,
"0x0000000000000000000000000000000000000000000000000000000000000000"
)
end
test "get_contract/2" do
contract_hash = "0xecc6b20d3ccac1ee9ef109af5a7cdb85706b1df9"
assert {
:ok,
%{
author: "Red Pulse",
code_version: "1.0",
email: "[email protected]",
hash:
<<236, 198, 178, 13, 60, 202, 193, 238, 158, 241, 9, 175, 90, 124, 219, 133, 112,
107, 29, 249>>,
name: "RPX Sale",
parameters: ["String", "Array"],
properties: %{
"dynamic_invoke" => false,
"storage" => true
},
return_type: "ByteArray",
script: <<1, 31>>,
version: 0
}
} = NeoNode.get_contract(@fake_node_url, contract_hash)
assert {:error, %{"code" => -100, "message" => _message}} =
NeoNode.get_contract(@fake_node_url, "0x0000000000000000000000000000000000000000")
end
end
| 42.825688 | 98 | 0.417738 |
9efa9a3aa68e49df044ace06c6f96594ea357c36 | 3,309 | exs | Elixir | lib/ex_unit/test/ex_unit/doc_test_test.exs | bruce/elixir | d77ccf941541959079e5f677f8717da24b486fac | [
"Apache-2.0"
] | 1 | 2017-09-09T20:59:04.000Z | 2017-09-09T20:59:04.000Z | lib/ex_unit/test/ex_unit/doc_test_test.exs | bruce/elixir | d77ccf941541959079e5f677f8717da24b486fac | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/test/ex_unit/doc_test_test.exs | bruce/elixir | d77ccf941541959079e5f677f8717da24b486fac | [
"Apache-2.0"
] | null | null | null | Code.require_file "../test_helper.exs", __DIR__
defmodule ExUnit.DocTestTest.GoodModule do
@doc """
iex> test_fun
1
iex> test_fun + 1
2
"""
def test_fun, do: 1
@doc """
iex> a = 1
iex> b = a + 2
3
iex> a + b
4
"""
def single_context, do: :ok
@doc """
iex> 1 + (fn() -> "" end).()
** (ArithmeticError) bad argument in arithmetic expression
iex> 2 + (fn() -> :a end).()
** (ArithmeticError) bad argument in arithmetic expression
"""
def two_exceptions, do: :ok
@doc """
iex> 1 + (fn() -> :a end).()
** (ArithmeticError) bad argument in arithmetic expression
"""
def exception_test, do: :ok
end
defmodule ExUnit.DocTestTest.ExceptionModule do
@doc """
iex> 1 + ""
** (ArithmeticError) bad argument in arithmetic expression
iex> 2 + ""
** (ArithmeticError) bad argument in arithmetic expression
"""
def two_exceptions, do: :ok
end
defmodule ExUnit.DocTestTest.SomewhatGoodModuleWithOnly do
@doc """
iex> test_fun
1
iex> test_fun + 1
2
"""
def test_fun, do: 1
@doc """
iex> test_fun
1
iex> test_fun + 1
1
"""
def test_fun1, do: 1
end
defmodule ExUnit.DocTestTest.SomewhatGoodModuleWithExcept do
@doc """
iex> test_fun
1
iex> test_fun + 1
2
"""
def test_fun, do: 1
@doc """
iex> test_fun
1
iex> test_fun + 1
1
"""
def test_fun1, do: 1
end
defmodule ExUnit.DocTestTest.NoImport do
@doc """
iex> ExUnit.DocTestTest.NoImport.min(1, 2)
2
"""
def min(a, b), do: max(a, b)
end
defmodule ExUnit.DocTestTest.Invalid do
@doc """
iex> _a = 1
1
iex> _a + 1
2
"""
def no_leak, do: :ok
end
defmodule ExUnit.DocTestTest.IndentationHeredocs do
@doc %S'''
Receives a test and formats its failure.
## Examples
iex> " 1\n 2\n"
"""
1
2
"""
'''
def heredocs, do: :ok
end
defmodule ExUnit.DocTestTest.IndentationMismatchedPrompt do
@doc %S'''
iex> foo = 1
iex> bar = 2
iex> foo + bar
3
'''
def mismatched, do: :ok
end
defmodule ExUnit.DocTestTest.IndentationTooMuch do
@doc %S'''
iex> 1 + 2
3
'''
def too_much, do: :ok
end
defmodule ExUnit.DocTestTest.IndentationNotEnough do
@doc %S'''
iex> 1 + 2
3
'''
def not_enough, do: :ok
end
defmodule ExUnit.DocTestTest do
use ExUnit.Case
# This is intentional. The doctests in DocTest's docs fail
# for demonstration purposes.
# doctest ExUnit.DocTest
doctest ExUnit.DocTestTest.GoodModule, import: true
doctest ExUnit.DocTestTest.SomewhatGoodModuleWithOnly, only: [test_fun: 0], import: true
doctest ExUnit.DocTestTest.SomewhatGoodModuleWithExcept, except: [test_fun1: 0], import: true
doctest ExUnit.DocTestTest.NoImport
assert_raise ExUnit.DocTest.Error, fn ->
doctest ExUnit.DocTestTest.ExceptionModule
end
test :no_var_leak do
assert_raise CompileError, %r"function '_a'/0 undefined", fn ->
defmodule NeverCompiled do
import ExUnit.DocTest
doctest ExUnit.DocTestTest.Invalid
end
end
end
doctest ExUnit.DocTestTest.IndentationHeredocs
assert_raise ExUnit.DocTest.Error, fn ->
doctest ExUnit.DocTestTest.IndentationMismatchedPrompt
end
assert_raise ExUnit.DocTest.Error, fn ->
doctest ExUnit.DocTestTest.IndentationTooMuch
end
end
| 18.383333 | 95 | 0.654276 |
9efaa64fb79e604a043bb4ca684311aa64f89451 | 2,016 | ex | Elixir | clients/dataflow/lib/google_api/dataflow/v1b3/model/job_metrics.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/job_metrics.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/job_metrics.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Dataflow.V1b3.Model.JobMetrics do
@moduledoc """
  JobMetrics contains a collection of metrics describing the detailed progress of a Dataflow job. Metrics correspond to user-defined and system-defined metrics in the job. This resource captures only the most recent values of each metric; time-series data can be queried for them (under the same metric names) from Cloud Monitoring.
## Attributes
- metricTime (DateTime.t): Timestamp as of which metric values are current. Defaults to: `null`.
- metrics ([MetricUpdate]): All metrics for this job. Defaults to: `null`.
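  ## Example
  A minimal sketch of building this struct by hand; the timestamp and the empty metrics list are illustrative values only, not taken from a real job:
      {:ok, metric_time, 0} = DateTime.from_iso8601("2019-01-01T00:00:00Z")
      %GoogleApi.Dataflow.V1b3.Model.JobMetrics{
        metricTime: metric_time,
        metrics: []
      }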
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:metricTime => DateTime.t(),
:metrics => list(GoogleApi.Dataflow.V1b3.Model.MetricUpdate.t())
}
field(:metricTime, as: DateTime)
field(:metrics, as: GoogleApi.Dataflow.V1b3.Model.MetricUpdate, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Dataflow.V1b3.Model.JobMetrics do
def decode(value, options) do
GoogleApi.Dataflow.V1b3.Model.JobMetrics.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Dataflow.V1b3.Model.JobMetrics do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.529412 | 332 | 0.75 |
9efab5e913d9de800b0edbde169a0244b1b85641 | 1,003 | exs | Elixir | test/http/client_test.exs | LucidModules/ueberauth_line | 686e41ddeef83ecaf55e21ca0bf0337645deb9c1 | [
"MIT"
] | null | null | null | test/http/client_test.exs | LucidModules/ueberauth_line | 686e41ddeef83ecaf55e21ca0bf0337645deb9c1 | [
"MIT"
] | null | null | null | test/http/client_test.exs | LucidModules/ueberauth_line | 686e41ddeef83ecaf55e21ca0bf0337645deb9c1 | [
"MIT"
] | null | null | null | defmodule Http.ClientTest do
use ExUnit.Case, async: true
import Mock
alias Http.Client
alias Http.Config
alias Http.ClientMock
defp config_get_client!() do
ClientMock
end
describe "given client with configured client" do
setup_with_mocks([
{Config, [:passthrough],
[
get_client!: &config_get_client!/0
]}
]) do
Code.load_file("test/http/client_mock.exs")
:ok
end
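    # The mock loaded above lives in "test/http/client_mock.exs" and is not shown
    # in this file. A hedged sketch of the shape these tests assume (an assumption,
    # not the project's actual mock): it simply echoes the arguments it receives.
    #
    #   defmodule Http.ClientMock do
    #     def get(headers, url), do: %{mocked: {headers, url}}
    #     def post(body, headers, url), do: %{mocked: {body, headers, url}}
    #   end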
test "it executes GET request" do
headers = %{foo: :bar}
url = "some/url"
      assert %{
               mocked: {
                 ^headers,
                 ^url
               }
             } = Client.get(headers, url)
end
test "it executes POST request" do
body = %{param: 42}
headers = %{foo: :bar}
url = "some/url"
      assert %{
               mocked: {
                 ^body,
                 ^headers,
                 ^url
               }
             } = Client.post(body, headers, url)
end
end
end
| 18.924528 | 51 | 0.497507 |