hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7389815a7e54d559f1b51c0f09eb81809ca04271 | 958 | ex | Elixir | lib/docusign/model/new_account_summary.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 4 | 2020-12-21T12:50:13.000Z | 2022-01-12T16:50:43.000Z | lib/docusign/model/new_account_summary.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 12 | 2018-09-18T15:26:34.000Z | 2019-09-28T15:29:39.000Z | lib/docusign/model/new_account_summary.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 15 | 2020-04-29T21:50:16.000Z | 2022-02-11T18:01:51.000Z | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule DocuSign.Model.NewAccountSummary do
  @moduledoc """
  Summary returned by DocuSign when a new account is created.

  Auto-generated by the Swagger code generator (see the header comment in
  this file); do not edit manually.
  """

  # Allow structs of this type to be encoded directly by Poison.
  @derive [Poison.Encoder]

  defstruct [
    :accountId,
    :accountIdGuid,
    :accountName,
    :apiPassword,
    :baseUrl,
    :billingPlanPreview,
    :userId
  ]

  @type t :: %__MODULE__{
          :accountId => String.t(),
          :accountIdGuid => String.t(),
          :accountName => String.t(),
          :apiPassword => String.t(),
          :baseUrl => String.t(),
          # NOTE(review): generator emitted the bare module name here rather
          # than `BillingPlanPreview.t()` — kept as generated.
          :billingPlanPreview => BillingPlanPreview,
          :userId => String.t()
        }
end
# After Poison decodes the top-level map, convert the nested
# `billingPlanPreview` value into its model struct.
defimpl Poison.Decoder, for: DocuSign.Model.NewAccountSummary do
  import DocuSign.Deserializer

  def decode(value, options) do
    value
    |> deserialize(:billingPlanPreview, :struct, DocuSign.Model.BillingPlanPreview, options)
  end
end
| 23.95 | 92 | 0.647182 |
7389b22e8d38923a164832985c2bd181a35b9927 | 2,529 | ex | Elixir | clients/text_to_speech/lib/google_api/text_to_speech/v1beta1/model/synthesize_speech_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/text_to_speech/lib/google_api/text_to_speech/v1beta1/model/synthesize_speech_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/text_to_speech/lib/google_api/text_to_speech/v1beta1/model/synthesize_speech_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.TextToSpeech.V1beta1.Model.SynthesizeSpeechResponse do
  @moduledoc """
  The message returned to the client by the `SynthesizeSpeech` method.

  ## Attributes

  *   `audioConfig` (*type:* `GoogleApi.TextToSpeech.V1beta1.Model.AudioConfig.t`, *default:* `nil`) - The audio metadata of `audio_content`.
  *   `audioContent` (*type:* `String.t`, *default:* `nil`) - The audio data bytes encoded as specified in the request, including the header for encodings that are wrapped in containers (e.g. MP3, OGG_OPUS). For LINEAR16 audio, we include the WAV header. Note: as with all bytes fields, protobuffers use a pure binary representation, whereas JSON representations use base64.
  *   `timepoints` (*type:* `list(GoogleApi.TextToSpeech.V1beta1.Model.Timepoint.t)`, *default:* `nil`) - A link between a position in the original request input and a corresponding time in the output audio. It's only supported via `` of SSML input.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :audioConfig => GoogleApi.TextToSpeech.V1beta1.Model.AudioConfig.t() | nil,
          :audioContent => String.t() | nil,
          :timepoints => list(GoogleApi.TextToSpeech.V1beta1.Model.Timepoint.t()) | nil
        }

  # `field/2,3` is provided by GoogleApi.Gax.ModelBase and registers each
  # attribute for JSON (de)serialization.
  field(:audioConfig, as: GoogleApi.TextToSpeech.V1beta1.Model.AudioConfig)
  field(:audioContent)
  field(:timepoints, as: GoogleApi.TextToSpeech.V1beta1.Model.Timepoint, type: :list)
end
# Delegate Poison decoding to the model's generated `decode/2`.
defimpl Poison.Decoder, for: GoogleApi.TextToSpeech.V1beta1.Model.SynthesizeSpeechResponse do
  def decode(value, options) do
    GoogleApi.TextToSpeech.V1beta1.Model.SynthesizeSpeechResponse.decode(value, options)
  end
end
# Delegate Poison encoding to the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.TextToSpeech.V1beta1.Model.SynthesizeSpeechResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 47.716981 | 374 | 0.75089 |
7389b3720c3ec11990fb0b9a026999155d21e821 | 627 | ex | Elixir | lib/rockelivery/user/actions/create.ex | joabehenrique/rockelivery | 02f83fe99df8e27d4e2a6270fecb39a521f2ceaf | [
"MIT"
] | null | null | null | lib/rockelivery/user/actions/create.ex | joabehenrique/rockelivery | 02f83fe99df8e27d4e2a6270fecb39a521f2ceaf | [
"MIT"
] | null | null | null | lib/rockelivery/user/actions/create.ex | joabehenrique/rockelivery | 02f83fe99df8e27d4e2a6270fecb39a521f2ceaf | [
"MIT"
] | null | null | null | defmodule Rockelivery.User.Actions.Create do
alias Rockelivery.{Error, Repo}
alias Rockelivery.User.User
def call(%{"cep" => cep} = params) do
changeset = User.changeset(params)
with {:ok, %User{}} <- User.build(changeset),
{:ok, _cep_info} <- client().get_cep_info(cep),
{:ok, %User{}} = user <- Repo.insert(changeset) do
user
else
{:error, %Error{}} = error -> error
{:error, result} -> {:error, Error.build(:bad_request, result)}
end
end
defp client do
:rockelivery
|> Application.fetch_env!(__MODULE__)
|> Keyword.get(:via_cep_adapter)
end
end
| 26.125 | 69 | 0.620415 |
7389d9c98a1815af02a7c33e9db87d155be2b1e9 | 1,778 | ex | Elixir | test/support/model_case.ex | soarpatriot/flour | 52a57c553da84bd3abad5834014e06370f40a20b | [
"MIT"
] | 1 | 2016-08-05T07:03:43.000Z | 2016-08-05T07:03:43.000Z | test/support/model_case.ex | soarpatriot/flour | 52a57c553da84bd3abad5834014e06370f40a20b | [
"MIT"
] | null | null | null | test/support/model_case.ex | soarpatriot/flour | 52a57c553da84bd3abad5834014e06370f40a20b | [
"MIT"
] | null | null | null | defmodule Flour.ModelCase do
@moduledoc """
This module defines the test case to be used by
model tests.
You may define functions here to be used as helpers in
your model tests. See `errors_on/2`'s definition as reference.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias Flour.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Flour.ModelCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Flour.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Flour.Repo, {:shared, self()})
end
:ok
end
@doc """
Helper for returning list of errors in a struct when given certain data.
## Examples
Given a User schema that lists `:name` as a required field and validates
`:password` to be safe, it would return:
iex> errors_on(%User{}, %{password: "password"})
[password: "is unsafe", name: "is blank"]
You could then write your assertion like:
assert {:password, "is unsafe"} in errors_on(%User{}, %{password: "password"})
You can also create the changeset manually and retrieve the errors
field directly:
iex> changeset = User.changeset(%User{}, password: "password")
iex> {:password, "is unsafe"} in changeset.errors
true
"""
def errors_on(struct, data) do
struct.__struct__.changeset(struct, data)
|> Ecto.Changeset.traverse_errors(&Flour.ErrorHelpers.translate_error/1)
|> Enum.flat_map(fn {key, errors} -> for msg <- errors, do: {key, msg} end)
end
end
| 26.939394 | 84 | 0.68279 |
7389ebde9f4a8e31e634259c762865894727c589 | 1,859 | exs | Elixir | hermes/config/dev.exs | jparr721/Hermes | 0ce141897e292a6e4492461f6581f0619c43b8bf | [
"MIT"
] | null | null | null | hermes/config/dev.exs | jparr721/Hermes | 0ce141897e292a6e4492461f6581f0619c43b8bf | [
"MIT"
] | null | null | null | hermes/config/dev.exs | jparr721/Hermes | 0ce141897e292a6e4492461f6581f0619c43b8bf | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# command from your terminal:
#
# openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout priv/server.key -out priv/server.pem
#
# The `http:` config below can be replaced with:
# https: [port: 4000, keyfile: "priv/server.key", certfile: "priv/server.pem"],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
config :hermes, Hermes.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin",
cd: Path.expand("../", __DIR__)]]
# Watch static and templates for browser reloading.
config :hermes, Hermes.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{web/views/.*(ex)$},
~r{web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Configure your database
config :hermes, Hermes.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "root",
database: "hermes_dev",
hostname: "localhost",
pool_size: 10
| 32.614035 | 170 | 0.699839 |
738a092782f0b469dfba2e86950b1d86caaf5521 | 304 | ex | Elixir | lib/dml_runner/consumer.ex | sizumita/DMLRunner | 9200346be11cf7422f34d54c4065fab2010d79c4 | [
"MIT"
] | null | null | null | lib/dml_runner/consumer.ex | sizumita/DMLRunner | 9200346be11cf7422f34d54c4065fab2010d79c4 | [
"MIT"
] | null | null | null | lib/dml_runner/consumer.ex | sizumita/DMLRunner | 9200346be11cf7422f34d54c4065fab2010d79c4 | [
"MIT"
] | null | null | null | defmodule DmlRunner.Consumer do
use Nostrum.Consumer
alias DmlRunner.Command
alias Nostrum.Api
def start_link do
Consumer.start_link(__MODULE__)
end
def handle_event({:MESSAGE_CREATE, msg, _ws_state}) do
Command.handle(msg)
end
def handle_event(_event) do
:noop
end
end
| 16 | 56 | 0.733553 |
738a0d7414a22e6585f824e3bb41c7d303e2cf3e | 81 | ex | Elixir | lib/elixir/test/elixir/fixtures/parallel_compiler/bar.ex | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | 1 | 2021-04-28T21:35:01.000Z | 2021-04-28T21:35:01.000Z | lib/elixir/test/elixir/fixtures/parallel_compiler/bar.ex | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/fixtures/parallel_compiler/bar.ex | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | 8 | 2018-02-20T18:30:53.000Z | 2019-06-18T14:23:31.000Z | defmodule BarParallel do
end
require FooParallel
IO.puts(FooParallel.message())
| 13.5 | 30 | 0.82716 |
738a2dcce9051639c6b261216cfe3d427c0a1e2d | 4 | ex | Elixir | testData/org/elixir_lang/parser_definition/literal_sigil_line_parsing_test_case/Minimal.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/literal_sigil_line_parsing_test_case/Minimal.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/literal_sigil_line_parsing_test_case/Minimal.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | ~X{} | 4 | 4 | 0.25 |
738a39030ca65a50a41dc4412aa90a56e964becf | 144 | exs | Elixir | .formatter.exs | vforgione/plenario2 | 001526e5c60a1d32794a18f3fd65ead6cade1a29 | [
"Apache-2.0"
] | 13 | 2017-12-11T13:59:42.000Z | 2020-11-16T21:52:31.000Z | .formatter.exs | vforgione/plenario2 | 001526e5c60a1d32794a18f3fd65ead6cade1a29 | [
"Apache-2.0"
] | 310 | 2017-11-13T22:52:26.000Z | 2018-11-19T17:49:30.000Z | .formatter.exs | vforgione/plenario2 | 001526e5c60a1d32794a18f3fd65ead6cade1a29 | [
"Apache-2.0"
] | 3 | 2017-12-05T00:36:12.000Z | 2020-03-10T15:15:29.000Z | [
import_deps: [:ecto, :phoenix],
inputs: ["*.{ex,exs}", "{config,lib,priv,test}/**/*.{ex,exs}"],
subdirectories: ["priv/*/migrations"]
]
| 24 | 65 | 0.576389 |
738a4a204e19e0604831c4bdfc3061c5d43ab6d0 | 143 | exs | Elixir | test/chopperbot/split/order_calculator_test.exs | flipay/chopperbot | 29d81a343442bdd8eae7627bc6eb3c7d83cd0151 | [
"MIT"
] | 5 | 2019-12-14T03:12:28.000Z | 2020-03-04T12:58:44.000Z | test/chopperbot/split/order_calculator_test.exs | flipay/chopperbot | 29d81a343442bdd8eae7627bc6eb3c7d83cd0151 | [
"MIT"
] | 12 | 2020-01-07T09:31:33.000Z | 2020-03-27T06:11:21.000Z | test/chopperbot/split/order_calculator_test.exs | flipay/chopperbot | 29d81a343442bdd8eae7627bc6eb3c7d83cd0151 | [
"MIT"
] | 1 | 2020-01-09T10:35:37.000Z | 2020-01-09T10:35:37.000Z | defmodule Chopperbot.Split.OrderCalculatorTest do
use ExUnit.Case, async: true
doctest Chopperbot.Split.OrderCalculator, import: true
end
| 23.833333 | 56 | 0.818182 |
738a4a4ac2bcb2b6580a2ca191714f54f2d642e7 | 1,878 | exs | Elixir | clients/big_query_reservation/mix.exs | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/big_query_reservation/mix.exs | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/big_query_reservation/mix.exs | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigQueryReservation.Mixfile do
  @moduledoc false

  use Mix.Project

  @version "0.16.0"
  @source_url "https://github.com/googleapis/elixir-google-api/tree/master/clients/big_query_reservation"

  # Mix project definition for the BigQuery Reservation API client.
  def project() do
    [
      app: :google_api_big_query_reservation,
      version: @version,
      elixir: "~> 1.6",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      source_url: @source_url
    ]
  end

  # OTP application configuration: only the logger is required.
  def application(), do: [extra_applications: [:logger]]

  # Runtime and dev-only dependencies.
  defp deps() do
    [
      {:google_gax, "~> 0.4"},
      {:ex_doc, "~> 0.16", only: :dev}
    ]
  end

  # Package description shown on Hex.
  defp description() do
    """
    BigQuery Reservation API client library. A service to modify your BigQuery flat-rate reservations.
    """
  end

  # Hex package metadata.
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching", "Daniel Azuma"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => @source_url,
        "Homepage" => "https://cloud.google.com/bigquery/"
      }
    ]
  end
end
| 28.029851 | 112 | 0.663472 |
738a5972998080607fea039cf6df91246fd176fa | 829 | exs | Elixir | test/radex/writer/example_test.exs | smartlogic/radex | 001d2b6fc564d9fbcbd98fcffdc89a03dc210c80 | [
"MIT"
] | 3 | 2017-10-12T18:23:33.000Z | 2019-05-14T21:28:09.000Z | test/radex/writer/example_test.exs | smartlogic/radex | 001d2b6fc564d9fbcbd98fcffdc89a03dc210c80 | [
"MIT"
] | null | null | null | test/radex/writer/example_test.exs | smartlogic/radex | 001d2b6fc564d9fbcbd98fcffdc89a03dc210c80 | [
"MIT"
] | null | null | null | defmodule Radex.Writer.ExampleTest do
use ExUnit.Case
alias Radex.Conn
alias Radex.Metadata
alias Radex.Writer.Example
test "filters out empty examples from metadata" do
metadata = %{
"key1" => %Metadata{success: true},
"key2" => %Metadata{success: true, conns: [%Conn{}]}
}
assert Example.examples(metadata) == [%Metadata{success: true, conns: [%Conn{}]}]
end
test "ignore examples that are missing conns" do
assert Example.use_example?(%Metadata{success: true, conns: [%Conn{}]})
refute Example.use_example?(%Metadata{success: true, conns: []})
end
test "ignore examples that are not marked as successful" do
assert Example.use_example?(%Metadata{conns: [%Conn{}], success: true})
refute Example.use_example?(%Metadata{conns: [%Conn{}], success: false})
end
end
| 30.703704 | 85 | 0.68275 |
738a64943264a199f50010642c785a6bd4db1e56 | 3,275 | exs | Elixir | test/wunderground/astronomy_test.exs | optikfluffel/wunderground | 67ebd8fbb83f2f0d1eb1a6fba1273afa3cec8233 | [
"Unlicense"
] | 2 | 2017-08-23T21:48:07.000Z | 2017-10-16T21:35:36.000Z | test/wunderground/astronomy_test.exs | optikfluffel/wunderground | 67ebd8fbb83f2f0d1eb1a6fba1273afa3cec8233 | [
"Unlicense"
] | 8 | 2017-08-23T10:02:35.000Z | 2017-09-03T11:35:36.000Z | test/wunderground/astronomy_test.exs | optikfluffel/wunderground | 67ebd8fbb83f2f0d1eb1a6fba1273afa3cec8233 | [
"Unlicense"
] | 1 | 2021-06-22T15:02:15.000Z | 2021-06-22T15:02:15.000Z | defmodule Wunderground.AstronomyTest do
@moduledoc false
use ExUnit.Case, async: false
use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney
alias Wunderground.Astronomy
@not_found {:not_found, "No cities match your search query"}
@station_offline {:station_offline, "The station you're looking for either doesn't exist or is simply offline right now."}
describe "get/1" do
test "us" do
use_cassette "astronomy/us" do
assert {:ok, %Astronomy{}} = Astronomy.get({:us, "CA", "San_Francisco"})
end
end
test "us not_found" do
use_cassette "astronomy/us_not_found" do
assert {:error, @not_found} = Astronomy.get({:us, "CA", "USDUBFOURZEGBNUIZDSNGIUZFV"})
end
end
test "us_zip" do
use_cassette "astronomy/us_zip" do
assert {:ok, %Astronomy{}} = Astronomy.get({:us_zip, 60290})
end
end
test "us_zip not_found" do
use_cassette "astronomy/us_zip_not_found" do
assert {:error, @not_found} = Astronomy.get({:us_zip, -1})
end
end
test "international" do
use_cassette "astronomy/international" do
assert {:ok, %Astronomy{}} = Astronomy.get({:international, "Australia", "Sydney"})
end
end
test "international not_found" do
use_cassette "astronomy/international_not_found" do
assert {:error, @not_found} = Astronomy.get({:international, "Australia", "AUDUBFOURZEGBNUIZDSNGIUZFV"})
end
end
test "geo" do
use_cassette "astronomy/geo" do
assert {:ok, %Astronomy{}} = Astronomy.get({:geo, 37.8, -122.4})
end
end
test "geo not_found" do
use_cassette "astronomy/geo_not_found" do
assert {:error, @not_found} = Astronomy.get({:geo, 2500.0, -5000.0})
end
end
test "airport" do
use_cassette "astronomy/airport" do
assert {:ok, %Astronomy{}} = Astronomy.get({:airport, "KJFK"})
end
end
test "airport not_found" do
use_cassette "astronomy/airport_not_found" do
assert {:error, @not_found} = Astronomy.get({:airport, "AIRUBFOURZEGBNUIZDSNGIUZFV"})
end
end
test "pws" do
use_cassette "astronomy/pws" do
assert {:ok, %Astronomy{}} = Astronomy.get({:pws, "KCASANFR70"})
end
end
test "pws not_found" do
use_cassette "astronomy/pws_not_found" do
assert {:error, @station_offline} = Astronomy.get({:pws, "NOT_A_PWS_ID"})
end
end
test "auto_ip" do
use_cassette "astronomy/auto_ip" do
assert {:ok, %Astronomy{}} = Astronomy.get({:auto_ip})
end
end
test "auto_ip with given ip address" do
use_cassette "astronomy/auto_ip_custom" do
assert {:ok, %Astronomy{}} = Astronomy.get({:auto_ip, {185, 1, 74, 1}})
end
end
test "auto_ip with 'wrong' ip address tuple" do
assert_raise ArgumentError, fn ->
Astronomy.get({:auto_ip, {"185", "1", "74", "1"}})
end
end
test "auto_ip ArgumentError when no 4 element tuple is given" do
assert_raise ArgumentError, fn ->
Astronomy.get({:auto_ip, "185.1.74.1"})
end
end
test "ArgumentError" do
assert_raise ArgumentError, fn ->
Astronomy.get(:not_an_argument)
end
end
end
end
| 28.478261 | 124 | 0.630534 |
738a67be7d1d652d4ec30f64d305b7bfdb5b0e3e | 3,676 | ex | Elixir | apps/language_server/lib/language_server/providers/document_symbols.ex | tmepple/elixir-ls | 01d5ee857bb8b9e729da622dbc4f64b680aac3fc | [
"Apache-2.0"
] | 912 | 2017-06-08T03:58:03.000Z | 2021-09-06T03:42:07.000Z | apps/language_server/lib/language_server/providers/document_symbols.ex | tmepple/elixir-ls | 01d5ee857bb8b9e729da622dbc4f64b680aac3fc | [
"Apache-2.0"
] | 196 | 2017-06-09T23:32:16.000Z | 2021-10-15T15:38:43.000Z | apps/language_server/lib/language_server/providers/document_symbols.ex | tmepple/elixir-ls | 01d5ee857bb8b9e729da622dbc4f64b680aac3fc | [
"Apache-2.0"
] | 78 | 2017-07-06T18:35:34.000Z | 2020-04-12T08:10:45.000Z | defmodule ElixirLS.LanguageServer.Providers.DocumentSymbols do
@moduledoc """
Document Symbols provider
"""
@symbol_enum %{
file: 1,
module: 2,
namespace: 3,
package: 4,
class: 5,
method: 6,
property: 7,
field: 8,
constructor: 9,
enum: 10,
interface: 11,
function: 12,
variable: 13,
constant: 14,
string: 15,
number: 16,
boolean: 17,
array: 18,
object: 19,
key: 20,
null: 21,
enum_member: 22,
struct: 23,
event: 24,
operator: 25,
type_parameter: 26
}
def symbols(uri, text) do
symbols = list_symbols(text) |> Enum.map(&build_symbol_information(uri, &1))
{:ok, symbols}
end
defp list_symbols(src) do
{_ast, symbol_list} =
Code.string_to_quoted!(src, columns: true, line: 0)
|> Macro.prewalk([], fn ast, symbols ->
{ast, extract_module(ast) ++ symbols}
end)
symbol_list
end
# Identify and extract the module symbol, and the symbols contained within the module
defp extract_module({:defmodule, _, _child_ast} = ast) do
{_, _, [{:__aliases__, location, module_name}, [do: module_body]]} = ast
mod_defns =
case module_body do
{:__block__, [], mod_defns} -> mod_defns
stmt -> [stmt]
end
module_name = Enum.join(module_name, ".")
module_symbols =
mod_defns
|> Enum.map(&extract_symbol(module_name, &1))
|> Enum.reject(&is_nil/1)
[%{type: :module, name: module_name, location: location, container: nil}] ++ module_symbols
end
defp extract_module(_ast), do: []
# Module Variable
defp extract_symbol(_, {:@, _, [{:moduledoc, _, _}]}), do: nil
defp extract_symbol(_, {:@, _, [{:doc, _, _}]}), do: nil
defp extract_symbol(_, {:@, _, [{:spec, _, _}]}), do: nil
defp extract_symbol(_, {:@, _, [{:behaviour, _, _}]}), do: nil
defp extract_symbol(_, {:@, _, [{:impl, _, _}]}), do: nil
defp extract_symbol(_, {:@, _, [{:type, _, _}]}), do: nil
defp extract_symbol(_, {:@, _, [{:typedoc, _, _}]}), do: nil
defp extract_symbol(_, {:@, _, [{:enforce_keys, _, _}]}), do: nil
defp extract_symbol(current_module, {:@, _, [{name, location, _}]}) do
%{type: :constant, name: "@#{name}", location: location, container: current_module}
end
# Function
defp extract_symbol(current_module, {:def, _, [{_, location, _} = fn_head | _]}) do
%{
type: :function,
name: Macro.to_string(fn_head),
location: location,
container: current_module
}
end
# Private Function
defp extract_symbol(current_module, {:defp, _, [{_, location, _} = fn_head | _]}) do
%{
type: :function,
name: Macro.to_string(fn_head),
location: location,
container: current_module
}
end
# Macro
defp extract_symbol(current_module, {:defmacro, _, [{_, location, _} = fn_head | _]}) do
%{
type: :function,
name: Macro.to_string(fn_head),
location: location,
container: current_module
}
end
# Test
defp extract_symbol(current_module, {:test, location, [name | _]}) do
%{
type: :function,
name: ~s(test "#{name}"),
location: location,
container: current_module
}
end
defp extract_symbol(_, _), do: nil
defp build_symbol_information(uri, info) do
%{
name: info.name,
kind: @symbol_enum[info.type],
containerName: info.container,
location: %{
uri: uri,
range: %{
start: %{line: info.location[:line], character: info.location[:column] - 1},
end: %{line: info.location[:line], character: info.location[:column] - 1}
}
}
}
end
end
| 25.887324 | 95 | 0.593308 |
738a8191bae807243735af23d26ef3a83c49362d | 1,121 | exs | Elixir | test/game/command/debug_test.exs | jgsmith/ex_venture | 546adaa8fe80d45a72fde6de8d8d6906902c12d4 | [
"MIT"
] | 2 | 2019-05-14T11:36:44.000Z | 2020-07-01T08:54:04.000Z | test/game/command/debug_test.exs | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | null | null | null | test/game/command/debug_test.exs | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | 1 | 2021-01-29T14:12:40.000Z | 2021-01-29T14:12:40.000Z | defmodule Game.Command.DebugTest do
use ExVenture.CommandCase
alias Game.Command.Debug
doctest Debug
describe "list debug information for admins" do
setup do
user = %{flags: ["admin"]}
%{state: %{user: user, socket: :socket}}
end
test "displays debug information", %{state: state} do
:ok = Debug.run({:squabble}, state)
assert_socket_echo "node"
end
test "must be an admin", %{state: state} do
state = %{state | user: %{state.user | flags: []}}
:ok = Debug.run({:squabble}, state)
assert_socket_echo "must be an admin"
end
end
describe "list player information for admins" do
setup do
user = %{flags: ["admin"]}
%{state: %{user: user, socket: :socket}}
end
test "displays debug information", %{state: state} do
:ok = Debug.run({:players}, state)
assert_socket_echo "players"
end
test "must be an admin", %{state: state} do
state = %{state | user: %{state.user | flags: []}}
:ok = Debug.run({:players}, state)
assert_socket_echo "must be an admin"
end
end
end
| 21.557692 | 57 | 0.604817 |
738b0775d199b48fc57380f523900443f8fe1a67 | 3,939 | ex | Elixir | lib/phoenix/live_dashboard/components/shared_usage_card_component.ex | uumo/phoenix_live_dashboard | cd07929341d9dca0f85b58c624c62d63cf7c2f07 | [
"MIT"
] | null | null | null | lib/phoenix/live_dashboard/components/shared_usage_card_component.ex | uumo/phoenix_live_dashboard | cd07929341d9dca0f85b58c624c62d63cf7c2f07 | [
"MIT"
] | null | null | null | lib/phoenix/live_dashboard/components/shared_usage_card_component.ex | uumo/phoenix_live_dashboard | cd07929341d9dca0f85b58c624c62d63cf7c2f07 | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveDashboard.SharedUsageCardComponent do
use Phoenix.LiveDashboard.Web, :live_component
@csp_nonces %{img: nil, script: nil, style: nil}
@impl true
def mount(socket) do
{:ok, socket}
end
def normalize_params(params) do
params
|> validate_required([:usages, :total_data, :total_legend, :total_usage, :dom_id])
|> validate_usages()
|> put_defaults()
end
defp validate_usages(params = %{usages: usages}) do
normalized_usages =
Enum.map(usages, fn usage ->
validate_required(usage, [:data, :dom_sub_id])
put_usage_defaults(usage)
end)
%{params | usages: normalized_usages}
end
defp validate_required(params, list) do
case Enum.find(list, &(not Map.has_key?(params, &1))) do
nil -> :ok
key -> raise ArgumentError, "expected #{inspect(key)} parameter to be received"
end
params
end
defp put_usage_defaults(params) do
params
|> Map.put_new(:title, nil)
end
defp put_defaults(params) do
params
|> Map.put_new(:title, nil)
|> Map.put_new(:hint, nil)
|> Map.put_new(:inner_title, nil)
|> Map.put_new(:inner_hint, nil)
|> Map.put_new(:csp_nonces, @csp_nonces)
|> Map.put_new(:total_formatter, &"#{&1} %")
end
@impl true
def render(assigns) do
~L"""
<%= if @title do %>
<h5 class="card-title">
<%= @title %>
<%= if @hint do %>
<%= hint(do: @hint) %>
<% end %>
</h5>
<% end %>
<div class="card">
<%= if @inner_title do %>
<h5 class="card-title">
<%= @inner_title %>
<%= if @inner_hint do %>
<%= hint(do: @inner_hint) %>
<% end %>
</h5>
<% end %>
<div class="card-body">
<div phx-hook="PhxColorBarHighlight" id="cpu-color-bars">
<%= for usage <- @usages do %>
<div class="flex-grow-1 mb-3">
<div class="progress color-bar-progress flex-grow-1 mb-3">
<span class="color-bar-progress-title"><%= usage.title %></span>
<%= for {{name, value, color, _desc}, index} <- Enum.with_index(usage.data) do %>
<style nonce="<%= @csp_nonces.style %>">#<%= "cpu-#{usage.dom_sub_id}-progress-#{index}" %>{width:<%= value %>%}</style>
<div
title="<%= name %> - <%= format_percent(value) %>"
class="progress-bar color-bar-progress-bar bg-gradient-<%= color %>"
role="progressbar"
aria-valuenow="<%= maybe_round(value) %>"
aria-valuemin="0"
aria-valuemax="100"
data-name="<%= name %>"
data-empty="<%= empty?(value) %>"
id="<%= "cpu-#{usage.dom_sub_id}-progress-#{index}" %>">
</div>
<% end %>
</div>
</div>
<% end %>
<div class="color-bar-legend">
<div class="row">
<%= for {name, value, color, hint} <- @total_data do %>
<div class="col-lg-6 d-flex align-items-center py-1 flex-grow-0 color-bar-legend-entry" data-name="<%= name %>">
<div class="color-bar-legend-color bg-<%= color %> mr-2"></div>
<span><%= name %> <%= hint && hint(do: hint) %></span>
<span class="flex-grow-1 text-right text-muted"><%= @total_formatter.(value) %></span>
</div>
<% end %>
</div>
</div>
<div class="resource-usage-total text-center py-1 mt-3">
<%= @total_legend %> <%= @total_usage %>
</div>
</div>
</div>
</div>
"""
end
defp maybe_round(num) when is_integer(num), do: num
defp maybe_round(num), do: Float.ceil(num, 1)
defp empty?(value) when is_number(value) and value > 0, do: false
defp empty?(_), do: true
end
| 32.553719 | 138 | 0.520183 |
738b233d19a832194f5c442979f6ae4639449de0 | 77 | exs | Elixir | v01/ch15/case.exs | oiax/elixir-primer | c8b89a29f108cc335b8e1341b7a1e90ec12adc66 | [
"MIT"
] | null | null | null | v01/ch15/case.exs | oiax/elixir-primer | c8b89a29f108cc335b8e1341b7a1e90ec12adc66 | [
"MIT"
] | null | null | null | v01/ch15/case.exs | oiax/elixir-primer | c8b89a29f108cc335b8e1341b7a1e90ec12adc66 | [
"MIT"
] | null | null | null | n = 3
case n do
1 -> IO.puts "A"
2 -> IO.puts "B"
_ -> IO.puts "C"
end
| 11 | 18 | 0.467532 |
738b317d08476b39c185b58e229cf4ef78a2ba4e | 405 | exs | Elixir | test/prop_web/views/error_view_test.exs | nolantait/prop | d68d87d623b1d5352ba1b0f1d44e17bd7a119a3e | [
"MIT"
] | 22 | 2021-06-29T09:25:52.000Z | 2022-03-16T09:17:24.000Z | test/prop_web/views/error_view_test.exs | nolantait/prop | d68d87d623b1d5352ba1b0f1d44e17bd7a119a3e | [
"MIT"
] | 54 | 2021-06-28T19:54:17.000Z | 2022-03-27T06:49:30.000Z | test/prop_web/views/error_view_test.exs | nolantait/prop | d68d87d623b1d5352ba1b0f1d44e17bd7a119a3e | [
"MIT"
] | 2 | 2022-01-08T10:16:30.000Z | 2022-02-20T09:45:50.000Z | defmodule PropWeb.ErrorViewTest do
use PropWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(PropWeb.ErrorView, "404.html", []) == "Not Found"
end
test "renders 500.html" do
assert render_to_string(PropWeb.ErrorView, "500.html", []) == "Internal Server Error"
end
end
| 27 | 89 | 0.725926 |
738b63856dd658e29b2233e791266a8d390ad7e4 | 11,682 | ex | Elixir | lib/epicenter_web/live/case_investigation_contact_live.ex | geometricservices/epi-viewpoin | ecb5316ea0f3f7299d5ff63e2de588539005ac1c | [
"Apache-2.0"
] | 5 | 2021-02-25T18:43:09.000Z | 2021-02-27T06:00:35.000Z | lib/epicenter_web/live/case_investigation_contact_live.ex | geometricservices/epi-viewpoint | ecb5316ea0f3f7299d5ff63e2de588539005ac1c | [
"Apache-2.0"
] | 3 | 2021-12-13T17:52:47.000Z | 2021-12-17T01:35:31.000Z | lib/epicenter_web/live/case_investigation_contact_live.ex | geometricservices/epi-viewpoint | ecb5316ea0f3f7299d5ff63e2de588539005ac1c | [
"Apache-2.0"
] | 1 | 2022-01-27T23:26:38.000Z | 2022-01-27T23:26:38.000Z | defmodule EpicenterWeb.CaseInvestigationContactLive do
use EpicenterWeb, :live_view
import Epicenter.PhiValidation, only: [validate_phi: 2]
import EpicenterWeb.ConfirmationModal, only: [confirmation_prompt: 1]
import EpicenterWeb.IconView, only: [back_icon: 0]
import EpicenterWeb.LiveHelpers,
only: [
assign_defaults: 1,
assign_form_changeset: 2,
assign_form_changeset: 3,
assign_page_title: 2,
authenticate_user: 2,
noreply: 1,
ok: 1
]
alias Epicenter.AuditLog
alias Epicenter.Cases
alias Epicenter.ContactInvestigations.ContactInvestigation
alias Epicenter.Cases.Person
alias Epicenter.ContactInvestigations
alias EpicenterWeb.Format
alias EpicenterWeb.Form
defmodule ContactForm do
use Ecto.Schema
import Ecto.Changeset
alias Epicenter.DateParser
alias Epicenter.Extra
alias Epicenter.Validation
alias Euclid.Exists
embedded_schema do
field :contact_investigation_id, :string
field :person_id, :string
field :phone_id, :string
field :demographic_id, :string
field :guardian_name, :string
field :guardian_phone, :string
field :first_name, :string
field :last_name, :string
field :relationship_to_case, :string
field :same_household, :boolean
field :under_18, :boolean
field :dob, :string
field :phone, :string
field :preferred_language, :string
field :most_recent_date_together, :string
end
def changeset(%ContactInvestigation{} = contact_investigation, attrs) do
person = contact_investigation.exposed_person || %Person{demographics: [], phones: []}
demographic = Person.coalesce_demographics(person)
phone = List.first(person.phones) || %{id: nil, number: ""}
%__MODULE__{
contact_investigation_id: contact_investigation.id,
guardian_name: contact_investigation.guardian_name,
guardian_phone: contact_investigation.guardian_phone,
person_id: person.id,
demographic_id: demographic.id,
phone_id: phone.id,
first_name: demographic.first_name || "",
last_name: demographic.last_name || "",
phone: phone.number,
under_18: contact_investigation.under_18 || false,
dob: Format.date(demographic.dob),
same_household: contact_investigation.household_member || false,
relationship_to_case: contact_investigation.relationship_to_case,
preferred_language: demographic.preferred_language,
most_recent_date_together: Format.date(contact_investigation.most_recent_date_together)
}
|> cast(attrs, [
:first_name,
:last_name,
:relationship_to_case,
:same_household,
:guardian_name,
:guardian_phone,
:under_18,
:dob,
:phone,
:preferred_language,
:most_recent_date_together
])
|> validate_required([
:first_name,
:last_name,
:relationship_to_case,
:same_household,
:under_18,
:most_recent_date_together
])
|> validate_phi(:contact_investigation_form)
|> ContactInvestigation.validate_guardian_fields()
|> Validation.validate_date(:most_recent_date_together)
|> Validation.validate_date(:dob)
|> validate_under_18_and_dob()
end
def contact_params(%Ecto.Changeset{} = formdata) do
with {:ok, data} <- apply_action(formdata, :insert) do
phone =
if Euclid.Exists.present?(data.phone) do
%{source: "form", number: data.phone}
else
nil
end
{:ok,
%{
id: data.contact_investigation_id,
guardian_name: data.guardian_name,
guardian_phone: data.guardian_phone,
most_recent_date_together: DateParser.parse_mm_dd_yyyy!(data.most_recent_date_together),
relationship_to_case: data.relationship_to_case,
under_18: data.under_18,
household_member: data.same_household,
exposed_person: %{
id: data.person_id,
form_demographic: %{
id: data.demographic_id,
source: "form",
first_name: data.first_name,
last_name: data.last_name,
dob: data.dob |> DateParser.parse_mm_dd_yyyy!(),
preferred_language: data.preferred_language
},
additive_phone: phone
}
}}
end
end
def validate_under_18_and_dob(changeset) do
under_18? = get_field(changeset, :under_18) || false
dob =
case Extra.Changeset.has_error_on_field(changeset, :dob) do
true -> nil
false -> get_field(changeset, :dob)
end
cond do
Exists.present?(dob) && under_18? && age(dob) >= 18 ->
changeset |> add_error(:dob, "Must be under 18 years if 'This person is under 18 years old' is checked")
Exists.present?(dob) && !under_18? && age(dob) < 18 ->
changeset |> add_error(:dob, "Must be over 18 years if 'This person is under 18 years old' is not checked")
true ->
changeset
end
end
defp age(dob) do
{:ok, date} = dob |> DateParser.parse_mm_dd_yyyy()
Extra.Date.years_ago(date)
end
end
def mount(%{"case_investigation_id" => case_investigation_id} = params, session, socket) do
socket = socket |> authenticate_user(session)
case_investigation =
case_investigation_id
|> Cases.get_case_investigation(socket.assigns.current_user)
|> Cases.preload_person()
|> Cases.preload_initiating_lab_result()
contact_investigation =
if id = params["id"] do
ContactInvestigations.get(id, socket.assigns.current_user) |> ContactInvestigations.preload_exposed_person()
else
%ContactInvestigation{exposed_person: %Person{demographics: [], phones: []}}
end
socket
|> assign_defaults()
|> assign_page_title("Case Investigation Contact")
|> assign_form_changeset(ContactForm.changeset(contact_investigation, %{}))
|> assign(:contact_investigation, contact_investigation)
|> assign(:case_investigation, case_investigation)
|> ok()
end
def handle_event("change", %{"contact_form" => params}, socket) do
socket
|> assign_form_changeset(ContactForm.changeset(socket.assigns.contact_investigation, params))
|> noreply()
end
def handle_event("save", %{"contact_form" => params}, socket) do
contact_investigation = socket.assigns.contact_investigation
with {:form, {:ok, data}} <- {:form, ContactForm.changeset(contact_investigation, params) |> ContactForm.contact_params()},
data = data |> Map.put(:exposing_case_id, socket.assigns.case_investigation.id),
{:created, {:ok, _}} <- {:created, create_or_update_contact_investigation(contact_investigation, data, socket.assigns.current_user)} do
socket
|> push_redirect(to: "#{Routes.profile_path(socket, EpicenterWeb.ProfileLive, socket.assigns.case_investigation.person)}#case-investigations")
|> noreply()
else
{:form, {:error, changeset}} ->
socket
|> assign_form_changeset(changeset, "Check errors above")
|> noreply()
{:created, {:error, _changeset}} ->
socket
# This case should be unreachable as long as UI validation is more strict than db validation
|> assign(:form_error, "Validation failed and your contact form changes could not be saved")
|> noreply()
end
end
defp create_or_update_contact_investigation(contact_investigation, data, author) do
if data.id do
ContactInvestigations.update(
contact_investigation,
{data,
%Epicenter.AuditLog.Meta{
author_id: author.id,
reason_action: AuditLog.Revision.create_contact_action(),
reason_event: AuditLog.Revision.create_contact_event()
}}
)
else
ContactInvestigations.create(
{data,
%Epicenter.AuditLog.Meta{
author_id: author.id,
reason_action: AuditLog.Revision.update_contact_investigation_action(),
reason_event: AuditLog.Revision.update_contact_event()
}}
)
end
end
@preferred_language_options [
{"English", "English"},
{"Spanish", "Spanish"},
{"Arabic", "Arabic"},
{"Bengali", "Bengali"},
{"Chinese (Cantonese)", "Chinese (Cantonese)"},
{"Chinese (Mandarin)", "Chinese (Mandarin)"},
{"French", "French"},
{"Haitian Creole", "Haitian Creole"},
{"Hebrew", "Hebrew"},
{"Hindi", "Hindi"},
{"Italian", "Italian"},
{"Korean", "Korean"},
{"Polish", "Polish"},
{"Russian", "Russian"},
{"Swahili", "Swahili"},
{"Yiddish", "Yiddish"}
]
@relationship_options [
"Family",
"Partner or roommate",
"Healthcare worker",
"Neighbor",
"Co-worker",
"Friend",
"Teacher or childcare",
"Service provider"
]
def contact_form_builder(form, case_investigation, form_error) do
onset_date = case_investigation.symptom_onset_on
sampled_date = case_investigation.initiating_lab_result.sampled_on
infectious_seed_date = onset_date || sampled_date
infectious_period =
if(infectious_seed_date,
do: "#{infectious_seed_date |> Date.add(-2) |> Format.date()} - #{infectious_seed_date |> Date.add(10) |> Format.date()}",
else: "Unavailable"
)
under_18 = Epicenter.Extra.Changeset.get_field_from_changeset(form.source, :under_18)
contact_information = fn
form, true = _under_18 ->
form
|> Form.line(&Form.text_field(&1, :guardian_name, "Guardian's name", span: 4))
|> Form.line(&Form.text_field(&1, :guardian_phone, "Guardian's phone", span: 4))
form, _ = _under_18 ->
form
|> Form.line(&Form.text_field(&1, :phone, "Phone", span: 4))
end
Form.new(form)
|> Form.line(
&Form.content_div(
&1,
"Include people who live in the same house, or are from workspaces, shared meals, volunteer activities, playing sports, parties, places of worship, gym or exercise class, gatherings or social events, sporting events, and concerts.",
span: 8
)
)
|> Form.line(fn line ->
line
|> Form.text_field(:first_name, "First name")
|> Form.text_field(:last_name, "Last name")
end)
|> Form.line(&Form.radio_button_list(&1, :relationship_to_case, "Relationship to case", @relationship_options, span: 4))
|> Form.line(&Form.checkbox_field(&1, :same_household, nil, "This person lives in the same household", span: 8))
|> Form.line(&Form.checkbox_field(&1, :under_18, "Age", "This person is under 18 years old", span: 8))
|> contact_information.(under_18)
|> Form.line(&Form.text_field(&1, :dob, "Date of birth", span: 4))
|> Form.line(&Form.radio_button_list(&1, :preferred_language, "Preferred Language", @preferred_language_options, span: 4))
|> Form.line(
&Form.date_field(
&1,
:most_recent_date_together,
"Most recent day together",
explanation_text:
Enum.join(
[
"Onset date: #{if(onset_date, do: Format.date(onset_date), else: "Unavailable")}",
"Positive lab sample: #{if(sampled_date, do: Format.date(sampled_date), else: "Unavailable")}",
"Infectious period: #{infectious_period}"
],
"\n"
),
span: 4
)
)
|> Form.line(&Form.footer(&1, form_error, span: 4))
|> Form.safe()
end
end
| 34.767857 | 240 | 0.64441 |
738b712dc003ccedbbc2313040006c7e08c307a5 | 185 | ex | Elixir | debian/ndn-perl-apache2-mod-perl.cron.d.ex | dreamhost/dpkg-ndn-perl-mod-perl | b22e73bc5bbff804cece98fd2b8138b14883efd3 | [
"Apache-2.0"
] | null | null | null | debian/ndn-perl-apache2-mod-perl.cron.d.ex | dreamhost/dpkg-ndn-perl-mod-perl | b22e73bc5bbff804cece98fd2b8138b14883efd3 | [
"Apache-2.0"
] | null | null | null | debian/ndn-perl-apache2-mod-perl.cron.d.ex | dreamhost/dpkg-ndn-perl-mod-perl | b22e73bc5bbff804cece98fd2b8138b14883efd3 | [
"Apache-2.0"
] | null | null | null | #
# Regular cron jobs for the ndn-perl-apache2-mod-perl package
#
0 4 * * * root [ -x /usr/bin/ndn-perl-apache2-mod-perl_maintenance ] && /usr/bin/ndn-perl-apache2-mod-perl_maintenance
| 37 | 118 | 0.724324 |
738b84e8e9cf54bae3ff4de8acd45ca19401c66e | 1,429 | exs | Elixir | test/test_helper.exs | pmarreck/coherence | aa0ef8403197dfd262863f4b0e592122a1a3e525 | [
"MIT"
] | 1,347 | 2016-07-04T23:20:10.000Z | 2022-02-10T20:10:48.000Z | test/test_helper.exs | pmarreck/coherence | aa0ef8403197dfd262863f4b0e592122a1a3e525 | [
"MIT"
] | 378 | 2016-07-06T16:30:28.000Z | 2021-09-16T13:34:05.000Z | test/test_helper.exs | pmarreck/coherence | aa0ef8403197dfd262863f4b0e592122a1a3e525 | [
"MIT"
] | 276 | 2016-07-06T20:26:17.000Z | 2021-12-06T19:32:41.000Z | ExUnit.start()
Application.ensure_all_started(:coherence)
# Code.require_file("./support/gettext.exs", __DIR__)
# Code.require_file("./support/messages.exs", __DIR__)
# Code.require_file("./support/view_helpers.exs", __DIR__)
# Code.require_file("./support/web.exs", __DIR__)
# Code.require_file("./support/dummy_controller.exs", __DIR__)
# Code.require_file("./support/schema.exs", __DIR__)
# Code.require_file("./support/migrations.exs", __DIR__)
# Code.require_file("./support/router.exs", __DIR__)
# Code.require_file("./support/endpoint.exs", __DIR__)
# Code.require_file("./support/model_case.exs", __DIR__)
# Code.require_file("./support/conn_case.exs", __DIR__)
# Code.require_file("./support/views.exs", __DIR__)
# Code.require_file("./support/email.exs", __DIR__)
# Code.require_file("./support/test_helpers.exs", __DIR__)
# Code.require_file("./support/redirect.exs", __DIR__)
# Code.require_file("./support/schemas.exs", __DIR__)
defmodule Coherence.RepoSetup do
use ExUnit.CaseTemplate
end
TestCoherence.Repo.__adapter__().storage_down(TestCoherence.Repo.config())
TestCoherence.Repo.__adapter__().storage_up(TestCoherence.Repo.config())
{:ok, _pid} = TestCoherenceWeb.Endpoint.start_link()
{:ok, _pid} = TestCoherence.Repo.start_link()
_ = Ecto.Migrator.up(TestCoherence.Repo, 0, TestCoherence.Migrations, log: false)
Process.flag(:trap_exit, true)
Ecto.Adapters.SQL.Sandbox.mode(TestCoherence.Repo, :manual)
| 43.30303 | 81 | 0.76697 |
738bac902e304793be260a0f306958fca1b093af | 55 | ex | Elixir | lib/recycling_itsavesplanet_org_web/views/page_view.ex | AlShu/recycling.itsavesplanet.org | dd26944e39b250f3b0d12cbb99abb91e8f8ff49e | [
"MIT"
] | null | null | null | lib/recycling_itsavesplanet_org_web/views/page_view.ex | AlShu/recycling.itsavesplanet.org | dd26944e39b250f3b0d12cbb99abb91e8f8ff49e | [
"MIT"
] | null | null | null | lib/recycling_itsavesplanet_org_web/views/page_view.ex | AlShu/recycling.itsavesplanet.org | dd26944e39b250f3b0d12cbb99abb91e8f8ff49e | [
"MIT"
] | null | null | null | defmodule RISPWeb.PageView do
use RISPWeb, :view
end
| 13.75 | 29 | 0.781818 |
738bb07ac4dd00ab29057467862cbbf9d9d64d78 | 881 | exs | Elixir | mix.exs | HandOfGod94/vscode_exunit_formatter | ff10c51904435082516c926fb7ee3ca2c9104efe | [
"MIT"
] | null | null | null | mix.exs | HandOfGod94/vscode_exunit_formatter | ff10c51904435082516c926fb7ee3ca2c9104efe | [
"MIT"
] | null | null | null | mix.exs | HandOfGod94/vscode_exunit_formatter | ff10c51904435082516c926fb7ee3ca2c9104efe | [
"MIT"
] | null | null | null | defmodule VSCodeExUnitFormatter.MixProject do
use Mix.Project
def project do
[
app: :vscode_exunit_formatter,
version: "0.1.0",
elixir: "~> 1.9",
start_permanent: Mix.env() == :prod,
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [coveralls: :test, "coveralls.json": :test, "coveralls.html": :test],
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:jason, "~> 1.2"},
{:excoveralls, "~> 0.13.3", only: :test},
{:credo, "~> 1.5.0-rc.5", only: [:dev, :test], runtime: false}
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
]
end
end
| 25.911765 | 94 | 0.584563 |
738be0875a58b776bb82b61b89dd9a0ab38be331 | 2,841 | ex | Elixir | lib/scrivener/headers.ex | Sardoan/scrivener_headers_json | 80a684b90ee59cf455864bf61f2d41fc9ee4bba3 | [
"MIT"
] | null | null | null | lib/scrivener/headers.ex | Sardoan/scrivener_headers_json | 80a684b90ee59cf455864bf61f2d41fc9ee4bba3 | [
"MIT"
] | null | null | null | lib/scrivener/headers.ex | Sardoan/scrivener_headers_json | 80a684b90ee59cf455864bf61f2d41fc9ee4bba3 | [
"MIT"
] | null | null | null | defmodule Scrivener.Headers do
@moduledoc """
Helpers for paginating API responses with [Scrivener](https://github.com/drewolson/scrivener) and HTTP headers. Implements [RFC-5988](https://mnot.github.io/I-D/rfc5988bis/), the proposed standard for Web linking.
Use `paginate/2` to set the pagination headers:
def index(conn, params) do
page = MyApp.Person
|> where([p], p.age > 30)
|> order_by([p], desc: p.age)
|> preload(:friends)
|> MyApp.Repo.paginate(params)
conn
|> Scrivener.Headers.paginate(page)
|> render("index.json", people: page.entries)
end
"""
import Plug.Conn, only: [put_resp_header: 3]
@doc """
Add HTTP headers for a `Scrivener.Page`.
"""
@spec paginate(Plug.Conn.t, Scrivener.Page.t) :: Plug.Conn.t
def paginate(conn, page) do
uri = %URI{scheme: Atom.to_string(conn.scheme),
host: conn.host,
port: conn.port,
path: conn.request_path,
query: conn.query_string}
conn
|> put_resp_header(Application.get_env(:scrivener_headers_json, :link), build_link_header(uri, page))
|> put_resp_header(Application.get_env(:scrivener_headers_json, :total), Integer.to_string(page.total_entries))
|> put_resp_header(Application.get_env(:scrivener_headers_json, :per_page), Integer.to_string(page.page_size))
|> put_resp_header(Application.get_env(:scrivener_headers_json, :total_pages), Integer.to_string(page.total_pages))
|> put_resp_header(Application.get_env(:scrivener_headers_json, :page_number), Integer.to_string(page.page_number))
end
@spec build_link_header(URI.t, Scrivener.Page.t) :: String.t
defp build_link_header(uri, page) do
map = %{}
map
|> Map.put("first", link_str(uri, 1))
|> Map.put("last", link_str(uri, page.total_pages))
|> maybe_add_prev(uri, page.page_number, page.total_pages)
|> maybe_add_next(uri, page.page_number, page.total_pages)
|> Poison.encode!
end
defp link_str(%{query: req_query} = uri, page_number) do
query =
req_query
|> URI.decode_query()
|> Map.put("page", page_number)
|> URI.encode_query()
uri_str =
%URI{uri | query: query}
|> URI.to_string()
uri_str
end
defp maybe_add_prev(links, uri, page_number, total_pages) when 1 < page_number and page_number <= total_pages do
Map.put(links, "prev", link_str(uri, page_number - 1))
end
defp maybe_add_prev(links, _uri, _page_number, _total_pages) do
links
end
defp maybe_add_next(links, uri, page_number, total_pages) when 1 <= page_number and page_number < total_pages do
Map.put(links, "next", link_str(uri, page_number + 1))
end
defp maybe_add_next(links, _uri, _page_number, _total_pages) do
links
end
end
| 36.896104 | 215 | 0.669835 |
738bedb9bb3fb84cddd85b09393c86754f8d2982 | 11,136 | exs | Elixir | test/swoosh/adapters/socket_labs_test.exs | Deepwalker/swoosh | 5970e1a20e5d787347ce825f4803e972ddc30095 | [
"MIT"
] | 1 | 2020-12-22T19:28:30.000Z | 2020-12-22T19:28:30.000Z | test/swoosh/adapters/socket_labs_test.exs | Deepwalker/swoosh | 5970e1a20e5d787347ce825f4803e972ddc30095 | [
"MIT"
] | 21 | 2021-03-08T10:04:20.000Z | 2022-03-23T10:20:17.000Z | test/swoosh/adapters/socket_labs_test.exs | nash-io/swoosh | 05c8676890da07403225c302f9a069fc7d221330 | [
"MIT"
] | 1 | 2019-11-05T19:06:55.000Z | 2019-11-05T19:06:55.000Z | defmodule Swoosh.Adapters.SocketLabsTest do
use Swoosh.AdapterCase, async: true
import Swoosh.Email
alias Swoosh.Adapters.SocketLabs
@success_response """
{
"ErrorCode": "Success",
"MessageResults": [],
"TransactionReceipt": null
}
"""
setup do
bypass = Bypass.open()
config = [base_url: "http://localhost:#{bypass.port}", server_id: "1234", api_key: "some_key"]
valid_email =
new()
|> from("[email protected]")
|> to("[email protected]")
|> subject("Hello, Avengers!")
|> html_body("<h1>Hello</h1>")
{:ok, bypass: bypass, valid_email: valid_email, config: config}
end
test "a sent email results in :ok", %{bypass: bypass, config: config, valid_email: email} do
Bypass.expect(bypass, fn conn ->
conn = parse(conn)
body_params = %{
"APIKey" => "some_key",
"Messages" => [
%{
"From" => %{
"emailAddress" => "[email protected]"
},
"HtmlBody" => "<h1>Hello</h1>",
"Subject" => "Hello, Avengers!",
"To" => [
%{
"emailAddress" => "[email protected]"
}
]
}
],
"serverId" => "1234"
}
assert body_params == conn.body_params
assert "/email" == conn.request_path
assert "POST" == conn.method
Plug.Conn.resp(conn, 200, @success_response)
end)
assert SocketLabs.deliver(email, config) ==
{:ok, %{response_code: "Success", message_results: [], receipt: nil}}
end
test "delivery/1 with all fields returns :ok", %{bypass: bypass, config: config} do
email =
new()
|> from({"T Stark", "[email protected]"})
|> to("[email protected]")
|> to({"Steve Rogers", "[email protected]"})
|> subject("Hello, Avengers!")
|> html_body("<h1>Hello</h1>")
|> cc({"Bruce Banner", "[email protected]"})
|> cc("[email protected]")
|> bcc({"Clinton Francis Barton", "[email protected]"})
|> bcc("[email protected]")
|> reply_to("[email protected]")
|> html_body("<h1>Hello</h1>")
|> text_body("Hello")
Bypass.expect(bypass, fn conn ->
conn = parse(conn)
body_params = %{
"APIKey" => "some_key",
"Messages" => [
%{
"BCC" => [
%{"emailAddress" => "[email protected]"},
%{
"emailAddress" => "[email protected]",
"friendlyName" => "Clinton Francis Barton"
}
],
"CC" => [
%{"emailAddress" => "[email protected]"},
%{
"emailAddress" => "[email protected]",
"friendlyName" => "Bruce Banner"
}
],
"From" => %{
"emailAddress" => "[email protected]",
"friendlyName" => "T Stark"
},
"HtmlBody" => "<h1>Hello</h1>",
"ReplyTo" => %{"emailAddress" => "[email protected]"},
"Subject" => "Hello, Avengers!",
"TextBody" => "Hello",
"To" => [
%{
"emailAddress" => "[email protected]",
"friendlyName" => "Steve Rogers"
},
%{"emailAddress" => "[email protected]"}
]
}
],
"serverId" => "1234"
}
assert body_params == conn.body_params
assert "/email" == conn.request_path
assert "POST" == conn.method
Plug.Conn.resp(conn, 200, @success_response)
end)
assert SocketLabs.deliver(email, config) ==
{:ok, %{response_code: "Success", message_results: [], receipt: nil}}
end
test "delivery/1 with api template field returns :ok", %{bypass: bypass, config: config} do
email =
new()
|> from({"T Stark", "[email protected]"})
|> to("[email protected]")
|> put_provider_option(:api_template, "12345")
Bypass.expect(bypass, fn conn ->
conn = parse(conn)
body_params = %{
"APIKey" => "some_key",
"Messages" => [
%{
"ApiTemplate" => "12345",
"From" => %{
"emailAddress" => "[email protected]",
"friendlyName" => "T Stark"
},
"Subject" => "",
"To" => [
%{
"emailAddress" => "[email protected]"
}
]
}
],
"serverId" => "1234"
}
assert body_params == conn.body_params
assert "/email" == conn.request_path
assert "POST" == conn.method
Plug.Conn.resp(conn, 200, @success_response)
end)
assert SocketLabs.deliver(email, config) ==
{:ok, %{response_code: "Success", message_results: [], receipt: nil}}
end
test "delivery/1 with message id field returns :ok", %{bypass: bypass, config: config} do
email =
new()
|> from({"T Stark", "[email protected]"})
|> to("[email protected]")
|> put_provider_option(:message_id, "12345")
Bypass.expect(bypass, fn conn ->
conn = parse(conn)
body_params = %{
"APIKey" => "some_key",
"Messages" => [
%{
"From" => %{
"emailAddress" => "[email protected]",
"friendlyName" => "T Stark"
},
"MessageId" => "12345",
"Subject" => "",
"To" => [
%{
"emailAddress" => "[email protected]"
}
]
}
],
"serverId" => "1234"
}
assert body_params == conn.body_params
assert "/email" == conn.request_path
assert "POST" == conn.method
Plug.Conn.resp(conn, 200, @success_response)
end)
assert SocketLabs.deliver(email, config) ==
{:ok, %{response_code: "Success", message_results: [], receipt: nil}}
end
test "delivery/1 with mailing id field returns :ok", %{bypass: bypass, config: config} do
email =
new()
|> from({"T Stark", "[email protected]"})
|> to("[email protected]")
|> put_provider_option(:mailing_id, "12345")
Bypass.expect(bypass, fn conn ->
conn = parse(conn)
body_params = %{
"APIKey" => "some_key",
"Messages" => [
%{
"From" => %{
"emailAddress" => "[email protected]",
"friendlyName" => "T Stark"
},
"MailingId" => "12345",
"Subject" => "",
"To" => [
%{
"emailAddress" => "[email protected]"
}
]
}
],
"serverId" => "1234"
}
assert body_params == conn.body_params
assert "/email" == conn.request_path
assert "POST" == conn.method
Plug.Conn.resp(conn, 200, @success_response)
end)
assert SocketLabs.deliver(email, config) ==
{:ok, %{response_code: "Success", message_results: [], receipt: nil}}
end
test "delivery/1 with charset field returns :ok", %{bypass: bypass, config: config} do
email =
new()
|> from({"T Stark", "[email protected]"})
|> to("[email protected]")
|> put_provider_option(:charset, "12345")
Bypass.expect(bypass, fn conn ->
conn = parse(conn)
body_params = %{
"APIKey" => "some_key",
"Messages" => [
%{
"Charset" => "12345",
"From" => %{
"emailAddress" => "[email protected]",
"friendlyName" => "T Stark"
},
"Subject" => "",
"To" => [
%{
"emailAddress" => "[email protected]"
}
]
}
],
"serverId" => "1234"
}
assert body_params == conn.body_params
assert "/email" == conn.request_path
assert "POST" == conn.method
Plug.Conn.resp(conn, 200, @success_response)
end)
assert SocketLabs.deliver(email, config) ==
{:ok, %{response_code: "Success", message_results: [], receipt: nil}}
end
test "delivery/1 with headers returns :ok", %{bypass: bypass, config: config} do
email =
new()
|> from({"T Stark", "[email protected]"})
|> to("[email protected]")
|> header("Header1", "1234567890")
|> header("Header2", "12345")
Bypass.expect(bypass, fn conn ->
conn = parse(conn)
body_params = %{
"APIKey" => "some_key",
"Messages" => [
%{
"CustomHeaders" => %{
"Header1" => "1234567890",
"Header2" => "12345"
},
"From" => %{
"emailAddress" => "[email protected]",
"friendlyName" => "T Stark"
},
"Subject" => "",
"To" => [
%{
"emailAddress" => "[email protected]"
}
]
}
],
"serverId" => "1234"
}
assert body_params == conn.body_params
assert "/email" == conn.request_path
assert "POST" == conn.method
Plug.Conn.resp(conn, 200, @success_response)
end)
assert SocketLabs.deliver(email, config) ==
{:ok, %{response_code: "Success", message_results: [], receipt: nil}}
end
test "delivery/1 with merge data returns :ok", %{bypass: bypass, config: config} do
email =
new()
|> from({"T Stark", "[email protected]"})
|> to("[email protected]")
|> put_provider_option(:merge_data, %{
"PerMessage" => %{
"per_message1" => "value1",
"per_message2" => "value2"
},
"Global" => %{
"global1" => "value1",
"global2" => "value2"
}
})
Bypass.expect(bypass, fn conn ->
conn = parse(conn)
body_params = %{
"APIKey" => "some_key",
"Messages" => [
%{
"From" => %{
"emailAddress" => "[email protected]",
"friendlyName" => "T Stark"
},
"MergeData" => %{
"PerMessage" => %{"per_message1" => "value1", "per_message2" => "value2"},
"Global" => %{"global1" => "value1", "global2" => "value2"}
},
"Subject" => "",
"To" => [
%{
"emailAddress" => "[email protected]"
}
]
}
],
"serverId" => "1234"
}
assert body_params == conn.body_params
assert "/email" == conn.request_path
assert "POST" == conn.method
Plug.Conn.resp(conn, 200, @success_response)
end)
assert SocketLabs.deliver(email, config) ==
{:ok, %{response_code: "Success", message_results: [], receipt: nil}}
end
end
| 28.335878 | 98 | 0.485004 |
738c01cbc5d7cc762685121764945f5c2467aa5b | 6,027 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/operating_system_versions.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/operating_system_versions.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/operating_system_versions.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.DFAReporting.V33.Api.OperatingSystemVersions do
@moduledoc """
API calls for all endpoints tagged `OperatingSystemVersions`.
"""
alias GoogleApi.DFAReporting.V33.Connection
alias GoogleApi.Gax.{Request, Response}
@doc """
Gets one operating system version by ID.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `id` (*type:* `String.t`) - Operating system version ID.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V33.Model.OperatingSystemVersion{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_operating_system_versions_get(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.DFAReporting.V33.Model.OperatingSystemVersion.t()}
| {:error, Tesla.Env.t()}
def dfareporting_operating_system_versions_get(
connection,
profile_id,
id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url(
"/dfareporting/v3.3/userprofiles/{profileId}/operatingSystemVersions/{id}",
%{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
"id" => URI.encode(id, &URI.char_unreserved?/1)
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.OperatingSystemVersion{}]
)
end
@doc """
Retrieves a list of operating system versions.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V33.Model.OperatingSystemVersionsListResponse{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_operating_system_versions_list(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.DFAReporting.V33.Model.OperatingSystemVersionsListResponse.t()}
| {:error, Tesla.Env.t()}
def dfareporting_operating_system_versions_list(
connection,
profile_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/operatingSystemVersions", %{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.OperatingSystemVersionsListResponse{}]
)
end
end
| 38.883871 | 187 | 0.640285 |
738c1aa5e6e8a48c831d9ab777555576e2d489ae | 66 | exs | Elixir | 2020/otp/dash/test/test_helper.exs | herminiotorres/programmer_passport | d1786518a3a5f82471457e0ace41c4c33343739a | [
"MIT"
] | null | null | null | 2020/otp/dash/test/test_helper.exs | herminiotorres/programmer_passport | d1786518a3a5f82471457e0ace41c4c33343739a | [
"MIT"
] | null | null | null | 2020/otp/dash/test/test_helper.exs | herminiotorres/programmer_passport | d1786518a3a5f82471457e0ace41c4c33343739a | [
"MIT"
] | null | null | null | ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(Dash.Repo, :manual)
| 22 | 50 | 0.772727 |
738c25f342242810ce1cc8356a624719718e5bab | 1,764 | exs | Elixir | apps/admin_api/test/admin_api/v1/views/account_membership_view_test.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | apps/admin_api/test/admin_api/v1/views/account_membership_view_test.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | apps/admin_api/test/admin_api/v1/views/account_membership_view_test.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule AdminAPI.V1.AccountMembershipViewTest do
use AdminAPI.ViewCase, :v1
alias AdminAPI.V1.AccountMembershipView
alias EWallet.Web.V1.MembershipSerializer
alias EWalletDB.Repo
describe "AccountMembershipView.render/2" do
test "renders memberships.json with users response" do
membership1 = :membership |> insert() |> Repo.preload([:user, :role])
membership2 = :membership |> insert() |> Repo.preload([:user, :role])
memberships = [membership1, membership2]
expected = %{
version: @expected_version,
success: true,
data: %{
object: "list",
data: [
MembershipSerializer.serialize(membership1),
MembershipSerializer.serialize(membership2)
]
}
}
assert AccountMembershipView.render("memberships.json", %{memberships: memberships}) ==
expected
end
test "renders empty.json correctly" do
assert AccountMembershipView.render("empty.json", %{success: true}) ==
%{
version: @expected_version,
success: true,
data: %{}
}
end
end
end
| 33.283019 | 93 | 0.657596 |
738c7099b1c12f13e9ca26fcc68d712b65ca8cdd | 956 | ex | Elixir | test/support/properties_test_components.ex | darraghenright/surface | 6a6b1699f2f04c4f45686d96d48e33ab521423c9 | [
"MIT"
] | null | null | null | test/support/properties_test_components.ex | darraghenright/surface | 6a6b1699f2f04c4f45686d96d48e33ab521423c9 | [
"MIT"
] | null | null | null | test/support/properties_test_components.ex | darraghenright/surface | 6a6b1699f2f04c4f45686d96d48e33ab521423c9 | [
"MIT"
] | null | null | null | defmodule Surface.PropertiesTest.Components do
defmodule MyComponent do
use Surface.Component
@doc "The label"
property label, :string, required: true, default: ""
@doc "The class"
property class, :css_class
def render(assigns) do
~H"""
<div />
"""
end
end
defmodule MyComponentWithModuledoc do
use Surface.Component
@moduledoc """
My component with @moduledoc
"""
@doc "The label"
property label, :string, required: true, default: ""
@doc "The class"
property class, :css_class
def render(assigns) do
~H"""
<div />
"""
end
end
defmodule MyComponentWithModuledocFalse do
use Surface.Component
@moduledoc false
@doc "The label"
property label, :string, required: true, default: ""
@doc "The class"
property class, :css_class
def render(assigns) do
~H"""
<div />
"""
end
end
end
| 17.071429 | 56 | 0.60251 |
738c898e8c96747c9414c25ec81bdea5a4b938eb | 2,264 | exs | Elixir | test/ex_aws/instance_meta_test.exs | sivsushruth/ex_aws | 45a6fa6b6ed539066d1a5be0b04ef43d8305c4d0 | [
"MIT"
] | null | null | null | test/ex_aws/instance_meta_test.exs | sivsushruth/ex_aws | 45a6fa6b6ed539066d1a5be0b04ef43d8305c4d0 | [
"MIT"
] | null | null | null | test/ex_aws/instance_meta_test.exs | sivsushruth/ex_aws | 45a6fa6b6ed539066d1a5be0b04ef43d8305c4d0 | [
"MIT"
] | null | null | null | defmodule ExAws.InstanceMetaTest do
use ExUnit.Case, async: false
import Mox
# Let expect statements apply to ExAws.InstanceMetaTokenProvider process as well
setup :set_mox_from_context
test "instance_role" do
role_name = "dummy-role"
ExAws.Request.HttpMock
|> expect(:request, fn :put, _url, _body, _headers, _opts ->
{:ok, %{status_code: 200, body: "dummy-token"}}
end)
|> expect(:request, fn _method, _url, _body, _headers, _opts ->
{:ok, %{status_code: 200, body: role_name}}
end)
config =
ExAws.Config.new(:s3,
http_client: ExAws.Request.HttpMock,
access_key_id: "dummy",
secret_access_key: "dummy",
# Don't cache the metadata token, so we can always expect a request to get the token
no_metadata_token_cache: true
)
assert ExAws.InstanceMeta.instance_role(config) == role_name
end
describe "metadata options" do
setup %{metadata_opts: metadata_opts} do
metadata_opts_old = Application.get_env(:ex_aws, :metadata, nil)
on_exit(fn ->
case metadata_opts_old do
nil ->
Application.delete_env(:ex_aws, :metadata)
_other ->
Application.put_env(:ex_aws, :metadata, metadata_opts_old)
end
end)
Application.put_env(:ex_aws, :metadata, metadata_opts)
end
@tag metadata_opts: [http_opts: [pool: :ex_aws_metadata]]
test "separate http opts for instance metadata" do
role_name = "dummy-role"
ExAws.Request.HttpMock
|> expect(:request, fn :put, _url, _body, _headers, opts ->
{:ok, %{status_code: 200, body: "dummy-token"}}
end)
|> expect(:request, fn _method, _url, _body, _headers, opts ->
assert Keyword.get(opts, :pool) == :ex_aws_metadata
{:ok, %{status_code: 200, body: role_name}}
end)
config =
ExAws.Config.new(:ec2,
http_client: ExAws.Request.HttpMock,
access_key_id: "dummy",
secret_access_key: "dummy",
# Don't cache the metadata token, so we can always expect a request to get the token
no_metadata_token_cache: true
)
assert ExAws.InstanceMeta.instance_role(config) == role_name
end
end
end
| 30.186667 | 94 | 0.639134 |
738cd3bbe849a41dbb54206036efa962569938e0 | 152 | ex | Elixir | apps/snitch_api/lib/snitch_api_web/views/option_type_view.ex | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 456 | 2018-09-20T02:40:59.000Z | 2022-03-07T08:53:48.000Z | apps/snitch_api/lib/snitch_api_web/views/option_type_view.ex | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 273 | 2018-09-19T06:43:43.000Z | 2021-08-07T12:58:26.000Z | apps/snitch_api/lib/snitch_api_web/views/option_type_view.ex | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 122 | 2018-09-26T16:32:46.000Z | 2022-03-13T11:44:19.000Z | defmodule SnitchApiWeb.OptionTypeView do
use SnitchApiWeb, :view
use JaSerializer.PhoenixView
attributes([
:display_name,
:name
])
end
| 15.2 | 40 | 0.730263 |
738cd8e7c984198c3db8944bf69e81f0d624acbb | 1,646 | exs | Elixir | backend/config/config.exs | harlem88/edgehog | 7a278d119c3d592431fdbba406207376e194f7eb | [
"Apache-2.0"
] | null | null | null | backend/config/config.exs | harlem88/edgehog | 7a278d119c3d592431fdbba406207376e194f7eb | [
"Apache-2.0"
] | null | null | null | backend/config/config.exs | harlem88/edgehog | 7a278d119c3d592431fdbba406207376e194f7eb | [
"Apache-2.0"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
import Config
config :edgehog,
ecto_repos: [Edgehog.Repo]
# Configures the endpoint
config :edgehog, EdgehogWeb.Endpoint,
url: [host: "localhost"],
render_errors: [view: EdgehogWeb.ErrorView, accepts: ~w(json), layout: false],
pubsub_server: Edgehog.PubSub,
live_view: [signing_salt: "aiSLZVyY"]
# Configures the mailer
#
# By default it uses the "Local" adapter which stores the emails
# locally. You can see the emails in your browser, at "/dev/mailbox".
#
# For production it's recommended to configure a different adapter
# at the `config/runtime.exs`.
config :edgehog, Edgehog.Mailer, adapter: Swoosh.Adapters.Local
# Swoosh API client is needed for adapters other than SMTP.
config :swoosh, :api_client, false
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
config :tesla, :adapter, {Tesla.Adapter.Finch, name: EdgehogFinch}
config :ex_aws,
json_codec: Jason
config :edgehog, EdgehogWeb.Auth.Token,
allowed_algos: [
"ES256",
"ES384",
"ES512",
"PS256",
"PS384",
"PS512",
"RS256",
"RS384",
"RS512"
]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{config_env()}.exs"
| 26.983607 | 80 | 0.73147 |
738cfaf846876409570b9e1d39d6ccd9d8e10b66 | 64 | ex | Elixir | lib/web/views/setting_view.ex | coolandcodes/CodeSplintaCollector | fb1339022c23c11c336393b131dc6c1df4afdbd8 | [
"MIT"
] | null | null | null | lib/web/views/setting_view.ex | coolandcodes/CodeSplintaCollector | fb1339022c23c11c336393b131dc6c1df4afdbd8 | [
"MIT"
] | null | null | null | lib/web/views/setting_view.ex | coolandcodes/CodeSplintaCollector | fb1339022c23c11c336393b131dc6c1df4afdbd8 | [
"MIT"
] | null | null | null | defmodule TlcApp.Web.SettingView do
use TlcApp.Web, :view
end
| 16 | 35 | 0.78125 |
738d2a7dc6401009c9435729e5eddbd6a5554fbd | 1,836 | exs | Elixir | config/config.exs | danielhessell/wabanex | c0ad713a558a3cd52ca83bd22cd093e3c3c1b0f5 | [
"MIT"
] | null | null | null | config/config.exs | danielhessell/wabanex | c0ad713a558a3cd52ca83bd22cd093e3c3c1b0f5 | [
"MIT"
] | null | null | null | config/config.exs | danielhessell/wabanex | c0ad713a558a3cd52ca83bd22cd093e3c3c1b0f5 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
import Config
config :wabanex,
ecto_repos: [Wabanex.Repo]
# To accept only UUID
config :wabanex, Wabanex.Repo,
migration_primary_key: [type: :binary_id],
migration_foreign_key: [type: :binary_id]
# Configures the endpoint
config :wabanex, WabanexWeb.Endpoint,
url: [host: "localhost"],
render_errors: [view: WabanexWeb.ErrorView, accepts: ~w(json), layout: false],
pubsub_server: Wabanex.PubSub,
live_view: [signing_salt: "Fz3CQTjZ"]
# Configures the mailer
#
# By default it uses the "Local" adapter which stores the emails
# locally. You can see the emails in your browser, at "/dev/mailbox".
#
# For production it's recommended to configure a different adapter
# at the `config/runtime.exs`.
config :wabanex, Wabanex.Mailer, adapter: Swoosh.Adapters.Local
# Swoosh API client is needed for adapters other than SMTP.
config :swoosh, :api_client, false
# Configure esbuild (the version is required)
config :esbuild,
version: "0.14.0",
default: [
args:
~w(js/app.js --bundle --target=es2017 --outdir=../priv/static/assets --external:/fonts/* --external:/images/*),
cd: Path.expand("../assets", __DIR__),
env: %{"NODE_PATH" => Path.expand("../deps", __DIR__)}
]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{config_env()}.exs"
| 31.655172 | 117 | 0.734205 |
738d2ff1b7bd78530119ad7ebc9e70cbf25f0321 | 242 | exs | Elixir | config/config.exs | hexedpackets/fault_tree | 340d08e8ea7733999531f79bdc712515053e0e85 | [
"Apache-2.0"
] | 3 | 2019-06-05T14:16:24.000Z | 2020-08-10T00:33:05.000Z | config/config.exs | hexedpackets/fault_tree | 340d08e8ea7733999531f79bdc712515053e0e85 | [
"Apache-2.0"
] | 15 | 2019-05-15T21:38:48.000Z | 2019-06-04T17:26:43.000Z | config/config.exs | hexedpackets/fault_tree | 340d08e8ea7733999531f79bdc712515053e0e85 | [
"Apache-2.0"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
config :trot, router: FaultTree.Router
config :logger, level: :debug
import_config "#{Mix.env()}.exs"
| 24.2 | 61 | 0.764463 |
738d6c5e85f28495fb86e1347ad19458499cc6cd | 917 | ex | Elixir | lib/changelog/transcripts/parser.ex | d-m-u/changelog.com | bb0d6ac6d29a3d64dbeb44892f9a8a1ff3ba6325 | [
"MIT"
] | 1 | 2021-01-06T18:21:45.000Z | 2021-01-06T18:21:45.000Z | lib/changelog/transcripts/parser.ex | codexn/changelog.com | 25ce501ee62eef76731c38d590667e8132096ba8 | [
"MIT"
] | null | null | null | lib/changelog/transcripts/parser.ex | codexn/changelog.com | 25ce501ee62eef76731c38d590667e8132096ba8 | [
"MIT"
] | null | null | null | defmodule Changelog.Transcripts.Parser do
@speaker_regex ~r{\*\*(.*?):\*\*}
def parse_text(string, participants \\ [])
def parse_text(string, participants) when is_nil(string), do: parse_text("", participants)
def parse_text(string, participants) do
@speaker_regex
|> Regex.split(string, include_captures: true, trim: true)
|> Enum.chunk_every(2)
|> Enum.map(fn tuple ->
[speaker_section, content_section] = tuple
speaker_name =
case Regex.run(@speaker_regex, speaker_section) do
[_, name] -> name
nil -> "Unknown"
end
speaker_id =
Enum.find_value(participants, fn x ->
if x.name == speaker_name do
x.id
end
end)
%{
"title" => speaker_name,
"person_id" => speaker_id,
"body" => String.trim(content_section)
}
end)
|> List.flatten()
end
end
| 25.472222 | 92 | 0.586696 |
738d7d7d0834a242a909afec85bd198c46ae7c1c | 1,192 | ex | Elixir | lib/time_tracking_web/channels/user_socket.ex | leifg/time_tracking | 115c53482398f5aa6a2dbb246bee14c9f075b767 | [
"MIT"
] | 6 | 2018-01-09T08:57:20.000Z | 2018-06-20T08:59:42.000Z | lib/time_tracking_web/channels/user_socket.ex | leifg/time_tracking | 115c53482398f5aa6a2dbb246bee14c9f075b767 | [
"MIT"
] | 80 | 2016-05-04T20:31:37.000Z | 2021-07-12T05:12:29.000Z | lib/time_tracking_web/channels/user_socket.ex | leifg/time_tracking | 115c53482398f5aa6a2dbb246bee14c9f075b767 | [
"MIT"
] | null | null | null | defmodule TimeTrackingWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "rooms:*", TimeTrackingWeb.RoomChannel
## Transports
transport(:websocket, Phoenix.Transports.WebSocket)
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "users_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# TimeTrackingWeb.Endpoint.broadcast("users_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 31.368421 | 88 | 0.707215 |
738d9e333a70f3caffd0c6bc4c8f02d059589005 | 184 | exs | Elixir | test/util/words_test.exs | Celeo/gandalf_bot | f8e48b9a0b403fac5e29e514ce6c684d1bebeecc | [
"MIT"
] | null | null | null | test/util/words_test.exs | Celeo/gandalf_bot | f8e48b9a0b403fac5e29e514ce6c684d1bebeecc | [
"MIT"
] | null | null | null | test/util/words_test.exs | Celeo/gandalf_bot | f8e48b9a0b403fac5e29e514ce6c684d1bebeecc | [
"MIT"
] | null | null | null | defmodule Bot.Util.Words.Test do
alias Bot.Util.Words
use ExUnit.Case
test "can load words file" do
words = Words.load_words!()
assert length(words) > 100_000
end
end
| 18.4 | 34 | 0.701087 |
738dbe97c85df9b9b7835677df36ef085716f3a3 | 1,547 | ex | Elixir | fixtures/elixir_output/get_complex_url_params.ex | martinsirbe/curlconverter | c5324e85d2ca24ef4743fb2bb36139d23367e293 | [
"MIT"
] | 4,955 | 2015-01-02T09:04:20.000Z | 2021-10-06T03:54:43.000Z | fixtures/elixir_output/get_complex_url_params.ex | martinsirbe/curlconverter | c5324e85d2ca24ef4743fb2bb36139d23367e293 | [
"MIT"
] | 242 | 2015-03-27T05:59:11.000Z | 2021-10-03T08:36:05.000Z | fixtures/elixir_output/get_complex_url_params.ex | martinsirbe/curlconverter | c5324e85d2ca24ef4743fb2bb36139d23367e293 | [
"MIT"
] | 504 | 2015-01-02T16:04:36.000Z | 2021-10-01T03:43:55.000Z | request = %HTTPoison.Request{
method: :get,
url: "https://www.nomador.com/house-sitting/",
options: [],
headers: [],
params: [
{~s|page|, ~s|1|},
{~s|available|, ["", ~s|1|]},
{~s|location|, ~s|0|},
{~s|city[id]|, ~s|0|},
{~s|city[locality]|, ""},
{~s|city[locality_text]|, ""},
{~s|city[administrative_area_level_2]|, ""},
{~s|city[administrative_area_level_2_text]|, ""},
{~s|city[administrative_area_level_1]|, ""},
{~s|city[administrative_area_level_1_text]|, ""},
{~s|city[country]|, ""},
{~s|city[country_text]|, ""},
{~s|city[latitude]|, ""},
{~s|city[longitude]|, ""},
{~s|city[zoom]|, ""},
{~s|city[name]|, ""},
{~s|region[id]|, ~s|0|},
{~s|region[locality]|, ""},
{~s|region[locality_text]|, ""},
{~s|region[administrative_area_level_2]|, ""},
{~s|region[administrative_area_level_2_text]|, ""},
{~s|region[administrative_area_level_1]|, ""},
{~s|region[administrative_area_level_1_text]|, ""},
{~s|region[country]|, ""},
{~s|region[country_text]|, ""},
{~s|region[latitude]|, ""},
{~s|region[longitude]|, ""},
{~s|region[zoom]|, ""},
{~s|region[name]|, ""},
{~s|country|, ""},
{~s|environment|, ""},
{~s|population|, ""},
{~s|period|, ~s|0|},
{~s|date|, ~s|2017-03-03|},
{~s|datestart|, ~s|2017-03-03|},
{~s|dateend|, ~s|2017-06-24|},
{~s|season|, ""},
{~s|duration|, ""},
{~s|isfd|, ""},
{~s|stopover|, ""},
],
body: ""
}
response = HTTPoison.request(request)
| 29.75 | 55 | 0.51713 |
738e19733ab552c3ba598b379e74b56ca9128a4c | 4,323 | ex | Elixir | lib/prometheus_phx.ex | theblitzapp/prometheus-phx | 0234a2464be673d33fc3da0c44298d85e60f7af7 | [
"Apache-2.0"
] | 5 | 2020-09-19T22:29:41.000Z | 2021-03-28T03:17:00.000Z | lib/prometheus_phx.ex | theblitzapp/prometheus-phx | 0234a2464be673d33fc3da0c44298d85e60f7af7 | [
"Apache-2.0"
] | 1 | 2021-04-15T19:03:45.000Z | 2021-04-15T19:03:45.000Z | lib/prometheus_phx.ex | theblitzapp/prometheus-phx | 0234a2464be673d33fc3da0c44298d85e60f7af7 | [
"Apache-2.0"
] | 4 | 2020-10-29T15:28:08.000Z | 2021-12-15T17:44:31.000Z | defmodule PrometheusPhx do
@moduledoc """
Handle the telemetry messages broadcasted from Phoenix
To attach to the Phoenix telemetry messages call the `setup/0` function. Then the handlers will receive the messages and write to prometheus.
"""
use Prometheus.Metric
require Prometheus.Contrib.HTTP
alias Prometheus.Contrib.HTTP
@duration_unit :microseconds
def setup do
events = [
[:phoenix, :endpoint, :stop],
[:phoenix, :error_rendered],
[:phoenix, :channel_joined],
[:phoenix, :channel_handled_in]
]
:telemetry.attach_many(
"telemetry_web__event_handler",
events,
&handle_event/4,
nil
)
Histogram.declare(
name: :"phoenix_controller_call_duration_#{@duration_unit}",
help: "Whole controller pipeline execution time in #{@duration_unit}.",
labels: [:action, :controller, :status],
buckets: HTTP.microseconds_duration_buckets(),
duration_unit: @duration_unit,
registry: :default
)
Histogram.declare(
name: :"phoenix_controller_error_rendered_duration_#{@duration_unit}",
help: "View error rendering time in #{@duration_unit}.",
labels: [:action, :controller, :status],
buckets: HTTP.microseconds_duration_buckets(),
duration_unit: @duration_unit,
registry: :default
)
Histogram.declare(
name: :"phoenix_channel_join_duration_#{@duration_unit}",
help: "Phoenix channel join handler time in #{@duration_unit}",
labels: [:channel, :topic, :transport],
buckets: HTTP.microseconds_duration_buckets(),
duration_unit: @duration_unit,
registry: :default
)
Histogram.declare(
name: :"phoenix_channel_receive_duration_#{@duration_unit}",
help: "Phoenix channel receive handler time in #{@duration_unit}",
labels: [:channel, :topic, :transport, :event],
buckets: HTTP.microseconds_duration_buckets(),
duration_unit: @duration_unit,
registry: :default
)
end
def handle_event([:phoenix, :endpoint, :stop], %{duration: duration}, metadata, _config) do
with labels when is_list(labels) <- labels(metadata) do
Histogram.observe(
[
name: :"phoenix_controller_call_duration_#{@duration_unit}",
labels: labels,
registry: :default
],
duration
)
end
end
def handle_event([:phoenix, :error_rendered], %{duration: duration}, metadata, _config) do
with labels when is_list(labels) <- labels(metadata) do
Histogram.observe(
[
name: :"phoenix_controller_error_rendered_duration_#{@duration_unit}",
labels: labels,
registry: :default
],
duration
)
end
end
def handle_event([:phoenix, :channel_joined], %{duration: duration}, metadata, _config) do
with labels when is_list(labels) <- labels(metadata) do
Histogram.observe(
[
name: :"phoenix_channel_join_duration_#{@duration_unit}",
labels: labels,
registry: :default
],
duration
)
end
end
def handle_event(
[:phoenix, :channel_handled_in],
%{duration: duration},
metadata,
_config
) do
with labels when is_list(labels) <- labels(metadata) do
Histogram.observe(
[
name: :"phoenix_channel_receive_duration_#{@duration_unit}",
labels: labels,
registry: :default
],
duration
)
end
end
def labels(%{
status: status,
conn: %{private: %{phoenix_action: action, phoenix_controller: controller}}
}) do
[action, controller, status]
end
def labels(%{
conn: %{
status: status,
private: %{phoenix_action: action, phoenix_controller: controller}
}
}) do
[action, controller, status]
end
def labels(%{status: status, stacktrace: [{module, function, _, _} | _]}) do
[function, module, status]
end
def labels(%{event: event, socket: %{channel: channel, topic: topic, transport: transport}}) do
[channel, topic, transport, event]
end
def labels(%{socket: %{channel: channel, topic: topic, transport: transport}}) do
[channel, topic, transport]
end
def labels(_metadata), do: nil
end
| 28.254902 | 143 | 0.636826 |
738e1f5254fbdab4ee887a05071f0096b847dc24 | 2,123 | ex | Elixir | clients/apigee/lib/google_api/apigee/v1/model/google_iam_v1_set_iam_policy_request.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/apigee/lib/google_api/apigee/v1/model/google_iam_v1_set_iam_policy_request.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/apigee/lib/google_api/apigee/v1/model/google_iam_v1_set_iam_policy_request.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Apigee.V1.Model.GoogleIamV1SetIamPolicyRequest do
@moduledoc """
Request message for `SetIamPolicy` method.
## Attributes
* `policy` (*type:* `GoogleApi.Apigee.V1.Model.GoogleIamV1Policy.t`, *default:* `nil`) - REQUIRED: The complete policy to be applied to the `resource`. The size of
the policy is limited to a few 10s of KB. An empty policy is a
valid policy but certain Cloud Platform services (such as Projects)
might reject them.
* `updateMask` (*type:* `String.t`, *default:* `nil`) - OPTIONAL: A FieldMask specifying which fields of the policy to modify. Only
the fields in the mask will be modified. If no mask is provided, the
following default mask is used:
`paths: "bindings, etag"`
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:policy => GoogleApi.Apigee.V1.Model.GoogleIamV1Policy.t(),
:updateMask => String.t()
}
field(:policy, as: GoogleApi.Apigee.V1.Model.GoogleIamV1Policy)
field(:updateMask)
end
defimpl Poison.Decoder, for: GoogleApi.Apigee.V1.Model.GoogleIamV1SetIamPolicyRequest do
def decode(value, options) do
GoogleApi.Apigee.V1.Model.GoogleIamV1SetIamPolicyRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Apigee.V1.Model.GoogleIamV1SetIamPolicyRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.245614 | 167 | 0.732925 |
738e226e932e261bf4e832dd7db5c67af1479434 | 1,020 | ex | Elixir | test/process_managers/support/multi/todo_process_manager.ex | jccf091/commanded | 5d68a2b1b7a222b6f204c48d886f3d2c9670f26a | [
"MIT"
] | 1 | 2022-02-20T10:42:07.000Z | 2022-02-20T10:42:07.000Z | test/process_managers/support/multi/todo_process_manager.ex | jccf091/commanded | 5d68a2b1b7a222b6f204c48d886f3d2c9670f26a | [
"MIT"
] | null | null | null | test/process_managers/support/multi/todo_process_manager.ex | jccf091/commanded | 5d68a2b1b7a222b6f204c48d886f3d2c9670f26a | [
"MIT"
] | null | null | null | defmodule Commanded.ProcessManagers.TodoProcessManager do
@moduledoc false
alias Commanded.ProcessManagers.{TodoApp, TodoProcessManager}
use Commanded.ProcessManagers.ProcessManager,
application: TodoApp,
name: __MODULE__
@derive Jason.Encoder
defstruct [:todo_uuid]
alias Commanded.ProcessManagers.Todo.Commands.MarkDone
alias Commanded.ProcessManagers.Todo.Events.TodoCreated
alias Commanded.ProcessManagers.TodoList.Events.ListAllDone
def interested?(%TodoCreated{todo_uuid: todo_uuid}), do: {:start, todo_uuid}
def interested?(%ListAllDone{todo_uuids: todo_uuids}), do: {:continue, todo_uuids}
def handle(%TodoProcessManager{}, %TodoCreated{}), do: []
def handle(%TodoProcessManager{todo_uuid: todo_uuid}, %ListAllDone{}) do
%MarkDone{todo_uuid: todo_uuid}
end
def apply(%TodoProcessManager{} = state, %TodoCreated{todo_uuid: todo_uuid}) do
%TodoProcessManager{state | todo_uuid: todo_uuid}
end
def apply(%TodoProcessManager{} = state, _event), do: state
end
| 31.875 | 84 | 0.769608 |
738e2b1a578698935ad369cce137dd538a3e9328 | 559 | exs | Elixir | priv/repo/seeds.exs | eahanson/corex | 550020c5cbfc7dc828bc74e1edf0223c1cbffef1 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | eahanson/corex | 550020c5cbfc7dc828bc74e1edf0223c1cbffef1 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | eahanson/corex | 550020c5cbfc7dc828bc74e1edf0223c1cbffef1 | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Corex.Repo.insert!(%Corex.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
alias Corex.Accounts
%{email: "[email protected]", password: "password123", admin: true} |> Accounts.create_admin()
%{email: "[email protected]", password: "password123", admin: false} |> Accounts.create_user()
| 32.882353 | 94 | 0.710197 |
738e2e79bd469c951bd7f9c845c452316a66708f | 884 | ex | Elixir | clients/container/lib/google_api/container/v1/metadata.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/container/lib/google_api/container/v1/metadata.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/container/lib/google_api/container/v1/metadata.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Container.V1 do
@moduledoc """
API client metadata for GoogleApi.Container.V1.
"""
@discovery_revision "20220518"
def discovery_revision(), do: @discovery_revision
end
| 32.740741 | 74 | 0.75905 |
738e2f021d1dc35f9ac4d3a178a8a71aab80d718 | 1,786 | ex | Elixir | clients/drive/lib/google_api/drive/v3/model/comment_quoted_file_content.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/drive/lib/google_api/drive/v3/model/comment_quoted_file_content.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/drive/lib/google_api/drive/v3/model/comment_quoted_file_content.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Drive.V3.Model.CommentQuotedFileContent do
  @moduledoc """
  The file content to which the comment refers, typically within the anchor region. For a text file, for example, this would be the text at the location of the comment.

  ## Attributes

  - mimeType (String.t): The MIME type of the quoted content. Defaults to: `null`.
  - value (String.t): The quoted content itself. This is interpreted as plain text if set through the API. Defaults to: `null`.
  """

  # Brings in the `field/1,2` macros plus JSON (de)serialization helpers
  # shared by all generated Gax model structs.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :mimeType => any(),
          :value => any()
        }

  field(:mimeType)
  field(:value)
end
defimpl Poison.Decoder, for: GoogleApi.Drive.V3.Model.CommentQuotedFileContent do
  # Decoding is model-specific, so delegate to the generated model module.
  def decode(struct, opts),
    do: GoogleApi.Drive.V3.Model.CommentQuotedFileContent.decode(struct, opts)
end
defimpl Poison.Encoder, for: GoogleApi.Drive.V3.Model.CommentQuotedFileContent do
  # Encoding is identical for all Gax models; defer to the shared base.
  def encode(struct, opts), do: GoogleApi.Gax.ModelBase.encode(struct, opts)
end
| 35.019608 | 168 | 0.740761 |
738e3abbe08da4e730018b336d5d722519bcf5be | 203 | ex | Elixir | lib/phone/cr.ex | davidkovsky/phone | 83108ab1042efe62778c7363f5d02ef888883408 | [
"Apache-2.0"
] | 97 | 2016-04-05T13:08:41.000Z | 2021-12-25T13:08:34.000Z | lib/phone/cr.ex | davidkovsky/phone | 83108ab1042efe62778c7363f5d02ef888883408 | [
"Apache-2.0"
] | 70 | 2016-06-14T00:56:00.000Z | 2022-02-10T19:43:14.000Z | lib/phone/cr.ex | davidkovsky/phone | 83108ab1042efe62778c7363f5d02ef888883408 | [
"Apache-2.0"
defmodule Phone.CR do
  @moduledoc false
  use Helper.Country

  # Country code 506 followed by an 8-character subscriber part.
  def regex do
    ~r/^(506)()(.{8})/
  end

  def country do
    "Costa Rica"
  end

  def a2 do
    "CR"
  end

  def a3 do
    "CRI"
  end

  matcher(:regex, ["506"])
end
| 15.615385 | 35 | 0.596059 |
738e7d46d8fb02554a09708569b0b5641228c04d | 539 | exs | Elixir | test/sugar/templates/template_test.exs | pct/templates | 8a537eeca1db8c1c4877d76452a379ea4ec55bf5 | [
"MIT"
] | 7 | 2016-01-28T13:31:07.000Z | 2021-01-30T06:20:08.000Z | test/sugar/templates/template_test.exs | pct/templates | 8a537eeca1db8c1c4877d76452a379ea4ec55bf5 | [
"MIT"
] | 3 | 2015-05-08T07:05:17.000Z | 2015-08-23T19:03:33.000Z | test/sugar/templates/template_test.exs | pct/templates | 8a537eeca1db8c1c4877d76452a379ea4ec55bf5 | [
"MIT"
] | 8 | 2015-05-08T06:31:46.000Z | 2019-05-30T19:56:18.000Z | defmodule Sugar.Templates.TemplateTest do
use ExUnit.Case
test "__struct__" do
template = %Sugar.Templates.Template{
key: "main/index.html.eex",
source: "<%= 2 + 2 %>",
binary: nil,
engine: Sugar.Templates.Engines.EEx,
updated_at: {{2014, 05, 02}, {22, 41, 00}}
}
assert template.key === "main/index.html.eex"
assert template.source === "<%= 2 + 2 %>"
assert template.engine === Sugar.Templates.Engines.EEx
assert template.updated_at === {{2014, 05, 02}, {22, 41, 00}}
end
end
| 28.368421 | 65 | 0.61039 |
738e7f71fb73e2b89bc3ddc0c888712c5462c7e4 | 3,045 | ex | Elixir | lib/elixir/lib/macro/env.ex | knewter/elixir | 8310d62499e292d78d5c9d79d5d15a64e32fb738 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/macro/env.ex | knewter/elixir | 8310d62499e292d78d5c9d79d5d15a64e32fb738 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/macro/env.ex | knewter/elixir | 8310d62499e292d78d5c9d79d5d15a64e32fb738 | [
"Apache-2.0"
defmodule Macro.Env do
  @moduledoc """
  A record that holds compile time environment information.
  The current environment can be accessed at any time as
  `__ENV__`. Inside macros, the caller environment can be
  accessed as `__CALLER__`. It contains the following fields:
  * `module` - the current module name.
  * `file` - the current file name as a binary
  * `line` - the current line as an integer
  * `function` - a tuple as `{ atom, integer }`, where the first
    element is the function name and the second its arity. Returns
    `nil` if not inside a function
  * `aliases` - a list of two item tuples, where the first
    item is the aliased name and the second the actual name
  * `context` - the context of the environment. It can be nil
    (default context), inside a guard or inside an assign
  * `requires` - the list of required modules
  * `functions` - a list of functions imported from each module
  * `macros` - a list of macros imported from each module
  * `context_modules` - a list of modules defined in the current context
  * `macro_aliases` - a list of aliases defined inside the current macro
  * `vars` - a list keeping all defined variables as { var, context }
  """

  @type name_arity :: { atom, non_neg_integer }
  @type file :: binary
  @type line :: non_neg_integer
  @type aliases :: [{ module, module }]
  @type context :: :match | :guard | nil
  @type requires :: [module]
  @type functions :: [{ module, [name_arity] }]
  @type macros :: [{ module, [name_arity] }]
  @type context_modules :: [module]
  @type vars :: [{ atom, atom }]
  @type lexical_tracker :: pid

  # Compile-time locals feeding the Record macros below: `fields` lists the
  # record slots in order; `types` is a quoted keyword list pairing each slot
  # with the @type names declared above.
  fields = [:module, :file, :line, :function, :aliases, :context, :requires, :functions,
            :macros, :context_modules, :macro_aliases, :vars, :lexical_tracker]
  types = quote do: [module: module, file: file, line: line,
                     function: name_arity, aliases: aliases, requires: requires,
                     functions: functions, macros: macros, context_modules: context_modules,
                     macro_aliases: aliases, vars: vars, lexical_tracker: lexical_tracker]

  # Generates the accessor/updater functions (e.g. `module/1`, `file/1`) and
  # the corresponding typespecs for this record.
  Record.deffunctions(fields, __MODULE__)
  Record.deftypes(fields, types, __MODULE__)

  @doc """
  Returns a keyword list containing the file and line
  information as keys.
  """
  def location(record) do
    [file: file(record), line: line(record)]
  end

  @doc """
  Returns whether the compilation environment is currently
  inside a guard.
  """
  def in_guard?(record), do: context(record) == :guard

  @doc """
  Returns whether the compilation environment is currently
  inside a match clause.
  """
  def in_match?(record), do: context(record) == :match

  @doc """
  Returns the environment stacktrace.
  """
  def stacktrace(record) do
    cond do
      # Outside any module: attribute the frame to the compiler itself.
      nil?(record.module) ->
        [{ :elixir_compiler, :__FILE__, 2, location(record) }]
      # Inside a module but outside a function body.
      nil?(record.function) ->
        [{ module(record), :__MODULE__, 0, location(record) }]
      true ->
        { name, arity } = record.function
        [{ module(record), name, arity, location(record) }]
    end
  end
end
| 35.823529 | 88 | 0.677833 |
738e8b5d4502d34a4c2cab4e31c613bcf9f4d20f | 882 | ex | Elixir | test/support/conn_case.ex | empex2019liveview/namedb | fd01151dd293f9740204435fed524bc364f81069 | [
"MIT"
] | null | null | null | test/support/conn_case.ex | empex2019liveview/namedb | fd01151dd293f9740204435fed524bc364f81069 | [
"MIT"
] | null | null | null | test/support/conn_case.ex | empex2019liveview/namedb | fd01151dd293f9740204435fed524bc364f81069 | [
"MIT"
defmodule NamedbWeb.ConnCase do
  @moduledoc """
  This module defines the test case to be used by
  tests that require setting up a connection.
  Such tests rely on `Phoenix.ConnTest` and also
  import other functionality to make it easier
  to build common data structures and query the data layer.
  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  # Injected into every module that does `use NamedbWeb.ConnCase`.
  using do
    quote do
      # Import conveniences for testing with connections
      use Phoenix.ConnTest
      alias NamedbWeb.Router.Helpers, as: Routes
      # The default endpoint for testing
      @endpoint NamedbWeb.Endpoint
    end
  end

  # Each test receives a fresh Plug connection under the `:conn` context key.
  setup _tags do
    {:ok, conn: Phoenix.ConnTest.build_conn()}
  end
end
| 26.727273 | 59 | 0.727891 |
738e961e6de392c83845dd0d2f86473703963103 | 348 | exs | Elixir | backend/priv/repo/migrations/20170522185843_create_file.exs | ui-icts/aptamer-web | a28502c22a4e55ab1fbae8bbeaa6b11c9a477c06 | [
"MIT"
] | null | null | null | backend/priv/repo/migrations/20170522185843_create_file.exs | ui-icts/aptamer-web | a28502c22a4e55ab1fbae8bbeaa6b11c9a477c06 | [
"MIT"
] | 7 | 2019-02-08T18:28:49.000Z | 2022-02-12T06:44:59.000Z | backend/priv/repo/migrations/20170522185843_create_file.exs | ui-icts/aptamer-web | a28502c22a4e55ab1fbae8bbeaa6b11c9a477c06 | [
"MIT"
] | null | null | null | defmodule Aptamer.Repo.Migrations.CreateFile do
use Ecto.Migration
def change do
create table(:files, primary_key: false) do
add(:id, :binary_id, primary_key: true)
add(:file_name, :string)
add(:uploaded_on, :naive_datetime)
add(:file_purpose, :string)
add(:data, :bytea)
timestamps()
end
end
end
| 23.2 | 47 | 0.66092 |
738ed0e659d34242d54619d7f11b5788dcd319b1 | 159 | ex | Elixir | lib/web/views/admin/user_view.ex | yknx4/opencov | dc961a41e29b41b0657bc2a64bb67350a65477b8 | [
"MIT"
] | 8 | 2021-08-22T10:37:57.000Z | 2022-01-10T11:27:06.000Z | lib/web/views/admin/user_view.ex | yknx4/librecov | dc961a41e29b41b0657bc2a64bb67350a65477b8 | [
"MIT"
] | 109 | 2021-08-20T04:08:04.000Z | 2022-01-03T07:39:18.000Z | lib/web/views/admin/user_view.ex | Librecov/librecov | dc961a41e29b41b0657bc2a64bb67350a65477b8 | [
"MIT"
defmodule Librecov.Admin.UserView do
  use Librecov.Web, :view

  # Pagination helpers plus the display/date formatters used in templates.
  import Scrivener.HTML
  alias Librecov.Helpers.{Datetime, Display}
end
| 19.875 | 36 | 0.798742 |
738ed4bf329b113f58bb1fdbb631320ea1d679a7 | 914 | exs | Elixir | test/bootstrap_form/textarea_test.exs | feliperenan/bootstrap_form | a12d0665973687bfefeee499f8581398a25ccf75 | [
"MIT"
] | 6 | 2019-02-07T00:37:24.000Z | 2021-05-29T23:37:32.000Z | test/bootstrap_form/textarea_test.exs | feliperenan/bootstrap_form | a12d0665973687bfefeee499f8581398a25ccf75 | [
"MIT"
] | 7 | 2019-02-07T00:19:39.000Z | 2019-11-04T17:01:50.000Z | test/bootstrap_form/textarea_test.exs | feliperenan/bootstrap_form | a12d0665973687bfefeee499f8581398a25ccf75 | [
"MIT"
] | 3 | 2019-10-30T13:49:44.000Z | 2021-09-26T23:45:02.000Z | defmodule BootstrapForm.TextareaTest do
use ExUnit.Case
alias BootstrapForm.Textarea
import Phoenix.HTML, only: [safe_to_string: 1]
doctest Textarea
describe "build/3" do
test "generates an textarea input" do
expected =
~s(<div class="form-group wrapper-class">) <>
~s(<label class="control-label" for="user_bio">Bio</label>) <>
~s(<textarea class="form-control" id="user_bio" name="user[bio]">\n</textarea>) <>
~s(</div>)
input = Textarea.build(:user, :bio, wrapper_html: [class: "wrapper-class"])
assert safe_to_string(input) == expected
end
test "textarea supports custom options" do
expected =
~s(<textarea class="form-control my-class" id="user_bio" name="user[bio]">\n</textarea>)
input = Textarea.build(:user, :bio, class: "my-class")
assert safe_to_string(input) =~ expected
end
end
end
| 27.69697 | 96 | 0.640044 |
738ef06b26fd817a431c2c6a39790a9535dd98a3 | 1,898 | exs | Elixir | test/test_router.exs | FarmBot-Labs/laughing-octo-telegram | 9d3d161362b17da82378c51a815f4c46e0a7a09d | [
"MIT"
] | 2 | 2016-09-18T03:06:37.000Z | 2016-12-26T23:55:40.000Z | test/test_router.exs | FarmBot-Labs/laughing-octo-telegram | 9d3d161362b17da82378c51a815f4c46e0a7a09d | [
"MIT"
] | 3 | 2016-09-30T08:38:15.000Z | 2016-10-01T19:26:10.000Z | test/test_router.exs | FarmBot-Labs/laughing-octo-telegram | 9d3d161362b17da82378c51a815f4c46e0a7a09d | [
"MIT"
] | 5 | 2016-09-30T17:13:42.000Z | 2021-01-06T17:58:15.000Z | defmodule TestRouter do
use Plug.Router
plug CORSPlug
plug Plug.Parsers, parsers: [:urlencoded, :json],
pass: ["text/*"],
json_decoder: Poison
plug :match
plug :dispatch
def test_key do
"-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzXxHfRyMjsl6s4RMn/T3\nRaKWax8wRhKfVkkrbE7uAtRMlRmvLMlOPGQTD6E+CrhqknGwFiXBy9hfhs9aPBPX\nhhZfI/2QZok4lxvIK7gQzYfF9E5VZWRbv7MjvyVWkqOf1Ab9jTOefvyZL39EgIrM\n9d1g5qPc/a4TBJnrJas1/IzfSZhvFCHYQ7SaONo6UqhkqP+JOOFBXfxYiWP02U1p\nQ253g8Vnu5LjQBQJHkIQQ3jZjQw1ArhP7BM09gINVjyU+igSL+64qH3D5/jjMswv\nd0z9hRA7uCoLQIcbVCfQXQRITCjbVmvM/P3NRuxUtARD/9ZHXokOg0DsnWC1ljpx\ncQIDAQAB\n-----END PUBLIC KEY-----\n"
end
def test_token do
"{\"token\":{\"unencoded\":{\"sub\":\"[email protected]\",\"iat\":1475157438,\"jti\":\"264e86bd-41ad-45df-a5ce-f9afbd951e10\",\"iss\":\"http://localhost:3000\",\"exp\":1475503038,\"mqtt\":\"192.168.29.154\",\"bot\":\"856b27df-65b5-4089-be55-c2c7aab17837\"},\"encoded\":\"eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJzdWIiOiJhZG1pbkBhZG1pbi5jb20iLCJpYXQiOjE0NzUxNTc0MzgsImp0aSI6IjI2NGU4NmJkLTQxYWQtNDVkZi1hNWNlLWY5YWZiZDk1MWUxMCIsImlzcyI6Imh0dHA6Ly9sb2NhbGhvc3Q6MzAwMCIsImV4cCI6MTQ3NTUwMzAzOCwibXF0dCI6IjE5Mi4xNjguMjkuMTU0IiwiYm90IjoiODU2YjI3ZGYtNjViNS00MDg5LWJlNTUtYzJjN2FhYjE3ODM3In0.h9j2X9WKuMvox491W2-GSpCB3OriH9BF60cxwbmst8Lo3XUbnP0wVmOCL6fQgvjJRWYGhYojrIjK5sLAeUyQ3SSh1PZrPwhBtw4eSnjCZ8iTRHur5TWui-9221k9JSpe5anYPn6fzkAM25-x1txf39T1M6ddf8UWmTNp7v-VW-byS2hqg3RWWllOzTE8GpVO5ZIdAr_ZnP8NpJxmQezlC45Vo6elnl5RzOho8xpX-OIeL2KNe3eO3cIcptSQ7kvl2Rlwha3tx2ahFOxBdRz9THj96I7rHXvWTqql7nuyvOkGMTFyUT2GeIw4vrghTgSLKoyC2jN9lJ7xgDaRr53cKw\"}}"
end
get "/api/public_key" do
send_resp(conn, 200, test_key)
end
post "/api/tokens" do
send_resp(conn, 200, test_token)
end
match _ do
send_resp(conn, 404, "Whatever you did could not be found.")
end
end
| 65.448276 | 930 | 0.810327 |
738f13df0ecd9adde230bb2b23a8f40ab2e52c0e | 15,985 | ex | Elixir | kousa/lib/kousa/room.ex | LeonardSSH/dogehouse | 584055ad407bc37fa35cdf36ebb271622e29d436 | [
"MIT"
] | 8 | 2021-06-30T07:02:52.000Z | 2021-08-30T18:58:38.000Z | kousa/lib/kousa/room.ex | LeonardSSH/dogehouse | 584055ad407bc37fa35cdf36ebb271622e29d436 | [
"MIT"
] | 12 | 2021-07-06T12:51:13.000Z | 2022-03-16T12:38:18.000Z | kousa/lib/kousa/room.ex | LeonardSSH/dogehouse | 584055ad407bc37fa35cdf36ebb271622e29d436 | [
"MIT"
] | 4 | 2021-07-15T20:33:50.000Z | 2022-03-27T12:46:47.000Z | defmodule Kousa.Room do
alias Kousa.Utils.VoiceServerUtils
alias Beef.Users
alias Beef.Follows
alias Beef.Rooms
# note the following 2 module aliases are on the chopping block!
alias Beef.RoomPermissions
alias Beef.RoomBlocks
alias Onion.PubSub
alias Onion.UserSession
alias Broth.SocketHandler
  @doc """
  Enables/disables auto-speaker on the room created by `user_id`, if any.
  No-op when the user has no room.
  """
  def set_auto_speaker(user_id, value) do
    if room = Rooms.get_room_by_creator_id(user_id) do
      Onion.RoomSession.set_auto_speaker(room.id, value)
    end
  end

  @spec make_room_public(any, any) :: nil | :ok
  def make_room_public(user_id, new_name) do
    # this needs to be refactored if a user can have multiple rooms
    # NOTE(review): `Beef.Rooms` here is the same module as the `Rooms`
    # alias used elsewhere in this file.
    case Beef.Rooms.set_room_privacy_by_creator_id(user_id, false, new_name) do
      {1, [room]} ->
        Onion.RoomSession.broadcast_ws(
          room.id,
          %{op: "room_privacy_change", d: %{roomId: room.id, name: room.name, isPrivate: false}}
        )

      _ ->
        nil
    end
  end

  @spec make_room_private(any, any) :: nil | :ok
  def make_room_private(user_id, new_name) do
    # this needs to be refactored if a user can have multiple rooms
    case Rooms.set_room_privacy_by_creator_id(user_id, true, new_name) do
      {1, [room]} ->
        Onion.RoomSession.broadcast_ws(
          room.id,
          %{op: "room_privacy_change", d: %{roomId: room.id, name: room.name, isPrivate: true}}
        )

      _ ->
        nil
    end
  end

  @doc """
  Sends a room invite to `user_id_to_invite`. Only fires when the inviter is
  currently in a room AND the invitee follows the inviter.
  """
  def invite_to_room(user_id, user_id_to_invite) do
    user = Beef.Users.get_by_id(user_id)

    if user.currentRoomId && Follows.following_me?(user_id, user_id_to_invite) do
      # @todo store room name in RoomSession to avoid db lookups
      room = Rooms.get_room_by_id(user.currentRoomId)

      if not is_nil(room) do
        Onion.RoomSession.create_invite(
          user.currentRoomId,
          user_id_to_invite,
          %{
            roomName: room.name,
            displayName: user.displayName,
            username: user.username,
            avatarUrl: user.avatarUrl,
            bannerUrl: user.bannerUrl,
            type: "invite"
          }
        )
      end
    end
  end

  # Unsubscribes the target's websocket from the room chat (when a session
  # exists), then removes them from the room in both DB and room session —
  # but only if they are actually still in `room_id`.
  defp internal_kick_from_room(user_id_to_kick, room_id) do
    case UserSession.lookup(user_id_to_kick) do
      [{_, _}] ->
        ws_pid = UserSession.get(user_id_to_kick, :pid)

        if ws_pid do
          SocketHandler.unsub(ws_pid, "chat:" <> room_id)
        end

      _ ->
        nil
    end

    current_room_id = Beef.Users.get_current_room_id(user_id_to_kick)

    if current_room_id == room_id do
      Rooms.kick_from_room(user_id_to_kick, current_room_id)
      Onion.RoomSession.kick_from_room(current_room_id, user_id_to_kick)
    end
  end

  @spec block_from_room(String.t(), String.t(), boolean()) ::
          nil
          | :ok
          | {:askedToSpeak | :creator | :listener | :mod | nil | :speaker,
             atom | %{:creatorId => any, optional(any) => any}}
  def block_from_room(user_id, user_id_to_block_from_room, should_ban_ip \\ false) do
    # Only creators and mods may block; the room creator can never be blocked.
    with {status, room} when status in [:creator, :mod] <-
           Rooms.get_room_status(user_id) do
      if room.creatorId != user_id_to_block_from_room do
        RoomBlocks.upsert(%{
          modId: user_id,
          userId: user_id_to_block_from_room,
          roomId: room.id,
          # IP ban is optional; `nil` means block by user id only.
          ip: if(should_ban_ip, do: Users.get_ip(user_id_to_block_from_room), else: nil)
        })

        internal_kick_from_room(user_id_to_block_from_room, room.id)
      end
    end
  end
  ###################################################################
  ## AUTH

  @doc """
  sets the authorization level of the user in the room that they're in.
  This could be 'user', 'mod', or 'owner'.
  Authorization to do so is pulled from the options `:by` keyword.
  TODO: move room into the opts field, and have it be passed in by the
  socket.
  """
  def set_auth(user_id, auth, opts) do
    room_id = Beef.Users.get_current_room_id(user_id)

    case auth do
      # Target user isn't in a room: nothing to change.
      _ when is_nil(room_id) ->
        :noop

      :owner ->
        set_owner(room_id, user_id, opts[:by])

      :mod ->
        set_mod(room_id, user_id, opts[:by])

      :user ->
        set_user(room_id, user_id, opts[:by])
    end
  end

  ####################################################################
  # owner

  # Transfers room ownership from `setter_id` (must be the current creator)
  # to `user_id`; the previous owner is kept on stage as a speaker.
  def set_owner(room_id, user_id, setter_id) do
    with {:creator, _} <- Rooms.get_room_status(setter_id), {1, _} <- Rooms.replace_room_owner(setter_id, user_id) do
      Onion.RoomSession.set_room_creator_id(room_id, user_id)
      internal_set_speaker(setter_id, room_id)

      Onion.RoomSession.broadcast_ws(
        room_id,
        %{
          op: "new_room_creator",
          d: %{roomId: room_id, userId: user_id}
        }
      )
    end
  end

  ####################################################################
  # mod

  # only creators can set someone to be mod.
  defp set_mod(room_id, user_id, setter_id) do
    # TODO: refactor this to pull from preloads.
    case Rooms.get_room_status(setter_id) do
      {:creator, _} ->
        RoomPermissions.set_is_mod(user_id, room_id, true)
        # New mods are also granted chat permission.
        Onion.Chat.set_can_chat(room_id, user_id)

        Onion.RoomSession.broadcast_ws(
          room_id,
          %{
            op: "mod_changed",
            d: %{roomId: room_id, userId: user_id, isMod: true}
          }
        )

      _ ->
        :noop
    end
  end
####################################################################
# plain user
# mods can demote their own mod status.
defp set_user(room_id, user_id, user_id) do
case Rooms.get_room_status(user_id) do
{:mod, _} ->
RoomPermissions.set_is_mod(user_id, room_id, true)
Onion.RoomSession.broadcast_ws(
room_id,
%{
op: "mod_changed",
d: %{roomId: room_id, userId: user_id, isMod: false}
}
)
_ ->
:noop
end
end
# only creators can demote mods
defp set_user(room_id, user_id, setter_id) do
case Rooms.get_room_status(setter_id) do
{:creator, _} ->
RoomPermissions.set_is_mod(user_id, room_id, false)
Onion.RoomSession.broadcast_ws(
room_id,
%{
op: "mod_changed",
d: %{roomId: room_id, userId: user_id, isMod: false}
}
)
_ ->
:noop
end
end
  ####################################################################
  ## ROLE

  @doc """
  sets the role of the user in the room that they're in. Authorization
  to do so is pulled from the options `:by` keyword.
  TODO: move room into the opts field, and have it be passed in by the
  socket.
  """
  def set_role(user_id, role, opts) do
    room_id = Beef.Users.get_current_room_id(user_id)

    case role do
      # Target user isn't in a room: nothing to change.
      _ when is_nil(room_id) ->
        :noop

      :listener ->
        set_listener(room_id, user_id, opts[:by])

      :speaker ->
        set_speaker(room_id, user_id, opts[:by])

      :raised_hand ->
        set_raised_hand(room_id, user_id, opts[:by])
    end
  end

  ####################################################################
  ## listener

  defp set_listener(nil, _, _), do: :noop

  # you are always allowed to set yourself as listener
  defp set_listener(room_id, user_id, user_id) do
    internal_set_listener(user_id, room_id)
  end

  # Third parties may move someone to listener only if the setter outranks
  # the target: creators can demote anyone, mods only non-creator/non-mods.
  defp set_listener(room_id, user_id, setter_id) do
    # TODO: refactor this to be simpler. The list of
    # creators and mods should be in the preloads of the room.
    with {auth, _} <- Rooms.get_room_status(setter_id),
         {role, _} <- Rooms.get_room_status(user_id) do
      if auth == :creator or (auth == :mod and role not in [:creator, :mod]) do
        internal_set_listener(user_id, room_id)
      end
    end
  end

  # Persists the listener role and removes the user from the live speaker set.
  defp internal_set_listener(user_id, room_id) do
    RoomPermissions.make_listener(user_id, room_id)
    Onion.RoomSession.remove_speaker(room_id, user_id)
  end
  ####################################################################
  ## speaker

  defp set_speaker(nil, _, _), do: :noop

  # Promotion to speaker requires the target to have raised their hand first,
  # and the setter to be a mod or the creator of the room.
  defp set_speaker(room_id, user_id, setter_id) do
    if not RoomPermissions.asked_to_speak?(user_id, room_id) do
      :noop
    else
      case Rooms.get_room_status(setter_id) do
        {_, nil} ->
          :noop

        {:mod, _} ->
          internal_set_speaker(user_id, room_id)

        {:creator, _} ->
          internal_set_speaker(user_id, room_id)

        {_, _} ->
          :noop
      end
    end
  end

  @spec internal_set_speaker(any, any) :: nil | :ok | {:err, {:error, :not_found}}
  # Persists the speaker permission, grants chat, and registers the user as a
  # live speaker with their current mute/deafen state.
  defp internal_set_speaker(user_id, room_id) do
    case RoomPermissions.set_speaker(user_id, room_id, true) do
      {:ok, _} ->
        Onion.Chat.set_can_chat(room_id, user_id)
        # kind of horrible to have to make a double genserver call
        # here, we'll have to think about how this works (who owns muting)
        Onion.RoomSession.add_speaker(
          room_id,
          user_id,
          Onion.UserSession.get(user_id, :muted),
          Onion.UserSession.get(user_id, :deafened)
        )

      err ->
        {:err, err}
    end
  catch
    # Any crash in the calls above (e.g. dead room session) is reported as a
    # missing room rather than propagated.
    _, _ ->
      {:error, "room not found"}
  end
  # only you can raise your own hand
  # With auto-speaker enabled the user goes straight on stage; otherwise the
  # request is recorded and either auto-approved (already a speaker) or
  # broadcast as a `hand_raised` event for mods to act on.
  defp set_raised_hand(room_id, user_id, setter_id) do
    if user_id == setter_id do
      if Onion.RoomSession.get(room_id, :auto_speaker) do
        internal_set_speaker(user_id, room_id)
      else
        case RoomPermissions.ask_to_speak(user_id, room_id) do
          {:ok, %{isSpeaker: true}} ->
            internal_set_speaker(user_id, room_id)

          _ ->
            Onion.RoomSession.broadcast_ws(
              room_id,
              %{
                op: "hand_raised",
                d: %{userId: user_id, roomId: room_id}
              }
            )
        end
      end
    end
  end
  ######################################################################
  ## UPDATE

  @doc """
  Edits the room owned by `user_id` (no-op when the user owns no room) and
  broadcasts the new details to everyone in the room on success.
  """
  def update(user_id, data) do
    if room = Rooms.get_room_by_creator_id(user_id) do
      case Rooms.edit(room.id, data) do
        ok = {:ok, room} ->
          Onion.RoomSession.broadcast_ws(room.id, %{
            op: "new_room_details",
            d: %{
              name: room.name,
              description: room.description,
              chatThrottle: room.chatThrottle,
              isPrivate: room.isPrivate,
              roomId: room.id
            }
          })

          ok

        error = {:error, _} ->
          error
      end
    end
  end

  @doc """
  Tells the voice server to add `user_id` to the room's media session.
  When `speaker?` is nil it is derived from room ownership / persisted
  speaker permission.
  """
  def join_vc_room(user_id, room, speaker? \\ nil) do
    speaker? =
      if is_nil(speaker?),
        do:
          room.creatorId == user_id or
            RoomPermissions.speaker?(user_id, room.id),
        else: speaker?

    op =
      if speaker?,
        do: "join-as-speaker",
        else: "join-as-new-peer"

    Onion.VoiceRabbit.send(room.voiceServerId, %{
      op: op,
      d: %{roomId: room.id, peerId: user_id},
      uid: user_id
    })
  end
  @spec create_room(
          String.t(),
          String.t(),
          String.t(),
          boolean(),
          String.t() | nil,
          boolean() | nil
        ) ::
          {:error, any}
          | {:ok, %{room: atom | %{:id => any, :voiceServerId => any, optional(any) => any}}}
  @doc """
  Creates a room owned by `user_id`, leaving their current room first if any.
  On success this also boots the room session, joins the creator as a
  speaker on the voice server, notifies followers (public rooms only), and
  optionally fires an invite to `user_id_to_invite`.
  """
  def create_room(
        user_id,
        room_name,
        room_description,
        is_private,
        user_id_to_invite \\ nil,
        auto_speaker \\ nil
      ) do
    room_id = Users.get_current_room_id(user_id)

    # A user can only be in one room at a time.
    if not is_nil(room_id) do
      leave_room(user_id, room_id)
    end

    id = Ecto.UUID.generate()

    case Rooms.create(%{
           id: id,
           name: room_name,
           description: room_description,
           creatorId: user_id,
           numPeopleInside: 1,
           voiceServerId: VoiceServerUtils.get_next_voice_server_id(),
           isPrivate: is_private
         }) do
      {:ok, room} ->
        Onion.RoomSession.start_supervised(
          room_id: room.id,
          voice_server_id: room.voiceServerId,
          auto_speaker: auto_speaker,
          chat_throttle: room.chatThrottle,
          chat_mode: room.chatMode,
          room_creator_id: room.creatorId
        )

        muted? = Onion.UserSession.get(user_id, :muted)
        deafened? = Onion.UserSession.get(user_id, :deafened)

        # `no_fan: true`: the creator is the only occupant, nothing to fan out.
        Onion.RoomSession.join_room(room.id, user_id, muted?, deafened?, no_fan: true)

        Onion.VoiceRabbit.send(room.voiceServerId, %{
          op: "create-room",
          d: %{roomId: id},
          uid: user_id
        })

        # Creator always joins the voice session as a speaker.
        join_vc_room(user_id, room, true)

        if not is_private do
          Kousa.Follow.notify_followers_you_created_a_room(user_id, room)
        end

        if not is_nil(user_id_to_invite) do
          # TODO: change this to Task.Supervised
          Task.start(fn ->
            Kousa.Room.invite_to_room(user_id, user_id_to_invite)
          end)
        end

        # subscribe to this room's chat
        Onion.PubSub.subscribe("chat:" <> id)

        {:ok, %{room: room}}

      {:error, x} ->
        {:error, Kousa.Utils.Errors.changeset_to_first_err_message_with_field_name(x)}
    end
  end
  # NB this function does not correctly return an updated room struct if the
  # action is valid.
  # NB2, this function has an non-idiomatic parameter order. room_id should
  # come first.
  # Joins `user_id` into `room_id`: leaves the previous room, subscribes to
  # the room's chat topic, registers with the room session, and joins the
  # voice session. Returns `%{room: room}` or `%{error: message}`.
  def join_room(user_id, room_id) do
    currentRoomId = Beef.Users.get_current_room_id(user_id)

    # Already there: just echo the room back.
    if currentRoomId == room_id do
      %{room: Rooms.get_room_by_id(room_id)}
    else
      case Rooms.can_join_room(room_id, user_id) do
        {:error, message} ->
          %{error: message}

        {:ok, room} ->
          # private rooms can now be joined by anyone who has the link
          # they are functioning closer to an "unlisted" room
          if currentRoomId do
            leave_room(user_id, currentRoomId)
          end

          # subscribe to the new room chat
          PubSub.subscribe("chat:" <> room_id)

          updated_user = Rooms.join_room(room, user_id)

          # No live user session means default (unmuted, undeafened) state.
          {muted, deafened} =
            case Onion.UserSession.lookup(user_id) do
              [{_, _}] ->
                {
                  Onion.UserSession.get(user_id, :muted),
                  Onion.UserSession.get(user_id, :deafened)
                }

              _ ->
                {false, false}
            end

          Onion.RoomSession.join_room(room_id, user_id, muted, deafened)

          canSpeak =
            case updated_user do
              %{roomPermissions: %{isSpeaker: true}} -> true
              _ -> false
            end

          # In private rooms everyone joins the voice session as a speaker.
          join_vc_room(user_id, room, canSpeak || room.isPrivate)
          %{room: room}
      end
    end
  catch
    # Any crash above (e.g. bad room id) is reported as a missing room.
    _, _ ->
      {:error, "that room doesn't exist"}
  end
  @doc """
  Removes `user_id` from `current_room_id` (looked up when nil). Destroys the
  room when they were the last occupant, otherwise hands ownership over if a
  new creator was elected. Returns `{:ok, %{roomId: id}}` or an error tuple.
  """
  def leave_room(user_id, current_room_id \\ nil) do
    current_room_id =
      if is_nil(current_room_id),
        do: Beef.Users.get_current_room_id(user_id),
        else: current_room_id

    if current_room_id do
      case Rooms.leave_room(user_id, current_room_id) do
        # the room should be destroyed
        {:bye, room} ->
          Onion.RoomSession.destroy(current_room_id, user_id)

          Onion.VoiceRabbit.send(room.voiceServerId, %{
            op: "destroy-room",
            uid: user_id,
            d: %{peerId: user_id, roomId: current_room_id}
          })

        # the room stays alive with new room creator
        x ->
          case x do
            {:new_creator_id, creator_id} ->
              Onion.RoomSession.broadcast_ws(
                current_room_id,
                %{op: "new_room_creator", d: %{roomId: current_room_id, userId: creator_id}}
              )

            _ ->
              nil
          end

          Onion.RoomSession.leave_room(current_room_id, user_id)
      end

      # unsubscribe to the room chat
      PubSub.unsubscribe("chat:" <> current_room_id)
      {:ok, %{roomId: current_room_id}}
    else
      {:error, "you are not in a room"}
    end
  end
end
| 27.751736 | 117 | 0.572662 |
738f1bf1dd7e40c2bdb84fd137e5b968592ab852 | 3,195 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/pivot_value.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/sheets/lib/google_api/sheets/v4/model/pivot_value.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/sheets/lib/google_api/sheets/v4/model/pivot_value.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Sheets.V4.Model.PivotValue do
  @moduledoc """
  The definition of how a value in a pivot table should be calculated.

  ## Attributes

  *   `calculatedDisplayType` (*type:* `String.t`, *default:* `nil`) - If specified, indicates that pivot values should be displayed as the result of a calculation with another pivot value. For example, if calculated_display_type is specified as PERCENT_OF_GRAND_TOTAL, all the pivot values are displayed as the percentage of the grand total. In the Sheets editor, this is referred to as "Show As" in the value section of a pivot table.
  *   `dataSourceColumnReference` (*type:* `GoogleApi.Sheets.V4.Model.DataSourceColumnReference.t`, *default:* `nil`) - The reference to the data source column that this value reads from.
  *   `formula` (*type:* `String.t`, *default:* `nil`) - A custom formula to calculate the value. The formula must start with an `=` character.
  *   `name` (*type:* `String.t`, *default:* `nil`) - A name to use for the value.
  *   `sourceColumnOffset` (*type:* `integer()`, *default:* `nil`) - The column offset of the source range that this value reads from. For example, if the source was `C10:E15`, a `sourceColumnOffset` of `0` means this value refers to column `C`, whereas the offset `1` would refer to column `D`.
  *   `summarizeFunction` (*type:* `String.t`, *default:* `nil`) - A function to summarize the value. If formula is set, the only supported values are SUM and CUSTOM. If sourceColumnOffset is set, then `CUSTOM` is not supported.
  """

  # Brings in the `field/1,2` macros plus JSON (de)serialization helpers
  # shared by all generated Gax model structs.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :calculatedDisplayType => String.t() | nil,
          :dataSourceColumnReference =>
            GoogleApi.Sheets.V4.Model.DataSourceColumnReference.t() | nil,
          :formula => String.t() | nil,
          :name => String.t() | nil,
          :sourceColumnOffset => integer() | nil,
          :summarizeFunction => String.t() | nil
        }

  field(:calculatedDisplayType)
  field(:dataSourceColumnReference, as: GoogleApi.Sheets.V4.Model.DataSourceColumnReference)
  field(:formula)
  field(:name)
  field(:sourceColumnOffset)
  field(:summarizeFunction)
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.PivotValue do
  # Decoding is model-specific, so delegate to the generated model module.
  def decode(struct, opts),
    do: GoogleApi.Sheets.V4.Model.PivotValue.decode(struct, opts)
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.PivotValue do
  # Encoding is identical for all Gax models; defer to the shared base.
  def encode(struct, opts), do: GoogleApi.Gax.ModelBase.encode(struct, opts)
end
| 50.714286 | 436 | 0.720814 |
738f4bb0fbff9df31996a0acfb9502fed1a3184f | 1,773 | ex | Elixir | lib/yummy_web/endpoint.ex | MatthieuSegret/yummy-phoenix | 85b490075e3a0395b4e7cfa9f06936659e9d12b5 | [
"MIT"
] | 5 | 2017-08-27T19:45:43.000Z | 2019-06-28T08:12:25.000Z | lib/yummy_web/endpoint.ex | MatthieuSegret/yummy-phoenix | 85b490075e3a0395b4e7cfa9f06936659e9d12b5 | [
"MIT"
] | null | null | null | lib/yummy_web/endpoint.ex | MatthieuSegret/yummy-phoenix | 85b490075e3a0395b4e7cfa9f06936659e9d12b5 | [
"MIT"
] | null | null | null | defmodule YummyWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :yummy
socket "/socket", YummyWeb.UserSocket
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phoenix.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/", from: :yummy, gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
if Mix.env == :dev do
plug Plug.Static,
at: "/uploads", from: Path.expand('./uploads'), gzip: false
end
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Logger
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Poison
plug Plug.MethodOverride
plug Plug.Head
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug Plug.Session,
store: :cookie,
key: "_yummy_key",
signing_salt: "yLY0n7bJ"
plug YummyWeb.Router
@doc """
Callback invoked for dynamically configuring the endpoint.
It receives the endpoint configuration and checks if
configuration should be loaded from the system environment.
"""
def init(_key, config) do
if config[:load_from_system_env] do
port = System.get_env("PORT") || raise "expected the PORT environment variable to be set"
{:ok, Keyword.put(config, :http, [:inet6, port: port])}
else
{:ok, config}
end
end
end
| 28.142857 | 95 | 0.697124 |
738f4c4860dd3e36e147b9dd11afd0c7730bb67e | 870 | ex | Elixir | lib/ex_money_web/models/budget.ex | van-mronov/ex_money | 39010f02fd822657e3b5694e08b872bd2ab72c26 | [
"0BSD"
] | 184 | 2015-11-23T20:51:50.000Z | 2022-03-30T01:01:39.000Z | lib/ex_money_web/models/budget.ex | van-mronov/ex_money | 39010f02fd822657e3b5694e08b872bd2ab72c26 | [
"0BSD"
] | 15 | 2015-11-26T16:00:20.000Z | 2018-05-25T20:13:39.000Z | lib/ex_money_web/models/budget.ex | van-mronov/ex_money | 39010f02fd822657e3b5694e08b872bd2ab72c26 | [
"0BSD"
] | 21 | 2015-11-26T21:34:40.000Z | 2022-03-26T02:56:42.000Z | defmodule ExMoney.Budget do
use ExMoney.Web, :model
alias ExMoney.Budget
schema "budgets" do
field :accounts, {:array, :integer}
field :items, :map
field :start_date, :date
field :end_date, :date
field :income, :decimal
field :goal, :decimal
field :expectation, :decimal
belongs_to :user, ExMoney.User
timestamps()
end
def changeset(model, params \\ %{}) do
model
|> cast(params, ~w(accounts items start_date end_date user_id income goal expectation)a)
|> validate_required(~w(accounts start_date end_date user_id)a)
end
def by_user_id(user_id) do
from bt in Budget,
where: bt.user_id == ^user_id
end
def current_by_user_id(user_id) do
from b in Budget,
where: b.user_id == ^user_id,
where: b.start_date <= ^Timex.local,
where: b.end_date >= ^Timex.local
end
end
| 22.894737 | 92 | 0.665517 |
738f60ca68985f0d0572650be0d9344f2fdec219 | 1,119 | exs | Elixir | 2017/elixir/day13/config/config.exs | zakora/elixir-aoc2017 | 216e92cef370081cc0792102e0b40dd3a518d8bf | [
"Unlicense"
] | null | null | null | 2017/elixir/day13/config/config.exs | zakora/elixir-aoc2017 | 216e92cef370081cc0792102e0b40dd3a518d8bf | [
"Unlicense"
] | null | null | null | 2017/elixir/day13/config/config.exs | zakora/elixir-aoc2017 | 216e92cef370081cc0792102e0b40dd3a518d8bf | [
"Unlicense"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :day13, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:day13, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.096774 | 73 | 0.75067 |
738f890855d381d674a3b2b0106f005c3b022c8e | 1,559 | ex | Elixir | hermes/test/support/model_case.ex | jparr721/Hermes | 0ce141897e292a6e4492461f6581f0619c43b8bf | [
"MIT"
] | null | null | null | hermes/test/support/model_case.ex | jparr721/Hermes | 0ce141897e292a6e4492461f6581f0619c43b8bf | [
"MIT"
] | null | null | null | hermes/test/support/model_case.ex | jparr721/Hermes | 0ce141897e292a6e4492461f6581f0619c43b8bf | [
"MIT"
] | null | null | null | defmodule Hermes.ModelCase do
@moduledoc """
This module defines the test case to be used by
model tests.
You may define functions here to be used as helpers in
your model tests. See `errors_on/2`'s definition as reference.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias Hermes.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Hermes.ModelCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Hermes.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Hermes.Repo, {:shared, self()})
end
:ok
end
@doc """
Helper for returning list of errors in a struct when given certain data.
## Examples
Given a User schema that lists `:name` as a required field and validates
`:password` to be safe, it would return:
iex> errors_on(%User{}, %{password: "password"})
[password: "is unsafe", name: "is blank"]
You could then write your assertion like:
assert {:password, "is unsafe"} in errors_on(%User{}, %{password: "password"})
"""
def errors_on(struct, data) do
struct.__struct__.changeset(struct, data)
|> Ecto.Changeset.traverse_errors(&Hermes.ErrorHelpers.translate_error/1)
|> Enum.flat_map(fn {key, errors} -> for msg <- errors, do: {key, msg} end)
end
end
| 26.423729 | 84 | 0.683772 |
738f8c07a94d3636e814cc7ecab51625c9d59251 | 462 | ex | Elixir | lib/rank/parsers/meta.ex | denispeplin/rank | f60eaad6ce9bae43a8ccbf9d0f667fb47f500863 | [
"MIT"
] | 58 | 2017-06-12T19:17:10.000Z | 2022-03-08T10:47:43.000Z | lib/rank/parsers/meta.ex | denispeplin/rank | f60eaad6ce9bae43a8ccbf9d0f667fb47f500863 | [
"MIT"
] | 2 | 2017-06-15T05:53:43.000Z | 2019-03-31T16:01:05.000Z | lib/rank/parsers/meta.ex | denispeplin/rank | f60eaad6ce9bae43a8ccbf9d0f667fb47f500863 | [
"MIT"
] | 5 | 2017-12-06T03:33:12.000Z | 2021-07-30T20:35:33.000Z | defmodule Rank.Parsers.Meta do
@moduledoc """
Parser for list of awesome lists
"""
require Logger
alias Rank.Parsers.Readme
@owner "sindresorhus"
@repo "awesome"
@doc """
Parse Awesome Meta
"""
def parse do
Logger.debug("Parsing meta")
Readme.parse(@owner, @repo)
|> Rank.Store.write_index
end
def path do
Path.join(@owner, @repo)
end
def is_meta?(owner, repo) do
owner == @owner && repo == @repo
end
end
| 15.931034 | 36 | 0.634199 |
738f90cfd703a2ad9b420c460b42c5f9651f37f2 | 30,430 | ex | Elixir | clients/recommender/lib/google_api/recommender/v1beta1/api/projects.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/recommender/lib/google_api/recommender/v1beta1/api/projects.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/recommender/lib/google_api/recommender/v1beta1/api/projects.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Recommender.V1beta1.Api.Projects do
@moduledoc """
API calls for all endpoints tagged `Projects`.
"""
alias GoogleApi.Recommender.V1beta1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Gets the requested insight. Requires the recommender.*.get IAM permission for the specified insight type.
## Parameters
* `connection` (*type:* `GoogleApi.Recommender.V1beta1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. Name of the insight.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Insight{}}` on success
* `{:error, info}` on failure
"""
@spec recommender_projects_locations_insight_types_insights_get(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Insight.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def recommender_projects_locations_insight_types_insights_get(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1beta1/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[struct: %GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Insight{}]
)
end
@doc """
Lists insights for a Cloud project. Requires the recommender.*.list IAM permission for the specified insight type.
## Parameters
* `connection` (*type:* `GoogleApi.Recommender.V1beta1.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. The container resource on which to execute the request. Acceptable formats: 1. "projects/[PROJECT_NUMBER]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]", LOCATION here refers to GCP Locations: https://cloud.google.com/about/locations/ INSIGHT_TYPE_ID refers to supported insight types: https://cloud.google.com/recommender/docs/insights/insight-types.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - Optional. Filter expression to restrict the insights returned. Supported filter fields: state Eg: `state:"DISMISSED" or state:"ACTIVE"
* `:pageSize` (*type:* `integer()`) - Optional. The maximum number of results to return from this request. Non-positive values are ignored. If not specified, the server will determine the number of results to return.
* `:pageToken` (*type:* `String.t`) - Optional. If present, retrieves the next batch of results from the preceding call to this method. `page_token` must be the value of `next_page_token` from the previous response. The values of other method parameters must be identical to those in the previous call.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1ListInsightsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec recommender_projects_locations_insight_types_insights_list(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok,
GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1ListInsightsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def recommender_projects_locations_insight_types_insights_list(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:filter => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1beta1/{+parent}/insights", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[
struct:
%GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1ListInsightsResponse{}
]
)
end
@doc """
Marks the Insight State as Accepted. Users can use this method to indicate to the Recommender API that they have applied some action based on the insight. This stops the insight content from being updated. MarkInsightAccepted can be applied to insights in ACTIVE state. Requires the recommender.*.update IAM permission for the specified insight.
## Parameters
* `connection` (*type:* `GoogleApi.Recommender.V1beta1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. Name of the insight.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1MarkInsightAcceptedRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Insight{}}` on success
* `{:error, info}` on failure
"""
@spec recommender_projects_locations_insight_types_insights_mark_accepted(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Insight.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def recommender_projects_locations_insight_types_insights_mark_accepted(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1beta1/{+name}:markAccepted", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[struct: %GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Insight{}]
)
end
@doc """
Gets the requested recommendation. Requires the recommender.*.get IAM permission for the specified recommender.
## Parameters
* `connection` (*type:* `GoogleApi.Recommender.V1beta1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. Name of the recommendation.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Recommendation{}}` on success
* `{:error, info}` on failure
"""
@spec recommender_projects_locations_recommenders_recommendations_get(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok,
GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Recommendation.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def recommender_projects_locations_recommenders_recommendations_get(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1beta1/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[
struct:
%GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Recommendation{}
]
)
end
@doc """
Lists recommendations for a Cloud project. Requires the recommender.*.list IAM permission for the specified recommender.
## Parameters
* `connection` (*type:* `GoogleApi.Recommender.V1beta1.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. The container resource on which to execute the request. Acceptable formats: 1. "projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]", LOCATION here refers to GCP Locations: https://cloud.google.com/about/locations/ RECOMMENDER_ID refers to supported recommenders: https://cloud.google.com/recommender/docs/recommenders.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - Filter expression to restrict the recommendations returned. Supported filter fields: state_info.state Eg: `state_info.state:"DISMISSED" or state_info.state:"FAILED"
* `:pageSize` (*type:* `integer()`) - Optional. The maximum number of results to return from this request. Non-positive values are ignored. If not specified, the server will determine the number of results to return.
* `:pageToken` (*type:* `String.t`) - Optional. If present, retrieves the next batch of results from the preceding call to this method. `page_token` must be the value of `next_page_token` from the previous response. The values of other method parameters must be identical to those in the previous call.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1ListRecommendationsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec recommender_projects_locations_recommenders_recommendations_list(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok,
GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1ListRecommendationsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def recommender_projects_locations_recommenders_recommendations_list(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:filter => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1beta1/{+parent}/recommendations", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[
struct:
%GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1ListRecommendationsResponse{}
]
)
end
@doc """
Marks the Recommendation State as Claimed. Users can use this method to indicate to the Recommender API that they are starting to apply the recommendation themselves. This stops the recommendation content from being updated. Associated insights are frozen and placed in the ACCEPTED state. MarkRecommendationClaimed can be applied to recommendations in CLAIMED or ACTIVE state. Requires the recommender.*.update IAM permission for the specified recommender.
## Parameters
* `connection` (*type:* `GoogleApi.Recommender.V1beta1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. Name of the recommendation.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1MarkRecommendationClaimedRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Recommendation{}}` on success
* `{:error, info}` on failure
"""
@spec recommender_projects_locations_recommenders_recommendations_mark_claimed(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok,
GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Recommendation.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def recommender_projects_locations_recommenders_recommendations_mark_claimed(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1beta1/{+name}:markClaimed", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[
struct:
%GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Recommendation{}
]
)
end
@doc """
Marks the Recommendation State as Failed. Users can use this method to indicate to the Recommender API that they have applied the recommendation themselves, and the operation failed. This stops the recommendation content from being updated. Associated insights are frozen and placed in the ACCEPTED state. MarkRecommendationFailed can be applied to recommendations in ACTIVE, CLAIMED, SUCCEEDED, or FAILED state. Requires the recommender.*.update IAM permission for the specified recommender.
## Parameters
* `connection` (*type:* `GoogleApi.Recommender.V1beta1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. Name of the recommendation.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1MarkRecommendationFailedRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Recommendation{}}` on success
* `{:error, info}` on failure
"""
@spec recommender_projects_locations_recommenders_recommendations_mark_failed(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok,
GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Recommendation.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def recommender_projects_locations_recommenders_recommendations_mark_failed(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1beta1/{+name}:markFailed", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[
struct:
%GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Recommendation{}
]
)
end
@doc """
Marks the Recommendation State as Succeeded. Users can use this method to indicate to the Recommender API that they have applied the recommendation themselves, and the operation was successful. This stops the recommendation content from being updated. Associated insights are frozen and placed in the ACCEPTED state. MarkRecommendationSucceeded can be applied to recommendations in ACTIVE, CLAIMED, SUCCEEDED, or FAILED state. Requires the recommender.*.update IAM permission for the specified recommender.
## Parameters
* `connection` (*type:* `GoogleApi.Recommender.V1beta1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. Name of the recommendation.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1MarkRecommendationSucceededRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Recommendation{}}` on success
* `{:error, info}` on failure
"""
@spec recommender_projects_locations_recommenders_recommendations_mark_succeeded(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok,
GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Recommendation.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def recommender_projects_locations_recommenders_recommendations_mark_succeeded(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1beta1/{+name}:markSucceeded", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[
struct:
%GoogleApi.Recommender.V1beta1.Model.GoogleCloudRecommenderV1beta1Recommendation{}
]
)
end
end
| 46.959877 | 508 | 0.639829 |
738f9292f36ca3f7aa336ab0dc64b8ac9f65ab7d | 1,769 | ex | Elixir | clients/logging/lib/google_api/logging/v2/model/list_locations_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/logging/lib/google_api/logging/v2/model/list_locations_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/logging/lib/google_api/logging/v2/model/list_locations_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Logging.V2.Model.ListLocationsResponse do
  @moduledoc """
  The response message for Locations.ListLocations.

  ## Attributes

  *   `locations` (*type:* `list(GoogleApi.Logging.V2.Model.Location.t)`, *default:* `nil`) - A list of locations that matches the specified filter in the request.
  *   `nextPageToken` (*type:* `String.t`, *default:* `nil`) - The standard List next-page token.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :locations => list(GoogleApi.Logging.V2.Model.Location.t()) | nil,
          :nextPageToken => String.t() | nil
        }

  # `field/1,2` is provided by GoogleApi.Gax.ModelBase and wires up the
  # JSON (de)serialization for each attribute.
  field(:locations, as: GoogleApi.Logging.V2.Model.Location, type: :list)
  field(:nextPageToken)
end
defimpl Poison.Decoder, for: GoogleApi.Logging.V2.Model.ListLocationsResponse do
  # Hand JSON decoding over to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.Logging.V2.Model.ListLocationsResponse.decode(value, options)
end

defimpl Poison.Encoder, for: GoogleApi.Logging.V2.Model.ListLocationsResponse do
  # Encoding is generic over all Gax-based models.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 35.38 | 163 | 0.734313 |
738f93c0e54b757b87f5f31bc3c64ce157fa19ba | 8,235 | ex | Elixir | lib/sanbase/api_call_limit/ets.ex | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 81 | 2017-11-20T01:20:22.000Z | 2022-03-05T12:04:25.000Z | lib/sanbase/api_call_limit/ets.ex | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 359 | 2017-10-15T14:40:53.000Z | 2022-01-25T13:34:20.000Z | lib/sanbase/api_call_limit/ets.ex | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 16 | 2017-11-19T13:57:40.000Z | 2022-02-07T08:13:02.000Z | defmodule Sanbase.ApiCallLimit.ETS do
@moduledoc ~s"""
Track the API Call quotas (get and update) of the user and remote IPs.
The quota is fetched from the central database and the progress of using it is
tracked in-memory in an ETS table. When API calls are made, the progress is
updated in the ETS table until `quota` number of API calls are made. Then
the API calls count is updated in the central DB and a new quota is fetched.
"""
use GenServer
alias Sanbase.ApiCallLimit
alias Sanbase.Accounts.User
@type entity_type :: :remote_ip | :user
@type remote_ip :: String.t()
@type entity :: remote_ip | %User{}
@ets_table :api_call_limit_ets_table
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, opts, name: Keyword.get(opts, :name, __MODULE__))
end
@impl true
def init(_opts) do
ets_table =
:ets.new(@ets_table, [
:set,
:public,
:named_table,
read_concurrency: true,
write_concurrency: true
])
{:ok, %{ets_table: ets_table}}
end
def clear_all(), do: :ets.delete_all_objects(@ets_table)
def clear_data(:user, %User{id: user_id}), do: :ets.delete(@ets_table, user_id)
def clear_data(:remote_ip, remote_ip), do: :ets.delete(@ets_table, remote_ip)
@doc ~s"""
Get a quota that represent the number of API calls that can be made and tracked
in-memory in an ETS table before checking the postgres database again.
A special case is when the authentication is Basic Authentication. It is used
exclusievly from internal services and there will be no limit imposed.
"""
@spec get_quota(entity_type, entity, atom()) ::
{:ok, :infinity} | {:ok, map()} | {:error, map()}
def get_quota(_type, _entity, :basic), do: {:ok, %{quota: :infinity}}
def get_quota(:user, %User{} = user, _auth_method), do: do_get_quota(:user, user, user.id)
def get_quota(:remote_ip, ip, _auth_method), do: do_get_quota(:remote_ip, ip, ip)
@doc ~s"""
Updates the number of api calls made by a user or an ip address. The number of
API calls is tracked in-memory in an ETS table and after a certain number of
API calls is made, the number is updated in the centralized database.
"""
def update_usage(_type, _entity, _count, :basic), do: :ok
def update_usage(:user, %User{} = user, count, _auth_method),
do: do_update_usage(:user, user, user.id, count)
def update_usage(:remote_ip, remote_ip, count, _auth_method),
do: do_update_usage(:remote_ip, remote_ip, remote_ip, count)
  # Private functions

  # Looks up the cached quota for `entity_key` and decides, per record shape,
  # whether the cached value can be served or the DB must be consulted.
  #
  # ETS record layouts used by this module:
  #   {key, api_calls_remaining, quota, metadata, refresh_after} - normal entry
  #   {key, :rate_limited, error_map}                            - blocked entry
  defp do_get_quota(entity_type, entity, entity_key) do
    case :ets.lookup(@ets_table, entity_key) do
      # Nothing cached yet - fetch a fresh quota from the DB.
      [] ->
        get_quota_db_and_update_ets(entity_type, entity, entity_key)

      [{^entity_key, :rate_limited, error_map}] ->
        # Try again after `retry_again_after` datetime in case something changed.
        # This handles cases where the data changed without a plan upgrade, for
        # example changing the `has_limits` in the admin panel manually.
        # User plan upgrades are handled separately by clearing the ETS records
        # for the user.
        now = DateTime.utc_now()

        case DateTime.compare(now, error_map.retry_again_after) do
          :lt ->
            # Refresh the `blocked_for_seconds` field so the caller can report
            # how long is left until the entity is unblocked.
            error_map =
              error_map
              |> Map.put(
                :blocked_for_seconds,
                abs(DateTime.diff(error_map.blocked_until, now))
              )

            {:error, error_map}

          _ ->
            get_quota_db_and_update_ets(entity_type, entity, entity_key)
        end

      # Unlimited entity - serve straight from the cache.
      [{^entity_key, :infinity, :infinity, metadata, _refresh_after}] ->
        {:ok, %{metadata | quota: :infinity}}

      # Cached quota exhausted (or overdrawn) - flush the used calls to the DB
      # and fetch a fresh quota.
      [{^entity_key, api_calls_remaining, quota, _metadata, _refresh_after}]
      when api_calls_remaining <= 0 ->
        # quota - api_calls_remaining works both with positive and negative api calls
        # remaining.
        {:ok, _} =
          ApiCallLimit.update_usage_db(
            entity_type,
            entity,
            quota - api_calls_remaining
          )

        get_quota_db_and_update_ets(entity_type, entity, entity_key)

      # Quota still available - serve it unless the cached record is stale.
      [{^entity_key, api_calls_remaining, _quota, metadata, refresh_after}] ->
        case DateTime.compare(DateTime.utc_now(), refresh_after) do
          :gt -> get_quota_db_and_update_ets(entity_type, entity, entity_key)
          _ -> {:ok, %{metadata | quota: api_calls_remaining}}
        end
    end
  end
  # Records `count` API calls for `entity_key`: decrements the in-memory
  # counter and, when the local allowance is (nearly) used up, flushes the
  # usage to the DB and refreshes the cached quota.
  defp do_update_usage(entity_type, entity, entity_key, count) do
    case :ets.lookup(@ets_table, entity_key) do
      # No cached entry - record the calls in the DB and prime the cache.
      [] ->
        {:ok, _} = ApiCallLimit.update_usage_db(entity_type, entity, count)
        get_quota_db_and_update_ets(entity_type, entity, entity_key)
        :ok

      # Unlimited entity - nothing to track.
      [{^entity_key, :infinity, :infinity, _metadata, _refresh_after}] ->
        :ok

      [{^entity_key, api_calls_remaining, _quota, _metadata, _refresh_after}]
      when api_calls_remaining <= count ->
        # If 2+ processes execute :ets.lookup/2 at the same time with the same
        # key, both can enter this path, update the DB more than once, and store
        # more api calls than the user actually made. This race is mitigated by
        # taking a mutex so the reads/writes below happen sequentially. A better
        # solution would use techniques similar to a CAS operation.
        lock = Mutex.await(Sanbase.ApiCallLimitMutex, entity_key, 5_000)

        # Do another lookup to re-fetch the data in case we waited for the
        # mutex while some other process was doing work here.
        [{^entity_key, api_calls_remaining, quota, metadata, _refresh_after}] =
          :ets.lookup(@ets_table, entity_key)

        if api_calls_remaining <= count do
          # Total calls made so far = granted quota minus calls left, plus the
          # calls being processed right now. Flush that to the DB.
          api_calls_made = quota - api_calls_remaining + count
          {:ok, _} = ApiCallLimit.update_usage_db(entity_type, entity, api_calls_made)
          get_quota_db_and_update_ets(entity_type, entity, entity_key)
        else
          # Another process already refreshed the quota - a plain decrement is enough.
          true = do_upate_ets_usage(entity_key, api_calls_remaining, count, metadata)
        end

        Mutex.release(Sanbase.ApiCallLimitMutex, lock)
        :ok

      # Plenty of local allowance left - just decrement in ETS.
      [{^entity_key, api_calls_remaining, _quota, metadata, _refresh_after}] ->
        true = do_upate_ets_usage(entity_key, api_calls_remaining, count, metadata)
        :ok
    end
  end
defp do_upate_ets_usage(entity_key, api_calls_remaining, count, metadata) do
remaining = metadata.api_calls_remaining
metadata =
Map.put(metadata, :api_calls_remaining, %{
month: Enum.max([remaining.month - count, 0]),
hour: Enum.max([remaining.hour - count, 0]),
minute: Enum.max([remaining.minute - count, 0])
})
true =
:ets.update_element(
@ets_table,
entity_key,
{2, api_calls_remaining - count}
)
true = :ets.update_element(@ets_table, entity_key, {4, metadata})
end
  # Fetches a fresh quota from the DB and caches the outcome in ETS.
  #
  # On success the entry is {key, quota, quota, metadata, refresh_after}: the
  # second element is decremented as calls are made, the third remembers the
  # granted quota, and `refresh_after` forces a re-check at the start of the
  # next minute.
  #
  # On error (entity is rate limited) a {key, :rate_limited, error_map} entry
  # is stored; `retry_again_after` caps how long the cached block is trusted
  # (at most 60 seconds) before the DB is consulted again.
  defp get_quota_db_and_update_ets(entity_type, entity, entity_key) do
    case ApiCallLimit.get_quota_db(entity_type, entity) do
      {:ok, %{quota: quota} = metadata} ->
        now = Timex.now()
        refresh_after = Timex.shift(now, seconds: 60 - now.second)

        true =
          :ets.insert(
            @ets_table,
            {entity_key, quota, quota, metadata, refresh_after}
          )

        {:ok, metadata}

      {:error, %{} = error_map} ->
        retry_again_after =
          Enum.min(
            [
              error_map.blocked_until,
              DateTime.add(DateTime.utc_now(), 60, :second)
            ],
            DateTime
          )

        error_map = Map.put(error_map, :retry_again_after, retry_again_after)
        true = :ets.insert(@ets_table, {entity_key, :rate_limited, error_map})
        {:error, error_map}
    end
  end
end
| 35.960699 | 92 | 0.653309 |
738fab31c6660f81acf7e83a6c7b4f7726ba2e92 | 371 | exs | Elixir | test/test_helper.exs | bitpay/elixir-client | cccf8abbad5da7ad3d01d186bd14bf69eca68770 | [
"MIT"
] | 35 | 2015-01-18T02:16:11.000Z | 2021-11-14T01:55:34.000Z | test/test_helper.exs | philosodad/bitpay-elixir | cccf8abbad5da7ad3d01d186bd14bf69eca68770 | [
"MIT"
] | 3 | 2015-06-24T15:18:57.000Z | 2017-05-17T17:52:05.000Z | test/test_helper.exs | bitpay/elixir-client | cccf8abbad5da7ad3d01d186bd14bf69eca68770 | [
"MIT"
] | 13 | 2015-01-16T21:11:22.000Z | 2021-10-29T23:23:59.000Z | ExUnit.start()
#def an_illegal_claim_code
# legal_map = [*'A'..'Z'] + [*'a'..'z'] + [*0..9]
# first_length = rand(6)
# short_code = (0..first_length).map{legal_map.sample}.join
# second_length = [*8..25].sample
# long_code = [*8..25].sample.times.inject([]){|arr| arr << legal_map.sample}.join
# [nil, short_code, long_code].sample
#end
| 33.727273 | 83 | 0.590296 |
738fb1b73d5e61507d38acd3a3d1a9ce81daeba1 | 1,882 | ex | Elixir | clients/cloud_billing/lib/google_api/cloud_billing/v1/model/list_skus_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_billing/lib/google_api/cloud_billing/v1/model/list_skus_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_billing/lib/google_api/cloud_billing/v1/model/list_skus_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudBilling.V1.Model.ListSkusResponse do
  @moduledoc """
  Response message for `ListSkus`.

  ## Attributes

  *   `nextPageToken` (*type:* `String.t`, *default:* `nil`) - A token to retrieve the next page of results. To retrieve the next page, call `ListSkus` again with the `page_token` field set to this value. This field is empty if there are no more results to retrieve.
  *   `skus` (*type:* `list(GoogleApi.CloudBilling.V1.Model.Sku.t)`, *default:* `nil`) - The list of public SKUs of the given service.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :nextPageToken => String.t() | nil,
          :skus => list(GoogleApi.CloudBilling.V1.Model.Sku.t()) | nil
        }

  # `field/1,2` is provided by GoogleApi.Gax.ModelBase and wires up the
  # JSON (de)serialization for each attribute.
  field(:nextPageToken)
  field(:skus, as: GoogleApi.CloudBilling.V1.Model.Sku, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.CloudBilling.V1.Model.ListSkusResponse do
  # Hand JSON decoding over to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.CloudBilling.V1.Model.ListSkusResponse.decode(value, options)
end

defimpl Poison.Encoder, for: GoogleApi.CloudBilling.V1.Model.ListSkusResponse do
  # Encoding is generic over all Gax-based models.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 37.64 | 266 | 0.731668 |
738fbc70f1858ace938c708c3e29f9d2f72ca3ed | 1,102 | ex | Elixir | test/support/conn_case.ex | Baransu/togither | 87483348f0b800c2a28b2dabc77180a5b49ea59e | [
"BSD-3-Clause"
] | 1 | 2016-08-11T04:06:29.000Z | 2016-08-11T04:06:29.000Z | test/support/conn_case.ex | Baransu/togither | 87483348f0b800c2a28b2dabc77180a5b49ea59e | [
"BSD-3-Clause"
] | 1 | 2017-02-17T19:20:29.000Z | 2017-02-17T20:27:44.000Z | test/support/conn_case.ex | Baransu/togither | 87483348f0b800c2a28b2dabc77180a5b49ea59e | [
"BSD-3-Clause"
] | null | null | null | defmodule Togither.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
  # Code injected into every test module that `use`s this case template.
  using do
    quote do
      # Import conveniences for testing with connections
      use Phoenix.ConnTest
      alias Togither.Repo
      import Ecto
      import Ecto.Changeset
      import Ecto.Query
      import Togither.Router.Helpers
      # The default endpoint for testing
      @endpoint Togither.Endpoint
    end
  end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Togither.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Togither.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 24.488889 | 70 | 0.705989 |
738fc8480d6e7a0b42e932230ba4e1ad0fc8f7f7 | 2,479 | exs | Elixir | test/link_preview/parsers/html_test.exs | PabloG6/link_preview | b00e6029e5f941e566e5b31b7dc7b9a7d490b11e | [
"Apache-2.0"
] | 15 | 2017-01-31T14:18:27.000Z | 2021-05-24T18:25:01.000Z | test/link_preview/parsers/html_test.exs | PabloG6/link_preview | b00e6029e5f941e566e5b31b7dc7b9a7d490b11e | [
"Apache-2.0"
] | 10 | 2016-09-18T23:55:28.000Z | 2016-09-30T06:35:10.000Z | test/link_preview/parsers/html_test.exs | PabloG6/link_preview | b00e6029e5f941e566e5b31b7dc7b9a7d490b11e | [
"Apache-2.0"
] | 11 | 2018-02-20T04:36:41.000Z | 2022-03-02T13:16:54.000Z | defmodule LinkPreview.Parsers.HtmlTest do
use LinkPreview.Case
alias LinkPreview.Parsers.Html
alias LinkPreview.Page
@page %Page{original_url: "http://example.com/", website_url: "example.com"}
setup [:reset_defaults]
  describe "title" do
    test "optimistic case" do
      assert Html.title(@page, @html) == %Page{@page | title: "HTML Test Title"}
    end

    # Opengraph-only fixture carries no plain HTML title, so the page is unchanged.
    test "pessimistic case" do
      assert Html.title(@page, @opengraph) == @page
    end
  end

  describe "description" do
    test "optimistic case" do
      assert Html.description(@page, @html) == %Page{@page | description: "HTML Test Description"}
    end

    test "pessimistic case" do
      assert Html.description(@page, @opengraph) == @page
    end
  end

  describe "images" do
    test "optimistic case without additional options" do
      assert Html.images(@page, @html).images == [
               %{url: "http://example.com/images/html1.jpg"},
               %{url: "example.com/images/html2.jpg"},
               %{url: "/images/html3.jpg"},
               %{url: "images/html4.jpg"},
               %{url: "https://example.com/images/html5.jpg"}
             ]
    end

    # Without filtering options every image in the spam fixture is returned.
    test "doesn't limit images" do
      images = Html.images(@page, @image_spam).images
      assert Enum.count(images) == 73
    end

    test "limits images with :force_images_absolute_url" do
      Application.put_env(:link_preview, :force_images_absolute_url, true)
      images = Html.images(@page, @image_spam).images
      assert Enum.count(images) == 50
    end

    test "limits images with :force_images_url_schema" do
      Application.put_env(:link_preview, :force_images_url_schema, true)
      images = Html.images(@page, @image_spam).images
      assert Enum.count(images) == 50
    end

    # Excluded from the default run; kept for manual verification.
    @tag :excluded
    test "limits images with :filter_small_images" do
      Application.put_env(:link_preview, :filter_small_images, 1)
      images = Html.images(@page, @image_spam).images
      assert Enum.count(images) == 50
    end

    test "pessimistic case" do
      assert Html.images(@page, @opengraph) == @page
    end
  end
defp reset_defaults(opts) do
on_exit(fn ->
Application.put_env(:link_preview, :friendly_strings, true)
Application.put_env(:link_preview, :force_images_absolute_url, false)
Application.put_env(:link_preview, :force_images_url_schema, false)
Application.put_env(:link_preview, :filter_small_images, false)
end)
{:ok, opts}
end
end
| 29.511905 | 98 | 0.65591 |
738fdbd38282f0ae5135eaab658cf4d801d90da6 | 2,059 | ex | Elixir | lib/file_size/ecto/bit_with_unit.ex | tlux/file_size_ecto | 654fe0c75c151c0ede53e0b9f2928bf412abeedb | [
"MIT"
] | null | null | null | lib/file_size/ecto/bit_with_unit.ex | tlux/file_size_ecto | 654fe0c75c151c0ede53e0b9f2928bf412abeedb | [
"MIT"
] | null | null | null | lib/file_size/ecto/bit_with_unit.ex | tlux/file_size_ecto | 654fe0c75c151c0ede53e0b9f2928bf412abeedb | [
"MIT"
] | null | null | null | defmodule FileSize.Ecto.BitWithUnit do
@moduledoc """
An Ecto type that represents a file size in bits, supporting storage of
different units. The value is stored as map in the database (i.e. jsonb when
using PostgreSQL).
## Example
defmodule MySchema do
use Ecto.Schema
schema "my_table" do
field :file_size, FileSize.Ecto.BitWithUnit
end
end
"""
use Ecto.Type
alias FileSize.Bit
alias FileSize.Ecto.Bit, as: BitType
alias FileSize.Ecto.Utils
@impl true
def type, do: :map
  @impl true
  def cast(term)

  # Already a Bit struct - accept as-is.
  def cast(%Bit{} = size) do
    {:ok, size}
  end

  # String-keyed maps (e.g. decoded JSON params) are normalized to atom keys
  # and re-dispatched to the clauses below.
  def cast(%{"bits" => bits, "unit" => unit}) do
    cast(%{bits: bits, unit: unit})
  end

  def cast(%{"value" => value, "unit" => unit}) do
    cast(%{value: value, unit: unit})
  end

  # Raw bit count plus target unit.
  def cast(%{bits: bits, unit: unit}) when is_integer(bits) do
    with {:ok, unit} <- parse_unit(unit) do
      {:ok, FileSize.from_bits(bits, unit)}
    end
  end

  # Value expressed directly in the given unit.
  def cast(%{value: value, unit: unit}) do
    with {:ok, value} <- Utils.assert_value(value),
         {:ok, unit} <- parse_unit(unit) do
      {:ok, FileSize.new(value, unit)}
    end
  end

  # Human-readable string; accepted only when it parses to a bit-based size.
  def cast(str) when is_binary(str) do
    case FileSize.parse(str) do
      {:ok, %Bit{} = size} -> {:ok, size}
      _ -> :error
    end
  end

  # Anything else falls through to the plain bit type's cast.
  def cast(term) do
    BitType.cast(term)
  end
@impl true
def dump(term)
def dump(%Bit{} = size) do
{:ok,
%{
"bits" => FileSize.to_integer(size),
"unit" => Utils.serialize_unit(size.unit)
}}
end
def dump(_term), do: :error
@impl true
def embed_as(_format), do: :dump
@impl true
defdelegate equal?(size, other_size), to: Utils
@impl true
def load(term)
def load(%{"bits" => bits, "unit" => unit_str})
when is_integer(bits) and is_binary(unit_str) do
with {:ok, unit} <- parse_unit(unit_str) do
{:ok, FileSize.from_bits(bits, unit)}
end
end
def load(_term), do: :error
defp parse_unit(unit) do
Utils.parse_unit_for_type(unit, Bit)
end
end
| 20.386139 | 78 | 0.608548 |
738ff48f12c513423dde4d00d902fe2dd915e17f | 3,823 | ex | Elixir | lib/credo/check/readability/string_sigils.ex | hrzndhrn/credo | 71a7b24a5ca8e7a48416e0cdfb42cf8a0fef9593 | [
"MIT"
] | 4,590 | 2015-09-28T06:01:43.000Z | 2022-03-29T08:48:57.000Z | lib/credo/check/readability/string_sigils.ex | hrzndhrn/credo | 71a7b24a5ca8e7a48416e0cdfb42cf8a0fef9593 | [
"MIT"
] | 890 | 2015-11-16T21:07:07.000Z | 2022-03-29T08:52:07.000Z | lib/credo/check/readability/string_sigils.ex | hrzndhrn/credo | 71a7b24a5ca8e7a48416e0cdfb42cf8a0fef9593 | [
"MIT"
] | 479 | 2015-11-17T19:42:40.000Z | 2022-03-29T00:09:21.000Z | defmodule Credo.Check.Readability.StringSigils do
alias Credo.SourceFile
alias Credo.Code.Heredocs
use Credo.Check,
base_priority: :low,
param_defaults: [
maximum_allowed_quotes: 3
],
explanations: [
check: ~S"""
If you used quoted strings that contain quotes, you might want to consider
switching to the use of sigils instead.
# okay
"<a href=\"http://elixirweekly.net\">#\{text}</a>"
# not okay, lots of escaped quotes
"<a href=\"http://elixirweekly.net\" target=\"_blank\">#\{text}</a>"
# refactor to
~S(<a href="http://elixirweekly.net" target="_blank">#\{text}</a>)
This allows us to remove the noise which results from the need to escape
quotes within quotes.
Like all `Readability` issues, this one is not a technical concern.
But you can improve the odds of others reading and liking your code by making
it easier to follow.
""",
params: [
maximum_allowed_quotes: "The maximum amount of escaped quotes you want to tolerate."
]
]
@quote_codepoint 34
@doc false
@impl true
def run(%SourceFile{} = source_file, params) do
issue_meta = IssueMeta.for(source_file, params)
maximum_allowed_quotes = Params.get(params, :maximum_allowed_quotes, __MODULE__)
case remove_heredocs_and_convert_to_ast(source_file) do
{:ok, ast} ->
Credo.Code.prewalk(ast, &traverse(&1, &2, issue_meta, maximum_allowed_quotes))
{:error, errors} ->
IO.warn("Unexpected error while parsing #{source_file.filename}: #{inspect(errors)}")
[]
end
end
defp remove_heredocs_and_convert_to_ast(source_file) do
source_file
|> Heredocs.replace_with_spaces()
|> Credo.Code.ast()
end
  # AST node whose first argument is a literal: either a sigil call (whose
  # contents are exempt) or possibly a plain string literal to be checked.
  defp traverse(
         {maybe_sigil, meta, [str | rest_ast]} = ast,
         issues,
         issue_meta,
         maximum_allowed_quotes
       ) do
    line_no = meta[:line]

    cond do
      # Sigil contents are exempt - sigils are the recommended alternative.
      is_sigil(maybe_sigil) ->
        {rest_ast, issues}

      # A plain string literal: count its quotes and maybe record an issue.
      is_binary(str) ->
        {
          rest_ast,
          issues_for_string_literal(
            str,
            maximum_allowed_quotes,
            issues,
            issue_meta,
            line_no
          )
        }

      # Not a string-bearing node - keep walking the full subtree.
      true ->
        {ast, issues}
    end
  end

  # Any other node shape: continue traversal unchanged.
  defp traverse(ast, issues, _issue_meta, _maximum_allowed_quotes) do
    {ast, issues}
  end
defp is_sigil(maybe_sigil) when is_atom(maybe_sigil) do
maybe_sigil
|> Atom.to_string()
|> String.starts_with?("sigil_")
end
defp is_sigil(_), do: false
defp issues_for_string_literal(
string,
maximum_allowed_quotes,
issues,
issue_meta,
line_no
) do
if too_many_quotes?(string, maximum_allowed_quotes) do
[issue_for(issue_meta, line_no, string, maximum_allowed_quotes) | issues]
else
issues
end
end
  # True if `string` contains more than `limit` double-quote characters.
  defp too_many_quotes?(string, limit) do
    too_many_quotes?(string, 0, limit)
  end

  # Early exit as soon as the limit is exceeded - no need to scan the rest.
  defp too_many_quotes?(_string, count, limit) when count > limit do
    true
  end

  # Reached the end without exceeding the limit.
  defp too_many_quotes?(<<>>, _count, _limit) do
    false
  end

  # A double quote (@quote_codepoint == 34, i.e. `?"`) - count it.
  defp too_many_quotes?(<<c::utf8, rest::binary>>, count, limit)
       when c == @quote_codepoint do
    too_many_quotes?(rest, count + 1, limit)
  end

  # Any other UTF-8 codepoint - skip it.
  defp too_many_quotes?(<<_::utf8, rest::binary>>, count, limit) do
    too_many_quotes?(rest, count, limit)
  end

  # Remaining bytes are not valid UTF-8 - stop scanning.
  defp too_many_quotes?(<<_::binary>>, _count, _limit) do
    false
  end
defp issue_for(issue_meta, line_no, trigger, maximum_allowed_quotes) do
format_issue(
issue_meta,
message:
"More than #{maximum_allowed_quotes} quotes found inside string literal, consider using a sigil instead.",
trigger: trigger,
line_no: line_no
)
end
end
| 24.824675 | 114 | 0.631441 |
739010476ebabe1c5efcbf486788cdde7af153bf | 2,144 | ex | Elixir | lib/kaffe/config/producer.ex | jgaviria/kaffe | f7d54086bb7062c62cb012a1f73359843a0a625b | [
"MIT"
] | null | null | null | lib/kaffe/config/producer.ex | jgaviria/kaffe | f7d54086bb7062c62cb012a1f73359843a0a625b | [
"MIT"
] | null | null | null | lib/kaffe/config/producer.ex | jgaviria/kaffe | f7d54086bb7062c62cb012a1f73359843a0a625b | [
"MIT"
] | null | null | null | defmodule Kaffe.Config.Producer do
import Kaffe.Config, only: [heroku_kafka_endpoints: 0, parse_endpoints: 1]
def configuration do
%{
endpoints: endpoints(),
producer_config: client_producer_config(),
client_name: config_get(:client_name, :kaffe_producer_client),
topics: producer_topics(),
partition_strategy: config_get(:partition_strategy, :md5)
}
end
def producer_topics, do: config_get!(:topics)
def endpoints do
if heroku_kafka?() do
heroku_kafka_endpoints()
else
parse_endpoints(config_get!(:endpoints))
end
end
def client_producer_config do
default_client_producer_config() ++ maybe_heroku_kafka_ssl() ++ sasl_options() ++ ssl_options()
end
def sasl_options do
:sasl
|> config_get(%{})
|> Kaffe.Config.sasl_config()
end
def maybe_heroku_kafka_ssl do
case heroku_kafka?() do
true -> Kaffe.Config.ssl_config()
false -> []
end
end
def ssl_options do
:ssl
|> config_get(false)
|> Kaffe.Config.ssl_config()
end
  # Baseline producer settings; every value can be overridden via the
  # :kaffe producer application configuration.
  def default_client_producer_config do
    [
      auto_start_producers: true,
      allow_topic_auto_creation: config_get(:allow_topic_auto_creation, false),
      default_producer_config: [
        # NOTE(review): -1 presumably means "wait for all in-sync replicas" -
        # confirm against the brod/Kafka producer docs.
        required_acks: config_get(:required_acks, -1),
        ack_timeout: config_get(:ack_timeout, 1000),
        partition_buffer_limit: config_get(:partition_buffer_limit, 512),
        partition_onwire_limit: config_get(:partition_onwire_limit, 1),
        # 1 MiB default batch size.
        max_batch_size: config_get(:max_batch_size, 1_048_576),
        max_retries: config_get(:max_retries, 3),
        retry_backoff_ms: config_get(:retry_backoff_ms, 500),
        compression: config_get(:compression, :no_compression),
        min_compression_batch_size: config_get(:min_compression_batch_size, 1024)
      ]
    ]
  end
def heroku_kafka? do
config_get(:heroku_kafka_env, false)
end
def config_get!(key) do
Application.get_env(:kaffe, :producer)
|> Keyword.fetch!(key)
end
def config_get(key, default) do
Application.get_env(:kaffe, :producer)
|> Keyword.get(key, default)
end
end
| 27.139241 | 99 | 0.696362 |
7390667e4defe33714c3a2ed4b532fd05eb2cc4f | 18,709 | ex | Elixir | lib/memcache.ex | thecodeboss/memcachex | d6de70a8b93524a71dc54f736fcb79b5377718fd | [
"MIT"
] | null | null | null | lib/memcache.ex | thecodeboss/memcachex | d6de70a8b93524a71dc54f736fcb79b5377718fd | [
"MIT"
] | null | null | null | lib/memcache.ex | thecodeboss/memcachex | d6de70a8b93524a71dc54f736fcb79b5377718fd | [
"MIT"
] | null | null | null | defmodule Memcache do
@moduledoc """
This module provides a user friendly API to interact with the
memcached server.
## Example
{:ok, pid} = Memcache.start_link()
{:ok} = Memcache.set(pid, "hello", "world")
{:ok, "world"} = Memcache.get(pid, "hello")
## Coder
`Memcache.Coder` allows you to specify how the value should be encoded before
sending it to the server and how it should be decoded after it is
  retrieved. There are four built-in coders, namely `Memcache.Coder.Raw`,
`Memcache.Coder.Erlang`, `Memcache.Coder.JSON`,
`Memcache.Coder.ZIP`. Custom coders can be created by implementing
the `Memcache.Coder` behaviour.
## CAS
CAS feature allows to atomically perform two commands on a key. Get
the cas version number associated with a key during the first
command and pass that value during the second command. The second
command will fail if the value has changed by someone else in the
mean time.
{:ok, "hello", cas} = Memcache.get(pid, "key", cas: true)
{:ok} = Memcache.set_cas(pid, "key", "world", cas)
Memcache module provides a *_cas variant for most of the
functions. This function will take an additional argument named
`cas` and returns the same value as their counterpart except in case
of CAS error. In case of CAS error the returned value would be equal
to `{:error, "Key exists"}`
## Options
  Most of the functions in this module accept an optional `Keyword`
  list. The list below specifies the behavior of each option. The list
  of options accepted by a specific function will be documented in the
  specific function.
* `:cas` - (boolean) returns the CAS value associated with the
data. This value will be either in second or third position
of the returned tuple depending on the command. Defaults to `false`.
* `:ttl` - (integer) specifies the expiration time in seconds for
the corresponding key. Can be set to `0` to disable
expiration. The Default value can be configured using
`start_link/2`.
"""
@type error :: {:error, binary | atom}
@type result :: {:ok} | {:ok, integer} | {:ok, any} | {:ok, any, integer} | error
@type fetch_result :: {:ok, any} | {:ok, any, integer} | error
@type fetch_integer_result :: {:ok, integer} | {:ok, integer, integer} | error
@type store_result :: {:ok} | {:ok, integer} | error
alias Memcache.Connection
alias Memcache.Registry
@default_opts [
ttl: 0,
namespace: nil,
key_coder: nil,
coder: {Memcache.Coder.Raw, []}
]
@doc """
Creates a connection using `Memcache.Connection.start_link/2`
## Connection Options
This is a superset of the connection options accepted by the
`Memcache.Connection.start_link/2`. The following list specifies the
additional options.
* `:ttl` - (integer) a default expiration time in seconds. This
value will be used if the `:ttl` value is not specified for a
operation. Defaults to `0`(means forever).
* `:namespace` - (string) prepend each key with the given value.
* `:key_coder` - ({module, function}) Used to transform the key completely.
The function needs to accept one argument, the key and return a new key.
* `:coder` - (module | {module, options}) Can be either a module or
tuple contains the module and options. Defaults to
`{Memcache.Coder.Raw, []}`.
## Options
The second option is passed directly to the underlying
`GenServer.start_link/3`, so it can be used to create named process.
"""
@spec start_link(Keyword.t(), Keyword.t()) :: GenServer.on_start()
def start_link(connection_options \\ [], options \\ []) do
extra_opts = [:ttl, :namespace, :key_coder, :coder]
connection_options =
@default_opts
|> Keyword.merge(connection_options)
|> Keyword.update!(:coder, &normalize_coder/1)
{state, connection_options} = Keyword.split(connection_options, extra_opts)
{:ok, pid} = Connection.start_link(connection_options, options)
state =
state
|> Map.new()
|> Map.put(:connection, pid)
Registry.associate(pid, state)
{:ok, pid}
end
@doc """
Closes the connection to the memcached server.
"""
@spec stop(GenServer.server()) :: {:ok}
def stop(server) do
Connection.close(server)
end
@doc """
Gets the value associated with the key. Returns `{:error, "Key not
found"}` if the given key doesn't exist.
Accepted option: `:cas`
"""
@spec get(GenServer.server(), binary, Keyword.t()) :: fetch_result
def get(server, key, opts \\ []) do
execute_k(server, :GET, [key], opts)
end
@doc """
Gets the values associated with the list of keys. Returns a
map. Keys that are not found in the server are filtered from the
result.
Accepted option: `:cas`
"""
@spec multi_get(GenServer.server(), [binary], Keyword.t()) :: {:ok, map} | error
def multi_get(server, keys, opts \\ []) do
  # One quiet GET per key; quiet commands are pipelined in a single batch.
  quiet_commands = for key <- keys, do: {:GETQ, [key], opts}

  with {:ok, responses} <- execute_quiet_k(server, quiet_commands) do
    # Pair every key with its response, dropping the ones that errored
    # (typically "Key not found").
    found =
      keys
      |> Enum.zip(responses)
      |> Enum.flat_map(fn
        {key, {:ok, value}} -> [{key, value}]
        {key, {:ok, value, cas}} -> [{key, {value, cas}}]
        {_key, {:error, _reason}} -> []
      end)

    {:ok, Map.new(found)}
  end
end
@doc """
Sets the key to value
Accepted options: `:cas`, `:ttl`
"""
@spec set(GenServer.server(), binary, binary, Keyword.t()) :: store_result
def set(server, key, value, opts \\ []) do
# A CAS value of 0 means "store unconditionally".
set_cas(server, key, value, 0, opts)
end
@doc """
Sets the key to value if the key exists and has CAS value equal to
the provided value
Accepted options: `:cas`, `:ttl`
"""
@spec set_cas(GenServer.server(), binary, binary, integer, Keyword.t()) :: store_result
def set_cas(server, key, value, cas, opts \\ []) do
server_options = get_server_options(server)
# TTL falls back to the connection-wide default when not passed in opts.
execute_kv(
server,
:SET,
[key, value, cas, ttl_or_default(server_options, opts)],
opts,
server_options
)
end
@doc """
Multi version of `set/4`. Accepts a map or a list of `{key, value}`.
Accepted options: `:cas`, `:ttl`
"""
@spec multi_set(GenServer.server(), [{binary, binary}] | map, Keyword.t()) ::
{:ok, [store_result]} | error
def multi_set(server, commands, opts \\ []) do
# Expand each pair with CAS 0 (unconditional) and reuse multi_set_cas.
commands = Enum.map(commands, fn {key, value} -> {key, value, 0} end)
multi_set_cas(server, commands, opts)
end
@doc """
Multi version of `set_cas/4`. Accepts a list of `{key, value, cas}`.
Accepted options: `:cas`, `:ttl`
"""
@spec multi_set_cas(GenServer.server(), [{binary, binary, integer}], Keyword.t()) ::
{:ok, [store_result]} | error
def multi_set_cas(server, commands, opts \\ []) do
# Quiet SETQ unless the caller asked for per-key CAS results back.
op = if Keyword.get(opts, :cas, false), do: :SET, else: :SETQ
server_options = get_server_options(server)
commands =
Enum.map(commands, fn {key, value, cas} ->
{op, [key, value, cas, ttl_or_default(server_options, opts)], opts}
end)
execute_quiet_kv(server, commands, server_options)
end
@cas_error {:error, "Key exists"}

@doc """
Compare and swap value using optimistic locking.
1. Get the existing value for key
2. If it exists, call the update function with the value
3. Set the returned value for key
The 3rd operation will fail if someone else has updated the value
for the same key in the mean time. In that case, by default, this
function will go to step 1 and try again. Retry behavior can be
disabled by passing `[retry: false]` option.
"""
@spec cas(GenServer.server(), binary, (binary -> binary), Keyword.t()) :: {:ok, any} | error
def cas(server, key, update, opts \\ []) do
  # Fetch with CAS token, compute the new value, then store it only if
  # nobody else has written the key in between.
  with {:ok, value, cas} <- get(server, key, cas: true),
       new_value = update.(value),
       {:ok} <- set_cas(server, key, new_value, cas) do
    {:ok, new_value}
  else
    @cas_error ->
      if Keyword.get(opts, :retry, true) do
        # Fix: propagate the caller's opts on retry. Previously the
        # recursion dropped them, silently resetting every option to its
        # default on the second and later attempts.
        cas(server, key, update, opts)
      else
        @cas_error
      end

    err ->
      # Any other error (e.g. "Key not found") is returned as-is.
      err
  end
end
@doc """
Sets the key to value if the key doesn't exist already. Returns
`{:error, "Key exists"}` if the given key already exists.
Accepted options: `:cas`, `:ttl`
"""
@spec add(GenServer.server(), binary, binary, Keyword.t()) :: store_result
def add(server, key, value, opts \\ []) do
server_options = get_server_options(server)
# Unlike SET/REPLACE, the ADD argument list carries no CAS value.
execute_kv(
server,
:ADD,
[key, value, ttl_or_default(server_options, opts)],
opts,
server_options
)
end
@doc """
Sets the key to value if the key already exists. Returns `{:error,
"Key not found"}` if the given key doesn't exist.
Accepted options: `:cas`, `:ttl`
"""
@spec replace(GenServer.server(), binary, binary, Keyword.t()) :: store_result
def replace(server, key, value, opts \\ []) do
# CAS 0 = replace regardless of the stored CAS value.
replace_cas(server, key, value, 0, opts)
end
@doc """
Sets the key to value if the key already exists and has CAS value
equal to the provided value.
Accepted options: `:cas`, `:ttl`
"""
@spec replace_cas(GenServer.server(), binary, binary, integer, Keyword.t()) :: store_result
def replace_cas(server, key, value, cas, opts \\ []) do
server_options = get_server_options(server)
execute_kv(
server,
:REPLACE,
[key, value, cas, ttl_or_default(server_options, opts)],
opts,
server_options
)
end
@doc """
Removes the item with the given key value. Returns `{:error, "Key
not found"}` if the given key is not found
"""
@spec delete(GenServer.server(), binary) :: store_result
def delete(server, key) do
execute_k(server, :DELETE, [key])
end
@doc """
Removes the item with the given key value if the CAS value is equal
to the provided value
"""
@spec delete_cas(GenServer.server(), binary, integer) :: store_result
def delete_cas(server, key, cas) do
execute_k(server, :DELETE, [key, cas])
end
@doc """
Flush all the items in the server. `ttl` option will cause the flush
to be delayed by the specified time.
Accepted options: `:ttl`
"""
@spec flush(GenServer.server(), Keyword.t()) :: store_result
def flush(server, opts \\ []) do
# FLUSH is server-wide, so it bypasses the key helpers (no namespacing).
execute(server, :FLUSH, [Keyword.get(opts, :ttl, 0)])
end
@doc """
Appends the value to the end of the current value of the
key. Returns `{:error, "Item not stored"}` if the item is not present
in the server already
Accepted options: `:cas`
"""
@spec append(GenServer.server(), binary, binary, Keyword.t()) :: store_result
def append(server, key, value, opts \\ []) do
# APPEND/PREPEND take no TTL argument; they only modify the stored bytes.
execute_kv(server, :APPEND, [key, value], opts)
end
@doc """
Appends the value to the end of the current value of the
key if the CAS value is equal to the provided value
Accepted options: `:cas`
"""
@spec append_cas(GenServer.server(), binary, binary, integer, Keyword.t()) :: store_result
def append_cas(server, key, value, cas, opts \\ []) do
execute_kv(server, :APPEND, [key, value, cas], opts)
end
@doc """
Prepends the value to the start of the current value of the
key. Returns `{:error, "Item not stored"}` if the item is not present
in the server already
Accepted options: `:cas`
"""
@spec prepend(GenServer.server(), binary, binary, Keyword.t()) :: store_result
def prepend(server, key, value, opts \\ []) do
execute_kv(server, :PREPEND, [key, value], opts)
end
@doc """
Prepends the value to the start of the current value of the
key if the CAS value is equal to the provided value
Accepted options: `:cas`
"""
@spec prepend_cas(GenServer.server(), binary, binary, integer, Keyword.t()) :: store_result
def prepend_cas(server, key, value, cas, opts \\ []) do
execute_kv(server, :PREPEND, [key, value, cas], opts)
end
@doc """
Increments the current value. Only integer value can be
incremented. Returns `{:error, "Incr/Decr on non-numeric value"}` if
the value stored in the server is not numeric.
## Options
* `:by` - (integer) The amount to add to the existing
value. Defaults to `1`.
* `:default` - (integer) Default value to use in case the key is not
found. Defaults to `0`.
other options: `:cas`, `:ttl`
"""
@spec incr(GenServer.server(), binary, Keyword.t()) :: fetch_integer_result
def incr(server, key, opts \\ []) do
# CAS 0 = increment unconditionally.
incr_cas(server, key, 0, opts)
end
@doc """
Increments the current value if the CAS value is equal to the
provided value.
## Options
* `:by` - (integer) The amount to add to the existing
value. Defaults to `1`.
* `:default` - (integer) Default value to use in case the key is not
found. Defaults to `0`.
other options: `:cas`, `:ttl`
"""
@spec incr_cas(GenServer.server(), binary, integer, Keyword.t()) :: fetch_integer_result
def incr_cas(server, key, cas, opts \\ []) do
defaults = [by: 1, default: 0]
opts = Keyword.merge(defaults, opts)
server_options = get_server_options(server)
# INCREMENT argument order: key, delta, default, cas, expiration.
execute_k(
server,
:INCREMENT,
[
key,
Keyword.get(opts, :by),
Keyword.get(opts, :default),
cas,
ttl_or_default(server_options, opts)
],
opts,
server_options
)
end
@doc """
Decrements the current value. Only integer value can be
decremented. Returns `{:error, "Incr/Decr on non-numeric value"}` if
the value stored in the server is not numeric.
## Options
* `:by` - (integer) The amount to add to the existing
value. Defaults to `1`.
* `:default` - (integer) Default value to use in case the key is not
found. Defaults to `0`.
other options: `:cas`, `:ttl`
"""
@spec decr(GenServer.server(), binary, Keyword.t()) :: fetch_integer_result
def decr(server, key, opts \\ []) do
# CAS 0 = decrement unconditionally.
decr_cas(server, key, 0, opts)
end
@doc """
Decrements the current value if the CAS value is equal to the
provided value.
## Options
* `:by` - (integer) The amount to add to the existing
value. Defaults to `1`.
* `:default` - (integer) Default value to use in case the key is not
found. Defaults to `0`.
other options: `:cas`, `:ttl`
"""
@spec decr_cas(GenServer.server(), binary, integer, Keyword.t()) :: fetch_integer_result
def decr_cas(server, key, cas, opts \\ []) do
defaults = [by: 1, default: 0]
opts = Keyword.merge(defaults, opts)
server_options = get_server_options(server)
# DECREMENT argument order mirrors INCREMENT: key, delta, default, cas, ttl.
execute_k(
server,
:DECREMENT,
[
key,
Keyword.get(opts, :by),
Keyword.get(opts, :default),
cas,
ttl_or_default(server_options, opts)
],
opts,
server_options
)
end
@doc """
Gets the default set of server statistics
"""
@spec stat(GenServer.server()) :: {:ok, map} | error
def stat(server) do
execute(server, :STAT, [])
end
@doc """
Gets the specific set of server statistics
"""
@spec stat(GenServer.server(), String.t()) :: {:ok, map} | error
def stat(server, key) do
execute(server, :STAT, [key])
end
@doc """
Gets the version of the server
"""
@spec version(GenServer.server()) :: String.t() | error
def version(server) do
execute(server, :VERSION, [])
end
@doc """
Sends a noop command
"""
@spec noop(GenServer.server()) :: {:ok} | error
def noop(server) do
# Handy as a cheap round-trip / keep-alive check.
execute(server, :NOOP, [])
end
## Private

# Looks up the client-level settings (ttl, namespace, coders) that were
# associated with this connection process at start_link time.
defp get_server_options(server) do
  Registry.lookup(server)
end

# A coder is carried around as {module, options}; a bare module gets
# empty options attached.
defp normalize_coder(spec) when is_tuple(spec), do: spec
defp normalize_coder(module) when is_atom(module), do: {module, []}

# Runs an outgoing value through the configured coder.
defp encode(server_options, value) do
  coder = server_options.coder
  elem(coder, 0).encode(value, elem(coder, 1))
end

# Runs an incoming raw value through the configured coder.
defp decode(server_options, value) do
  coder = server_options.coder
  elem(coder, 0).decode(value, elem(coder, 1))
end

# Decodes the payload of successful responses; anything else (errors,
# bare ok tuples) is passed through untouched.
defp decode_response({:ok, value}, server_options) when is_binary(value) do
  {:ok, decode(server_options, value)}
end

defp decode_response({:ok, value, cas}, server_options) when is_binary(value) do
  {:ok, decode(server_options, value), cas}
end

defp decode_response(other, _server_options), do: other

# Decodes every element of a pipelined (quiet) response list.
defp decode_multi_response({:ok, values}, server_options) when is_list(values) do
  {:ok, Enum.map(values, &decode_response(&1, server_options))}
end

defp decode_multi_response(other, _server_options), do: other

# A per-call :ttl wins over the connection-wide default, even when nil.
defp ttl_or_default(server_options, opts) do
  case Keyword.fetch(opts, :ttl) do
    {:ok, ttl} -> ttl
    :error -> server_options.ttl
  end
end
# This takes care of both namespacing and key coding.
defp key_with_namespace(server_options, key) do
  key
  |> prefix_namespace(server_options.namespace)
  |> transform_key(server_options.key_coder)
end

# Prepends "namespace:" when a namespace is configured.
defp prefix_namespace(key, nil), do: key
defp prefix_namespace(key, namespace), do: "#{namespace}:#{key}"

# Runs the key through the configured {module, function} transformer, if any.
defp transform_key(key, {module, function}), do: apply(module, function, [key])
defp transform_key(key, _none), do: key
defp execute_k(server, command, args, opts \\ []),
do: execute_k(server, command, args, opts, get_server_options(server))
# Key-only commands: namespace/transform the key, then decode the response.
defp execute_k(server, command, [key | rest], opts, server_options) do
server
|> execute(command, [key_with_namespace(server_options, key) | rest], opts)
|> decode_response(server_options)
end
defp execute_kv(server, command, args, opts),
do: execute_kv(server, command, args, opts, get_server_options(server))
# Key+value commands: additionally run the value through the coder.
defp execute_kv(server, command, [key | [value | rest]], opts, server_options) do
server
|> execute(
command,
[key_with_namespace(server_options, key) | [encode(server_options, value) | rest]],
opts
)
|> decode_response(server_options)
end
# Lowest-level single-command dispatch to the connection process.
defp execute(server, command, args, opts \\ []) do
Connection.execute(server, command, args, opts)
end
defp execute_quiet_k(server, commands),
do: execute_quiet_k(server, commands, get_server_options(server))
# Pipelined (quiet) variant of execute_k for batches of key-only commands.
defp execute_quiet_k(server, commands, server_options) do
commands =
Enum.map(commands, fn {command, [key | rest], opts} ->
{command, [key_with_namespace(server_options, key) | rest], opts}
end)
server
|> execute_quiet(commands)
|> decode_multi_response(server_options)
end
# Pipelined (quiet) variant of execute_kv for batches of key+value commands.
defp execute_quiet_kv(server, commands, server_options) do
commands =
Enum.map(commands, fn {command, [key | [value | rest]], opts} ->
{command,
[key_with_namespace(server_options, key) | [encode(server_options, value) | rest]], opts}
end)
server
|> execute_quiet(commands)
|> decode_multi_response(server_options)
end
# Sends a whole batch of commands to the connection in one round trip.
defp execute_quiet(server, commands) do
Connection.execute_quiet(server, commands)
end
end
| 29.370487 | 98 | 0.65113 |
739081cfea8a8b9ef50d545a0363b5a041b430a9 | 3,719 | ex | Elixir | lib/marco_polo/connection/auth.ex | EdmondFrank/marco_polo | aa094be7bffc1a7b254124b2bb7628a56de01aef | [
"Apache-2.0"
] | null | null | null | lib/marco_polo/connection/auth.ex | EdmondFrank/marco_polo | aa094be7bffc1a7b254124b2bb7628a56de01aef | [
"Apache-2.0"
] | null | null | null | lib/marco_polo/connection/auth.ex | EdmondFrank/marco_polo | aa094be7bffc1a7b254124b2bb7628a56de01aef | [
"Apache-2.0"
] | null | null | null | defmodule MarcoPolo.Connection.Auth do
@moduledoc false
import MarcoPolo.Protocol.BinaryHelpers
alias MarcoPolo.Error
alias MarcoPolo.Protocol
@typep state :: Map.t
@protocol 32
@min_protocol 28
@serialization_protocol "ORecordSerializerBinary"
@timeout 5000
@doc """
Authenticate to the OrientDB server to perform server or database operations.
The type of connection (either a `REQUEST_CONNECT` or a `REQUEST_DB_OPEN`) is
stored in `opts[:connection]`. `:connection` is a required key in the options.
"""
@spec connect(state) :: {:ok, state} | {:error, term, state} | {:tcp_error, term, state}
def connect(s) do
# First agree on a protocol version, then authenticate over the socket.
case negotiate_protocol(s) do
{:ok, s} ->
authenticate(s)
{:tcp_error, reason} ->
{:tcp_error, reason, s}
end
end
# Waits for the 2 byte protocol version, checks that the protocol is supported
# and stores it in the state.
defp negotiate_protocol(%{socket: socket, opts: opts} = s) do
case :gen_tcp.recv(socket, 2, opts[:timeout] || @timeout) do
{:ok, <<version :: short>>} ->
# Raises if the server speaks an unsupported (too old) protocol.
check_min_protocol!(version)
{:ok, %{s | protocol_version: version}}
{:error, reason} ->
{:tcp_error, reason}
end
end
# Sends the authentication data (based on the `opts` in the state) over the
# wire and waits for a response.
defp authenticate(%{socket: socket} = s) do
{op, args} = op_and_connection_args(s)
req = Protocol.encode_op(op, args)
case :gen_tcp.send(socket, req) do
:ok ->
wait_for_connection_response(s, op)
{:error, reason} ->
{:tcp_error, reason, s}
end
end
# Builds the operation name and argument list for the initial request,
# based on the :connection option (:server or {:db, name, type}).
defp op_and_connection_args(%{opts: opts, protocol_version: _protocol}) do
{op, other_args} = op_and_args_from_connection_type(Keyword.fetch!(opts, :connection))
# Arguments shared by both REQUEST_CONNECT and REQUEST_DB_OPEN.
static_args = [
nil, # session id, nil (-1) for first-time connections
Application.get_env(:marco_polo, :client_name),
Application.get_env(:marco_polo, :version),
{:short, @protocol},
"client id",
@serialization_protocol,
false, # token-based auth, not supported
]
user = Keyword.fetch!(opts, :user)
password = Keyword.fetch!(opts, :password)
{op, static_args ++ other_args ++ [user, password]}
end
# Server-level connection: REQUEST_CONNECT with no extra arguments.
defp op_and_args_from_connection_type(:server),
do: {:connect, []}
# Database connection: REQUEST_DB_OPEN with the db name and type.
defp op_and_args_from_connection_type({:db, name, type})
when type in [:document, :graph],
do: {:db_open, [name, Atom.to_string(type)]}
defp op_and_args_from_connection_type({:db, _, type}),
do: raise(ArgumentError, "unknown database type: #{inspect type}, valid ones are :document, :graph")
defp op_and_args_from_connection_type(_type),
do: raise(ArgumentError, "invalid connection type, valid ones are :server or {:db, name, type}")
# Receives bytes until a complete connection response can be parsed,
# accumulating partial data in the state's tail.
defp wait_for_connection_response(%{socket: socket, opts: opts} = s, connection_type) do
case :gen_tcp.recv(socket, 0, opts[:timeout] || @timeout) do
{:error, reason} ->
{:tcp_error, reason, s}
{:ok, new_data} ->
# Prepend any leftover bytes from a previous partial parse.
data = s.tail <> new_data
case Protocol.parse_connection_resp(data, connection_type) do
:incomplete ->
wait_for_connection_response(%{s | tail: data}, connection_type)
{-1, {:error, err}, rest} ->
{:error, err, %{s | tail: rest}}
{-1, {:ok, [sid, _token]}, rest} ->
{:ok, %{s | session_id: sid, tail: rest}}
end
end
end
# Raises if the server's protocol is older than the oldest one we support.
defp check_min_protocol!(protocol) when protocol < @min_protocol do
raise Error, """
the minimum supported protocol is #{@min_protocol}, the server is using #{protocol}
"""
end
defp check_min_protocol!(_) do
:ok
end
end
73908255eaba074f93f419eda29c14748f5c39b4 | 678 | ex | Elixir | lib/update_handler.ex | cjwadair/phx_in_place | 12ea71ec1d3dc8ac2b2f220d0b3f4c676706c570 | [
"MIT"
] | 4 | 2018-05-17T09:56:51.000Z | 2022-02-26T21:24:47.000Z | lib/update_handler.ex | cjwadair/phx_in_place | 12ea71ec1d3dc8ac2b2f220d0b3f4c676706c570 | [
"MIT"
] | null | null | null | lib/update_handler.ex | cjwadair/phx_in_place | 12ea71ec1d3dc8ac2b2f220d0b3f4c676706c570 | [
"MIT"
] | null | null | null | defmodule UpdateHandler do
@moduledoc false
@repo Application.get_env(:phx_in_place, :repo)
def update_repo(struct, id, attrs) do
changeset =
struct
|> @repo.get!(id)
|> struct.changeset(attrs)
case changeset.valid? do
true -> @repo.update(changeset)
false ->
{:error,
Ecto.Changeset.traverse_errors(changeset, fn {msg, opts} ->
Enum.reduce(opts, msg, fn {key, value}, acc ->
String.replace(acc, "%{#{key}}", to_string(value))
end)
end)
# errors: changeset.errors
}
# {:error, "validation errors detected"}
end
end
end
| 20.545455 | 69 | 0.554572 |
7390ab6df544874c6a550184986d630f74e3e28b | 838 | exs | Elixir | mix.exs | asummers/testing_gocd_elixir | 67658c9ea17a99605ec5537ae3d1045784c1dbca | [
"MIT"
] | null | null | null | mix.exs | asummers/testing_gocd_elixir | 67658c9ea17a99605ec5537ae3d1045784c1dbca | [
"MIT"
] | null | null | null | mix.exs | asummers/testing_gocd_elixir | 67658c9ea17a99605ec5537ae3d1045784c1dbca | [
"MIT"
] | null | null | null | defmodule TestingGocdElixir.MixProject do
use Mix.Project
def project do
[
app: :testing_gocd_elixir,
version: "0.1.0",
elixir: "~> 1.8",
start_permanent: Mix.env() == :prod,
deps: deps(),
aliases: aliases()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:credo, "~> 1.0", only: [:dev, :test], runtime: false},
{:dialyxir, "~> 1.0.0-rc.6", only: [:dev], runtime: false}
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
]
end
def aliases() do
[
lint: ["credo list --format=oneline --strict"]
]
end
end
| 22.052632 | 87 | 0.566826 |
7390b7f5fe7aa1e4a7cdc97b4d8addc5ceaf179c | 496 | exs | Elixir | config/test.exs | Namuraid/backend | 04a10248bfeb156eb291207931621b40585d8f7e | [
"MIT"
] | null | null | null | config/test.exs | Namuraid/backend | 04a10248bfeb156eb291207931621b40585d8f7e | [
"MIT"
] | null | null | null | config/test.exs | Namuraid/backend | 04a10248bfeb156eb291207931621b40585d8f7e | [
"MIT"
] | null | null | null | use Mix.Config
# Test-environment configuration for the :namuraid application.
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :namuraid, Namuraid.Endpoint,
http: [port: 4001],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
# Configure your database
config :namuraid, Namuraid.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "namuraid_test",
hostname: "localhost",
# The SQL sandbox wraps each test in a transaction that is rolled back.
pool: Ecto.Adapters.SQL.Sandbox
| 24.8 | 56 | 0.735887 |
739173db02ceea9d346984e5893d2c5d526576a8 | 193 | exs | Elixir | apps/montreal_elixir/config/test.exs | noircir/website-1 | bfe43f5a7b022dfc009802d9d068d438e83e73f9 | [
"MIT"
] | null | null | null | apps/montreal_elixir/config/test.exs | noircir/website-1 | bfe43f5a7b022dfc009802d9d068d438e83e73f9 | [
"MIT"
] | null | null | null | apps/montreal_elixir/config/test.exs | noircir/website-1 | bfe43f5a7b022dfc009802d9d068d438e83e73f9 | [
"MIT"
] | null | null | null | use Mix.Config
# Configure your database
config :montreal_elixir, MontrealElixir.Repo,
adapter: Ecto.Adapters.Postgres,
database: "montreal_elixir_test",
pool: Ecto.Adapters.SQL.Sandbox
| 24.125 | 45 | 0.792746 |
73919c314fb36c41d9b4a7e402482d259e138b59 | 4,133 | ex | Elixir | lib/scribe.ex | cowile/scribe | 75334daf0f1fd24ba92bceef67b4cb6e1e2c4f37 | [
"MIT"
] | 253 | 2016-08-13T21:47:42.000Z | 2022-01-18T06:53:28.000Z | lib/scribe.ex | cowile/scribe | 75334daf0f1fd24ba92bceef67b4cb6e1e2c4f37 | [
"MIT"
] | 15 | 2017-02-18T18:00:15.000Z | 2022-01-30T15:48:08.000Z | lib/scribe.ex | cowile/scribe | 75334daf0f1fd24ba92bceef67b4cb6e1e2c4f37 | [
"MIT"
] | 11 | 2016-12-23T08:21:36.000Z | 2021-12-31T21:03:26.000Z | defmodule Scribe do
@moduledoc """
Pretty-print tables of structs and maps
"""
alias Scribe.Table
@type data ::
[]
| [...]
| term
@typedoc ~S"""
Options for configuring table output.
- `:colorize` - When `false`, disables colored output. Defaults to `true`
- `:data` - Defines table headers
- `:device` - Where to print (defaults to STDOUT)
- `:style` - Style callback module. Defaults to `Scribe.Style.Default`
- `:width` - Defines table width. Defaults to `:infinite`
"""
@type format_opts :: [
colorize: boolean,
data: [...],
style: module,
width: integer
]
@doc ~S"""
Enables/disables auto-inspect override.
If true, Scribe will override `inspect/2` for maps and structs, printing
them as tables.
## Examples
iex> Scribe.auto_inspect(true)
:ok
"""
@spec auto_inspect(boolean) :: :ok
def auto_inspect(inspect?) do
# Stored in the application env so auto_inspect?/0 can read it at runtime.
Application.put_env(:scribe, :auto_inspect, inspect?)
end
@doc ~S"""
Returns true if Scribe is overriding `Inspect`.
## Examples
iex> Scribe.auto_inspect?
true
"""
def auto_inspect? do
# Both the compile-time switch and the runtime switch must be enabled.
compile_auto_inspect?() and
Application.get_env(:scribe, :auto_inspect, false)
end
@doc false
def compile_auto_inspect? do
Application.get_env(:scribe, :compile_auto_inspect, false)
end
@doc ~S"""
Prints a table from given data.
## Examples
iex> print([])
:ok
iex> Scribe.print(%{key: :value, test: 1234}, colorize: false)
+----------+---------+
| :key | :test |
+----------+---------+
| :value | 1234 |
+----------+---------+
:ok
"""
@spec print(data, format_opts) :: :ok
def print(_results, opts \\ [])
def print([], _opts), do: :ok

def print(results, opts) do
  # Render the table and write it to the requested device (STDOUT by default).
  device = Keyword.get(opts, :device, :stdio)
  IO.puts(device, format(results, opts))
end

# Renders the table and pages it through Pane's interactive console viewer.
def console(results, opts \\ []) do
  Pane.console(format(results, opts))
end
@doc ~S"""
Prints a table from given data and returns the data.
Useful for inspecting pipe chains.
## Examples
iex> Scribe.inspect([])
[]
iex> Scribe.inspect(%{key: :value, test: 1234}, colorize: false)
+----------+---------+
| :key | :test |
+----------+---------+
| :value | 1234 |
+----------+---------+
%{test: 1234, key: :value}
"""
@spec inspect(term, format_opts) :: term
def inspect(results, opts \\ []) do
# Print as a side effect, then return the original data unchanged.
print(results, opts)
results
end
@doc ~S"""
Formats data into a printable table string.
## Examples
iex> format([])
:ok
iex> format(%{test: 1234}, colorize: false)
"+---------+\n| :test |\n+---------+\n| 1234 |\n+---------+\n"
"""
@spec format([] | [...] | term) :: String.t() | :ok
def format(_results, opts \\ [])
def format([], _opts), do: :ok
def format(results, opts) when not is_list(results) do
# Wrap a single map/struct so the list clause below can handle it.
format([results], opts)
end
def format(results, opts) do
keys = fetch_keys(results, opts[:data])
headers = map_string_values(keys)
data = Enum.map(results, &map_string_values(&1, keys))
# The header row comes first; Table.format needs row and column counts.
table = [headers | data]
Table.format(table, Enum.count(table), Enum.count(keys), opts)
end
# Renders the header row (column names) for the table.
defp map_string_values(keys), do: Enum.map(keys, &string_value(&1))
# Renders one data row by extracting each column's value from `row`.
defp map_string_values(row, keys), do: Enum.map(keys, &string_value(row, &1))
defp string_value(%{name: name, key: _key}) do
name
end
# A key given as a function is applied to the whole row.
defp string_value(map, %{name: _name, key: key}) when is_function(key) do
map |> key.()
end
defp string_value(map, %{name: _name, key: key}) do
map |> Map.get(key)
end
# With no explicit :data option, derive the columns from the first row.
defp fetch_keys([first | _rest], nil), do: fetch_keys(first)
defp fetch_keys(_list, opts), do: process_headers(opts)
# Normalizes header specs: either {display_name, key} or a bare key.
defp process_headers(opts) do
for opt <- opts do
case opt do
{name, key} -> %{name: name, key: key}
key -> %{name: key, key: key}
end
end
end
defp fetch_keys(map) do
map
|> Map.keys()
|> process_headers()
end
end
| 22.708791 | 79 | 0.566659 |
7391a7c9ac1f236196c43c3e342ae8d18bfcfbc5 | 781 | ex | Elixir | lib/mmss_server_ex.ex | leader22/mmss-server-ex | db399e06e422b443db028369dc61006f845c168c | [
"MIT"
] | null | null | null | lib/mmss_server_ex.ex | leader22/mmss-server-ex | db399e06e422b443db028369dc61006f845c168c | [
"MIT"
] | null | null | null | lib/mmss_server_ex.ex | leader22/mmss-server-ex | db399e06e422b443db028369dc61006f845c168c | [
"MIT"
] | null | null | null | defmodule MMSSServer do
@moduledoc """
My Mp3 Streaming Server SERVER implementation.
"""
use Application
require Logger
alias Plug.Adapters.Cowboy
def start(_type, _args) do
mpath = Env.fetch!(:mmss_server_ex, :mpath)
port = Env.fetch!(:mmss_server_ex, :port)
user = Env.fetch!(:mmss_server_ex, :user)
pass = Env.fetch!(:mmss_server_ex, :pass)
Logger.info("""
Starting app on env...
mpath: #{mpath}
port: #{port}
user/pass: #{user}/#{pass}
""")
Supervisor.start_link(
[
Cowboy.child_spec(
:http,
MMSSServer.Server,
[],
port: String.to_integer(port)
)
],
strategy: :one_for_one,
name: MMSSServer.Supervisor
)
end
end
| 20.552632 | 48 | 0.577465 |
7391e8c76b9f78f50217230cbd91c4d1f4f31ff2 | 1,123 | ex | Elixir | test/support/channel_case.ex | Ruin0x11/niacademy | f0b07aefa7b2bf5a8f643d851523ee43c6fd1c0f | [
"MIT"
] | null | null | null | test/support/channel_case.ex | Ruin0x11/niacademy | f0b07aefa7b2bf5a8f643d851523ee43c6fd1c0f | [
"MIT"
] | null | null | null | test/support/channel_case.ex | Ruin0x11/niacademy | f0b07aefa7b2bf5a8f643d851523ee43c6fd1c0f | [
"MIT"
] | null | null | null | defmodule NiacademyWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use NiacademyWeb.ChannelCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
import Phoenix.ChannelTest
import NiacademyWeb.ChannelCase
# The default endpoint for testing
@endpoint NiacademyWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Niacademy.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Niacademy.Repo, {:shared, self()})
end
:ok
end
end
| 27.390244 | 71 | 0.730187 |
739219139f8d6034b091da36232ef8f62c264e0b | 718 | ex | Elixir | examples/phoenix_shell/web/gettext.ex | acj/parley | 56b5c7b34f39c74af6ccd4a85a23ceccfc117bdf | [
"MIT"
] | 1 | 2021-07-02T22:50:40.000Z | 2021-07-02T22:50:40.000Z | examples/phoenix_shell/web/gettext.ex | acj/parley | 56b5c7b34f39c74af6ccd4a85a23ceccfc117bdf | [
"MIT"
] | null | null | null | examples/phoenix_shell/web/gettext.ex | acj/parley | 56b5c7b34f39c74af6ccd4a85a23ceccfc117bdf | [
"MIT"
] | null | null | null | defmodule PhoenixShell.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import PhoenixShell.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :phoenix_shell
end
| 28.72 | 72 | 0.685237 |
739237b8547adc7ad66407ae01dd8e30a60dc4b8 | 13,787 | ex | Elixir | day21/lib/day21.ex | the-shank/advent-of-code-2018 | 3be3958adad61e62e8a7ea6ec6a868f049c7a7e4 | [
"Apache-2.0"
] | 7 | 2018-12-12T01:08:24.000Z | 2019-12-09T19:50:36.000Z | day21/lib/day21.ex | bjorng/advent-of-code-2018 | 5dd312b7473d7f2fe12f0de1fd771c3ee1931b97 | [
"Apache-2.0"
] | null | null | null | day21/lib/day21.ex | bjorng/advent-of-code-2018 | 5dd312b7473d7f2fe12f0de1fd771c3ee1931b97 | [
"Apache-2.0"
] | null | null | null | defmodule Day21 do
use Bitwise
@moduledoc """
This module only solves the problem for my input data.
My input and the solutions can be found in test/day21_test.exs.
I solved part one by first examining the assembly program.
It quickly became apparent that register 0 (A) must be set
equal to the value of F at address 0028. I turned on instruction
trace and could see the solution directly in the trace.
For part 2, I installed two breakpoint handlers. The first one
at address 0017 optimizes the inner loop. The second one at
address 0028 remembers all possible results that have been
seen so far. When a value is seen for the second time, the
same sequence of values will repeat forever. The result is the
value *before* the first repeated value. (It turns out that
there are 11669 distinct values before the cycle repeats itself.)
For reference, here is my input program prettified:
0000 F = 123
0001 F = F band 456
0002 F = F == 72
0003 IP = IP + F
0004 goto 1
0005 F = 0
0006 B = F bor 65536
0007 F = 4591209
0008 D = B band 255
0009 F = F + D
0010 F = F band 16777215
0011 F = F * 65899
0012 F = F band 16777215
0013 D = 256 > B
0014 IP = IP + D
0015 IP = IP + 1
0016 goto 28
0017 D = 0
0018 C = D + 1
0019 C = C * 256
0020 C = C > B
0021 IP = IP + C
0022 IP = IP + 1
0023 goto 26
0024 D = D + 1
0025 goto 18
0026 B = D
0027 goto 8
0028 D = F == A
0029 IP = IP + D
0030 goto 6
"""
# Parses the puzzle input and prints a prettified assembly listing.
def decompile_program lines do
{ip_reg, program} = parse_program lines
machine = Machine.new(program, 6, ip_reg)
Machine.decompile_program machine
end
def part1 lines, initial_r0, trace \\ false do
{ip_reg, program} = parse_program lines
machine = Machine.new(program, 6, ip_reg, trace)
machine = Machine.set_reg(machine, 0, initial_r0)
# Optimized replacement for the slow inner division loop at address 17.
machine = Machine.set_breakpoint machine, 17, &optimize_bp/1
machine = Machine.execute_program machine
elem(machine.regs, 0)
end
def part2 lines do
{ip_reg, program} = parse_program lines
machine = Machine.new program, 6, ip_reg
machine = Machine.set_breakpoint machine, 17, &optimize_bp/1
# Collect candidate halt values at the comparison at address 28.
machine = Machine.set_breakpoint machine, 28, &result_bp/1
machine = Map.put(machine, :seen, MapSet.new())
machine = Map.put(machine, :result, nil)
machine = Machine.execute_program machine
#IO.inspect MapSet.size(machine.seen)
machine.result
end
# Breakpoint handler to optimize the inner loop that
# is entered at address 17.
defp optimize_bp(machine) do
%{regs: regs} = machine
b = elem(regs, 1)
# Closed-form result of the loop: D = max(0, B div 256 - 1).
d = max(0, div(b, 256) - 1)
regs = put_elem(regs, 3, d)
%{machine | regs: regs, ip: 18}
end
# Breakpoint handler to examine the values that can
# make the program halt.
defp result_bp(machine) do
%{regs: regs, seen: seen} = machine
result = elem(regs, 5)
if MapSet.member?(seen, result) do
# We have seen this result before. That means that
# possible values will begin to repeat from here.
# Done.
# Jumping past the last address makes execute_program stop.
Map.put(machine, :ip, 999)
else
# Save this potential result and continue executing.
%{machine | ip: 6, result: result,
seen: MapSet.put(seen, result)}
end
end
# Parses the "#ip N" directive plus one instruction per line.
defp parse_program [first | lines] do
<<"#ip ", ip_reg>> = first
ip_reg = ip_reg - ?0
{ip_reg, Enum.map(lines, &parse_instr/1)}
end
defp parse_instr line do
[opcode | operands] = String.split(line, " ")
opcode = String.to_atom(opcode)
operands = Enum.map(operands, &String.to_integer/1)
# Pair each opcode with its implementation fun from Machine.instructions.
{{opcode, Keyword.get(Machine.instructions, opcode)}, operands}
end
end
defmodule Machine do
use Bitwise
def new(program, num_registers, ip_reg, trace \\ false) do
  # Index the instructions by address for O(1) fetch during execution.
  indexed =
    program
    |> Enum.with_index()
    |> Map.new(fn {instr, address} -> {address, instr} end)

  %{
    ip: 0,
    regs: :erlang.make_tuple(num_registers, 0),
    ip_reg: ip_reg,
    program: indexed,
    breakpoints: %{},
    trace: trace
  }
end
def set_reg machine, reg, value do
regs = put_elem machine.regs, reg, value
put_in machine.regs, regs
end
def set_breakpoint(machine, ip, fun) when is_function(fun, 1) do
breakpoints = Map.put(machine.breakpoints, ip, fun)
put_in machine.breakpoints, breakpoints
end
def execute_program machine do
%{ip: ip, program: program, ip_reg: ip_reg, regs: regs0,
breakpoints: breakpoints, trace: trace} = machine
case program do
%{^ip => {{_, execute}, operands} = instr} ->
regs0 = put_elem regs0, ip_reg, ip
case breakpoints do
%{^ip => breakpoint} ->
machine = Map.put(machine, :regs, regs0)
machine = breakpoint.(machine)
execute_program machine
%{} ->
regs = execute.(operands, regs0)
if trace do
:io.format('~4..0w ~-50s ~-22s ~s\n',
[ip, pp_regs(regs0), decompile_instr(instr, ip_reg, ip), pp_regs(regs)])
if elem(regs0, ip_reg) != elem(regs, ip_reg) do
IO.puts ""
end
end
ip = elem(regs, ip_reg)
ip = ip + 1
machine = %{machine | ip: ip, regs: regs}
execute_program machine
end
%{} ->
machine
end
end
defp pp_regs regs do
Tuple.to_list(regs)
|> Stream.with_index
|> Enum.map(fn {value, index} -> [index + ?A, '=' | int_to_str(value)] end)
|> Enum.intersperse(' ')
end
def decompile_program machine do
%{program: program, ip_reg: ip_reg} = machine
IO.puts ""
program
|> Enum.sort
|> Enum.each(fn {ip, instr} ->
str = decompile_instr(instr, ip_reg, ip)
:io.format("~4..0w ~s\n", [ip, str])
end)
end
defp decompile_instr({{name, _}, operands}, ip_reg, _ip) do
{op, result, sources} = translate_instr name, operands, ip_reg
case op do
'set' ->
[result, ' = ', hd(sources)]
'goto' ->
['goto ', hd(sources)]
_ ->
op = case op do
'ban' -> 'band'
'bo' -> 'bor'
'eq' -> '=='
'gt' -> '>'
'add' -> '+'
'mul' -> '*'
_ -> op
end
{src1, src2} = case sources do
[other, 'IP'] -> {'IP', other}
[src1, src2] -> {src1, src2}
end
[result, ' = ', src1, ' ', op, ' ', src2]
end
end
defp translate_instr(name, [src1, src2, result], ip_reg) do
[c1, c2, c3, _c4] = name = Atom.to_charlist(name)
op = [c1, c2, c3]
result = translate_reg(result, ip_reg)
case name do
'seti' when result == 'IP' ->
{'goto', result, [int_to_str(src1 + 1)]}
'seti' ->
{op, result, [int_to_str(src1)]}
'setr' ->
{op, result, [translate_reg(src1, ip_reg)]}
[_, _, ?i, ?i] ->
{[c1, c2], result, [int_to_str(src1), int_to_str(src2)]}
[_, _, ?i, ?r] ->
{[c1, c2], result, [int_to_str(src1), translate_reg(src2, ip_reg)]}
[_, _, ?r, ?r] ->
{[c1, c2], result, [translate_reg(src1, ip_reg), translate_reg(src2, ip_reg)]}
[_, _, ?r, ?i] ->
{[c1, c2], result, [translate_reg(src1, ip_reg), int_to_str(src2)]}
[_, _, _, ?i] ->
{op, result, [translate_reg(src1, ip_reg), int_to_str(src2)]}
[_, _, _, ?r] ->
{op, result, [translate_reg(src1, ip_reg), translate_reg(src2, ip_reg)]}
end
end
defp int_to_str(int), do: Integer.to_charlist(int)
defp translate_reg(ip_reg, ip_reg), do: 'IP'
defp translate_reg(reg, _), do: [reg + ?A]
def instructions do
[{:addr, &addr/2},
{:addi, &addi/2},
{:mulr, &mulr/2},
{:muli, &muli/2},
{:banr, &banr/2},
{:bani, &bani/2},
{:borr, &borr/2},
{:bori, &bori/2},
{:setr, &setr/2},
{:seti, &seti/2},
{:gtir, >ir/2},
{:gtri, >ri/2},
{:gtrr, >rr/2},
{:eqir, &eqir/2},
{:eqri, &eqri/2},
{:eqrr, &eqrr/2}]
end
@doc """
addr (add register) stores into register C the result of
adding register A and register B.
## Examples
iex> Machine.addr [1, 2, 0], {1, 4, 5, 10, 177, 178}
{9, 4, 5, 10, 177, 178}
"""
def addr [a, b, c], regs do
put_elem regs, c, elem(regs, a) + elem(regs, b)
end
@doc """
addi (add immediate) stores into register C the result of adding
register A and value B.
## Examples
iex> Machine.addi [1, 42, 3], {1, 4, 5, 10, 177, 178}
{1, 4, 5, 46, 177, 178}
"""
def addi [a, b, c], regs do
put_elem regs, c, elem(regs, a) + b
end
@doc """
mulr (multiply register) stores into register C the result of
multiplying register A and register B.
## Examples
iex> Machine.mulr [1, 2, 0], {1, 4, 5, 10, 177, 178}
{20, 4, 5, 10, 177, 178}
"""
def mulr [a, b, c], regs do
put_elem regs, c, elem(regs, a) * elem(regs, b)
end
@doc """
muli (multiply immediate) stores into register C the result of multiplying
register A and value B.
## Examples
iex> Machine.muli [1, 42, 3], {1, 4, 5, 10, 177, 178}
{1, 4, 5, 168, 177, 178}
"""
def muli [a, b, c], regs do
put_elem regs, c, elem(regs, a) * b
end
@doc """
banr (bitwise AND register) stores into register C the result of the
bitwise AND of register A and register B.
## Examples
iex> Machine.banr [1, 2, 0], {1, 5, 13, 10, 177, 178}
{5, 5, 13, 10, 177, 178}
"""
def banr [a, b, c], regs do
put_elem regs, c, band(elem(regs, a), elem(regs, b))
end
@doc """
bani (bitwise AND immediate) stores into register C the result of
the bitwise AND of register A and value B.
## Examples
iex> Machine.bani [3, 8, 0], {1, 4, 5, 10, 177, 178}
{8, 4, 5, 10, 177, 178}
"""
def bani [a, b, c], regs do
put_elem regs, c, band(elem(regs, a), b)
end
@doc """
borr (bitwise OR register) stores into register C the result of
the bitwise OR of register A and register B.
## Examples
iex> Machine.borr [1, 2, 0], {1, 5, 9, 10, 177, 178}
{13, 5, 9, 10, 177, 178}
"""
def borr [a, b, c], regs do
put_elem regs, c, bor(elem(regs, a), elem(regs, b))
end
@doc """
bori (bitwise OR immediate) stores into register C the result of
the bitwise OR of register A and value B.
## Examples
iex> Machine.bori [3, 32, 0], {1, 4, 5, 10, 177, 178}
{42, 4, 5, 10, 177, 178}
"""
def bori [a, b, c], regs do
put_elem regs, c, bor(elem(regs, a), b)
end
@doc """
setr (set register) copies the contents of register A into register C.
(Input B is ignored.)
## Examples
iex> Machine.setr [3, 999, 1], {1, 4, 5, 10, 177, 178}
{1, 10, 5, 10, 177, 178}
"""
def setr [a, _b, c], regs do
put_elem regs, c, elem(regs, a)
end
@doc """
seti (set immediate) stores value A into register C. (Input B is ignored.)
## Examples
iex> Machine.seti [777, 999, 0], {1, 4, 5, 10, 177, 178}
{777, 4, 5, 10, 177, 178}
"""
def seti [a, _b, c], regs do
put_elem regs, c, a
end
@doc """
gtir (greater-than immediate/register) sets register C to 1 if value A
is greater than register B. Otherwise, register C is set to 0.
## Examples
iex> Machine.gtir [7, 2, 0], {1, 4, 5, 10, 177, 178}
{1, 4, 5, 10, 177, 178}
iex> Machine.gtir [0, 2, 0], {1, 4, 5, 10, 177, 178}
{0, 4, 5, 10, 177, 178}
"""
def gtir [a, b, c], regs do
put_bool regs, c, a > elem(regs, b)
end
@doc """
gtri (greater-than register/immediate) sets register C to 1 if register A
is greater than value B. Otherwise, register C is set to 0.
## Examples
iex> Machine.gtri [3, 9, 0], {1, 4, 5, 10, 177, 178}
{1, 4, 5, 10, 177, 178}
iex> Machine.gtri [2, 9, 0], {1, 4, 5, 10, 177, 178}
{0, 4, 5, 10, 177, 178}
"""
def gtri [a, b, c], regs do
put_bool regs, c, elem(regs, a) > b
end
@doc """
gtrr (greater-than register/register) sets register C to 1 if register A
is greater than register B. Otherwise, register C is set to 0.
## Examples
iex> Machine.gtrr [3, 2, 0], {1, 4, 5, 10, 177, 178}
{1, 4, 5, 10, 177, 178}
iex> Machine.gtrr [1, 2, 0], {1, 4, 5, 10, 177, 178}
{0, 4, 5, 10, 177, 178}
"""
def gtrr [a, b, c], regs do
put_bool regs, c, elem(regs, a) > elem(regs, b)
end
@doc """
eqir (equal immediate/register) sets register C to 1 if value A is
equal to register B. Otherwise, register C is set to 0.
## Examples
iex> Machine.eqir [4, 1, 0], {1, 4, 5, 10, 177, 178}
{1, 4, 5, 10, 177, 178}
iex> Machine.eqir [42, 1, 0], {1, 4, 5, 10, 177, 178}
{0, 4, 5, 10, 177, 178}
"""
def eqir [a, b, c], regs do
put_bool regs, c, a == elem(regs, b)
end
@doc """
eqri (equal register/immediate) sets register C to 1 if register A
is equal to value B. Otherwise, register C is set to 0.
## Examples
iex> Machine.eqri [3, 10, 0], {1, 4, 5, 10, 177, 178}
{1, 4, 5, 10, 177, 178}
iex> Machine.eqri [3, 19, 0], {1, 4, 5, 10, 177, 178}
{0, 4, 5, 10, 177, 178}
"""
def eqri [a, b, c], regs do
put_bool regs, c, elem(regs, a) == b
end
@doc """
eqrr (equal register/register) sets register C to 1 if register A
is equal to register B. Otherwise, register C is set to 0.
## Examples
iex> Machine.eqrr [3, 2, 0], {1, 4, 5, 10, 177, 178}
{0, 4, 5, 10, 177, 178}
iex> Machine.eqrr [3, 2, 0], {1, 4, 10, 10, 177, 178}
{1, 4, 10, 10, 177, 178}
"""
def eqrr [a, b, c], regs do
put_bool regs, c, elem(regs, a) == elem(regs, b)
end
defp put_bool regs, c, bool do
bool = if bool, do: 1, else: 0
put_elem regs, c, bool
end
end
| 25.437269 | 86 | 0.575905 |
73923b13291eaf10292151b4f87b3a6e5ddde229 | 1,288 | exs | Elixir | test/lib/deserialization/xs2a/synchronization_test.exs | ibanity/ibanity-elixir | c2e1feedbfc2376678c9db78c6365a82a654b00b | [
"MIT"
] | 3 | 2018-11-17T18:12:15.000Z | 2020-12-09T06:26:59.000Z | test/lib/deserialization/xs2a/synchronization_test.exs | ibanity/ibanity-elixir | c2e1feedbfc2376678c9db78c6365a82a654b00b | [
"MIT"
] | 2 | 2018-12-12T14:14:56.000Z | 2019-07-01T14:13:57.000Z | test/lib/deserialization/xs2a/synchronization_test.exs | ibanity/ibanity-elixir | c2e1feedbfc2376678c9db78c6365a82a654b00b | [
"MIT"
] | null | null | null | defmodule Ibanity.Xs2a.Synchronization.DeserializationTest do
use ExUnit.Case
import Ibanity.JsonDeserializer
alias Ibanity.DateTimeUtil
alias Ibanity.Xs2a.Synchronization
test "deserializes a synchronization" do
data = %{
"type" => "synchronization",
"id" => "e8b19b5e-068b-4802-a2da-0c641145479c",
"attributes" => %{
"updatedAt" => "2018-10-18T15:11:42.341Z",
"subtype" => "accountDetails",
"status" => "success",
"resourceType" => "account",
"resourceId" => "a23b8b7e-b118-43da-80d6-cf0b4c6b1707",
"customerOnline" => true,
"customerIpAddress" => "123.123.123.123",
"errors" => [],
"createdAt" => "2018-10-18T15:11:41.489069Z"
}
}
actual = deserialize(data)
expected = %Synchronization{
id: "e8b19b5e-068b-4802-a2da-0c641145479c",
updated_at: DateTimeUtil.parse("2018-10-18T15:11:42.341Z"),
subtype: "accountDetails",
status: "success",
resource_type: "account",
resource_id: "a23b8b7e-b118-43da-80d6-cf0b4c6b1707",
customer_online: true,
customer_ip_address: "123.123.123.123",
errors: [],
created_at: DateTimeUtil.parse("2018-10-18T15:11:41.489069Z")
}
assert expected == actual
end
end
| 30.666667 | 67 | 0.631211 |
7392600e73d16a96d54b45b48377fafc35427e8d | 152 | exs | Elixir | test/test_helper.exs | erickgnavar/tweet_map | 4c8839e35dc96cd8d6ca12fb2895496f2a0c100e | [
"MIT"
] | null | null | null | test/test_helper.exs | erickgnavar/tweet_map | 4c8839e35dc96cd8d6ca12fb2895496f2a0c100e | [
"MIT"
] | null | null | null | test/test_helper.exs | erickgnavar/tweet_map | 4c8839e35dc96cd8d6ca12fb2895496f2a0c100e | [
"MIT"
] | null | null | null | ExUnit.start
Mix.Task.run "ecto.create", ["--quiet"]
Mix.Task.run "ecto.migrate", ["--quiet"]
Ecto.Adapters.SQL.begin_test_transaction(TweetMap.Repo)
| 21.714286 | 55 | 0.730263 |
73926904456401130c586398e28b94e0b842d686 | 4,303 | ex | Elixir | apps/rig_auth/lib/rig_auth/blacklist.ex | arana3/reactive-interaction-gateway | 793648bcc5b8b05fc53df1f5f97818fb40ca84be | [
"Apache-2.0"
] | null | null | null | apps/rig_auth/lib/rig_auth/blacklist.ex | arana3/reactive-interaction-gateway | 793648bcc5b8b05fc53df1f5f97818fb40ca84be | [
"Apache-2.0"
] | 132 | 2018-11-26T14:00:54.000Z | 2022-03-11T04:17:54.000Z | apps/rig_auth/lib/rig_auth/blacklist.ex | arana3/reactive-interaction-gateway | 793648bcc5b8b05fc53df1f5f97818fb40ca84be | [
"Apache-2.0"
] | null | null | null | defmodule RigAuth.Blacklist do
@moduledoc """
Enables blacklisting of JWTs by their jti claim.
The entries representing the banned claims feature an expiration timestamp,
which prevents the blacklist from growing indefinitely.
In a distributed setting, the node that does the blacklisting spreads the
information via Phoenix' PubSub Server as Phoenix Presence information. The
other nodes react by tracking the same record themselves, which means that
for one record and n nodes there are n items in the Presence list. The
following properties are a result of this:
- Blacklisting can occur on/by any node.
- The blacklist is eventually consistent over all nodes.
- Any node can go down and come up at any time without affecting the
blacklist, except if all nodes go down at the same time (in that case
there is nothing to synchronize from -- the list is not stored on disk).
"""
use Rig.Config, [:default_expiry_hours]
require Logger
alias RigAuth.Blacklist.Serializer
@typep state_t :: map
@default_tracker_mod RigAuth.Blacklist.Tracker
def start_link(tracker_mod \\ nil, opts \\ []) do
tracker_mod = if tracker_mod, do: tracker_mod, else: @default_tracker_mod
Logger.debug(fn -> "Blacklist with tracker #{inspect tracker_mod}" end)
GenServer.start_link(
__MODULE__,
_state = %{tracker_mod: tracker_mod},
Keyword.merge([name: __MODULE__], opts))
end
@spec add_jti(pid | atom, String.t, nil | String.t | Timex.DateTime.t, nil | pid) :: pid
def add_jti(server, jti, expiry \\ nil, listener \\ nil)
def add_jti(server, jti, nil = _expiry, listener) do
conf = config()
default_expiry = Timex.now() |> Timex.shift(hours: conf.default_expiry_hours)
add_jti(server, jti, default_expiry, listener)
end
def add_jti(server, jti, expiry, listener) do
expires_at =
case Timex.is_valid? expiry do
true -> expiry
_ -> Serializer.deserialize_datetime!(expiry)
end
GenServer.cast(server, {:add, jti, expires_at, listener})
server # allow for chaining calls
end
@spec contains_jti?(pid, String.t) :: boolean
def contains_jti?(server, jti) do
GenServer.call(server, {:contains?, jti})
end
# callbacks
@spec init(state_t) :: {:ok, state_t}
def init(state) do
send(self(), :expire_stale_records)
{:ok, state}
end
@spec handle_cast({:add, String.t, Timex.DateTime.t, nil | pid}, state_t) :: {:noreply, state_t}
def handle_cast({:add, jti, expiry, listener}, state) do
Logger.info("Blacklisting JWT with jti=#{jti}")
with {:ok, _phx_ref} <- state.tracker_mod.track(jti, expiry) do
remaining_ms = max(
(Timex.diff(expiry, Timex.now(), :seconds) + 1) * 1_000,
0
)
Process.send_after(self(), {:expire, jti, listener}, _timeout = remaining_ms)
end
{:noreply, state}
end
@spec handle_call({:contains?, String.t}, any, state_t) :: {:reply, boolean, state_t}
def handle_call({:contains?, jti}, _from, state) do
contains? = case state.tracker_mod.find(jti) do
{_jti, _meta} -> true
nil -> false
end
{:reply, contains?, state}
end
@spec handle_info({:expire, String.t, nil | pid}, state_t) :: {:noreply, state_t}
def handle_info({:expire, jti, listener}, state) do
expire(state.tracker_mod, jti, listener)
{:noreply, state}
end
@spec handle_info(:expire_stale_records, state_t) :: {:noreply, state_t}
def handle_info(:expire_stale_records, state) do
now = Timex.now()
state.tracker_mod.list()
|> Stream.filter(fn({_jti, meta}) -> meta.expiry |> Timex.before?(now) end)
|> Enum.each(fn({jti, _meta}) -> expire(state.tracker_mod, jti) end)
{:noreply, state}
end
# private functions
@spec expire(atom, String.t, nil | pid) :: any
defp expire(tracker_mod, jti, listener \\ nil) do
Logger.info("Removing JWT with jti=#{jti} from blacklist (entry expired)")
tracker_mod.untrack(jti)
if listener, do: send_expiration_notification(listener, jti)
end
@spec send_expiration_notification(pid, String.t) :: any
defp send_expiration_notification(listener, jti) do
send(listener, {:expired, jti})
Logger.debug(fn -> "notified #{inspect listener} about expiration of JTI #{inspect jti}" end)
end
end
| 35.858333 | 98 | 0.689519 |
739274cb2a3c4e4d2f7bc24c8415741e9036ab4d | 6,190 | ex | Elixir | lib/resource_pool/resources/resources.ex | code-mancers/resource-pool | 83610857b63ec3d0b5e215f6ac63e12ccb99c2f1 | [
"MIT"
] | null | null | null | lib/resource_pool/resources/resources.ex | code-mancers/resource-pool | 83610857b63ec3d0b5e215f6ac63e12ccb99c2f1 | [
"MIT"
] | null | null | null | lib/resource_pool/resources/resources.ex | code-mancers/resource-pool | 83610857b63ec3d0b5e215f6ac63e12ccb99c2f1 | [
"MIT"
] | null | null | null | defmodule ResourcePool.Resources do
@moduledoc """
The Resources context.
"""
import Ecto.Query, warn: false
alias Ecto.Multi
alias ResourcePool.Repo
alias ResourcePool.Resources.Database
require Logger
def create_resource_bulk(callback, opts \\ []) do
nOfResource = opts[:nOfResource] || 1
1..nOfResource
|> Enum.map(fn _ -> create_resource() end)
|> Enum.reject(&match?({:error, _}, &1))
|> Enum.map(&format_resource/1)
|> send_response(callback)
end
defp format_resource({:ok, database}) do
%{
external_id: "#{database.id}",
value: "#{database.host}:#{database.port}"
}
end
defp send_response(databases, callback) do
IO.puts callback
IO.inspect Poison.encode!(%{data: databases})
opts = [body: Poison.encode!(%{data: databases}),
headers: ["Content-Type": "application/json"]]
case HTTPotion.post(callback, opts) do
%HTTPotion.Response{} = response -> Logger.info("#{inspect response}")
%HTTPotion.ErrorResponse{message: msg} -> Logger.error(msg)
end
end
defp create_resource() do
with {:ok, %{record_db: database}} <- do_create_resource() do
{:ok, database}
else
_ -> {:error, "unable to create resource"}
end
end
defp do_create_resource() do
Multi.new
|> prepare_params()
|> create_postgres_db()
|> seed_postgres_db()
|> record_db()
|> Repo.transaction()
end
def delete_resource(db) do
with {:ok, %{delete_db: database}} <- do_delete_resource(db) do
{:ok, database}
else
_ -> {:error, "unable to create resource"}
end
end
def do_delete_resource(db) do
Multi.new
|> delete_postgres_db(db)
|> delete_db(db)
|> Repo.transaction()
end
defp prepare_params(multi) do
db_path = get_db_path()
Multi.run(multi, :prepare_params, fn _ ->
{:ok,
%{
db_path: db_path,
log_path: get_log_path(db_path),
name: get_db_name(db_path),
port: get_port(),
host: get_host()
}
}
end)
end
defp create_postgres_db(multi) do
Multi.run(multi, :create_postgress_db, fn %{prepare_params: params} ->
opts = "-h 0.0.0.0 -p #{params[:port]}"
with {_, 0} <- System.cmd("initdb", ["-A", "trust", "-D", params[:db_path]]),
{_, 0} <- System.cmd("pg_ctl", ["-D", params[:db_path],
"-o", opts, "-l", params[:log_path], "start"]) do
{:ok, params}
else
{msg, errcode} ->
IO.puts "some weird error, #{msg}, #{errcode}"
{:error, "Couldn't create the database"}
end
end)
end
defp seed_postgres_db(multi) do
Multi.run(multi, :seed_postgress_db, fn %{prepare_params: params} ->
dump_file = Path.join(["/", "Users", "akash", "Developer", "resource_pool", "dockup_db_dump.sql"])
with {_, 0} <- System.cmd("psql", ["-f", dump_file, "-h", "0.0.0.0", "-p", Integer.to_string(params[:port]), "postgres"]) do
{:ok, params}
else
{msg, errcode} ->
IO.puts "some weird error, #{msg}, #{errcode}"
{:error, "Couldn't create the database"}
end
end)
end
def record_db(multi) do
Multi.run(multi, :record_db, fn %{prepare_params: params} ->
create_database(params)
end)
end
defp get_db_path() do
Application.get_env(:resource_pool, :base_path)
|> Path.join(gen_random_db_name())
end
defp get_db_name(db_path) do
db_path
|> Path.split()
|> List.last()
end
defp get_log_path(db_path) do
db_path
|> Path.join("db.log")
end
defp get_port() do
gen_random_unused_port()
end
defp get_host() do
# extract ip
{:ok, [{inet, _, _} | _]} = :inet.getif()
inet
|> Tuple.to_list()
|> Enum.join(".")
end
defp gen_random_db_name() do
?a..?z
|> Enum.take_random(10)
|> to_string()
end
defp gen_random_unused_port() do
5000..50000
|> Enum.take_random(1)
|> Enum.at(0)
end
defp delete_postgres_db(multi, db) do
Multi.run(multi, :delete_postgres_db, fn _ ->
opts = "-h #{db.host} -p #{db.port}"
with {_, 0} <- System.cmd("pg_ctl", ["-D", db.db_path, "-o", opts, "-l", db.log_path, "stop"]),
{:ok, _} <- File.rm_rf(db.db_path) do
{:ok, db}
else
_ -> {:error, "Couldn't delete the database"}
end
end)
end
def delete_db(multi, db) do
Multi.delete(multi, :delete_db, db)
end
@doc """
Returns the list of databases.
## Examples
iex> list_databases()
[%Database{}, ...]
"""
def list_databases do
Repo.all(Database)
end
@doc """
Gets a single database.
Raises `Ecto.NoResultsError` if the Database does not exist.
## Examples
iex> get_database!(123)
%Database{}
iex> get_database!(456)
** (Ecto.NoResultsError)
"""
def get_database!(id), do: Repo.get!(Database, id)
@doc """
Creates a database.
## Examples
iex> create_database(%{field: value})
{:ok, %Database{}}
iex> create_database(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_database(attrs \\ %{}) do
%Database{}
|> Database.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a database.
## Examples
iex> update_database(database, %{field: new_value})
{:ok, %Database{}}
iex> update_database(database, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_database(%Database{} = database, attrs) do
database
|> Database.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Database.
## Examples
iex> delete_database(database)
{:ok, %Database{}}
iex> delete_database(database)
{:error, %Ecto.Changeset{}}
"""
def delete_database(%Database{} = database) do
Repo.delete(database)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking database changes.
## Examples
iex> change_database(database)
%Ecto.Changeset{source: %Database{}}
"""
def change_database(%Database{} = database) do
Database.changeset(database, %{})
end
end
| 22.34657 | 130 | 0.591761 |
7392b28ce9e103f0efef2a9e212fc0f292a1615a | 1,618 | exs | Elixir | test/policr_mini_bot/captcha_test.exs | WOCyo/policr-mini | 23e5e9f927d083cba5993f39e9f122e3a0ad1be2 | [
"MIT"
] | 487 | 2020-06-08T03:04:21.000Z | 2022-03-31T14:51:36.000Z | test/policr_mini_bot/captcha_test.exs | WOCyo/policr-mini | 23e5e9f927d083cba5993f39e9f122e3a0ad1be2 | [
"MIT"
] | 141 | 2020-06-11T01:03:29.000Z | 2022-03-30T20:23:32.000Z | test/policr_mini_bot/captcha_test.exs | WOCyo/policr-mini | 23e5e9f927d083cba5993f39e9f122e3a0ad1be2 | [
"MIT"
] | 61 | 2020-06-10T05:25:03.000Z | 2022-03-23T15:54:26.000Z | defmodule PolicrMiniBot.CaptchaTest do
use ExUnit.Case
import PolicrMiniBot.Captcha
alias Telegex.Model.{InlineKeyboardMarkup, InlineKeyboardButton}
test "build_markup/1" do
markup =
[
[
"猫",
"狗",
"猪"
]
]
|> build_markup(100)
assert markup == %InlineKeyboardMarkup{
inline_keyboard: [
[
%InlineKeyboardButton{text: "猫", callback_data: "verification:v1:1:100"},
%InlineKeyboardButton{text: "狗", callback_data: "verification:v1:2:100"},
%InlineKeyboardButton{text: "猪", callback_data: "verification:v1:3:100"}
]
]
}
markup =
[
[
1,
2,
3
],
[
4,
5,
6
]
]
|> build_markup(100)
assert markup == %InlineKeyboardMarkup{
inline_keyboard: [
[
%InlineKeyboardButton{text: "1", callback_data: "verification:v1:1:100"},
%InlineKeyboardButton{text: "2", callback_data: "verification:v1:2:100"},
%InlineKeyboardButton{text: "3", callback_data: "verification:v1:3:100"}
],
[
%InlineKeyboardButton{text: "4", callback_data: "verification:v1:4:100"},
%InlineKeyboardButton{text: "5", callback_data: "verification:v1:5:100"},
%InlineKeyboardButton{text: "6", callback_data: "verification:v1:6:100"}
]
]
}
end
end
| 26.966667 | 90 | 0.497528 |
7392b4bf8d1b0f584970e463db352cd1159a3514 | 1,220 | ex | Elixir | web/views/error_helpers.ex | appdojolabs/myapp | 41887a60f7ea86db3c70470631b703455c865042 | [
"MIT"
] | 41 | 2017-05-21T14:33:28.000Z | 2022-03-28T22:29:56.000Z | web/views/error_helpers.ex | DMeechan/deploy-elixir-docker-example | b63a46453629ced5134aba039575a3c010a6f2a1 | [
"MIT"
] | 2 | 2017-07-23T07:06:38.000Z | 2018-07-16T23:53:12.000Z | web/views/error_helpers.ex | appdojolabs/myapp | 41887a60f7ea86db3c70470631b703455c865042 | [
"MIT"
] | 16 | 2017-05-21T22:35:10.000Z | 2022-03-28T22:30:04.000Z | defmodule Myapp.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
if error = form.errors[field] do
content_tag :span, translate_error(error), class: "help-block"
end
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# Because error messages were defined within Ecto, we must
# call the Gettext module passing our Gettext backend. We
# also use the "errors" domain as translations are placed
# in the errors.po file.
# Ecto will pass the :count keyword if the error message is
# meant to be pluralized.
# On your own code and templates, depending on whether you
# need the message to be pluralized or not, this could be
# written simply as:
#
# dngettext "errors", "1 file", "%{count} files", count
# dgettext "errors", "is invalid"
#
if count = opts[:count] do
Gettext.dngettext(Myapp.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(Myapp.Gettext, "errors", msg, opts)
end
end
end
| 29.756098 | 71 | 0.665574 |
7392c92855a359f9fc74c79a0899b949257c660c | 1,846 | exs | Elixir | clients/compute/mix.exs | Contractbook/elixir-google-api | 342751041aaf8c2e7f76f9922cf24b9c5895802b | [
"Apache-2.0"
] | null | null | null | clients/compute/mix.exs | Contractbook/elixir-google-api | 342751041aaf8c2e7f76f9922cf24b9c5895802b | [
"Apache-2.0"
] | null | null | null | clients/compute/mix.exs | Contractbook/elixir-google-api | 342751041aaf8c2e7f76f9922cf24b9c5895802b | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.Mixfile do
use Mix.Project
@version "0.39.1"
def project() do
[
app: :google_api_compute,
version: @version,
elixir: "~> 1.6",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/compute"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:google_gax, "~> 0.4"},
{:ex_doc, "~> 0.16", only: :dev}
]
end
defp description() do
"""
Compute Engine API client library. Creates and runs virtual machines on Google Cloud Platform.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching", "Daniel Azuma"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/compute",
"Homepage" => "https://developers.google.com/compute/docs/reference/latest/"
}
]
end
end
| 27.552239 | 98 | 0.656013 |
7392de8a93f793a90b135fe3f8db2aabe13f6c6e | 137 | ex | Elixir | web/controllers/page_controller.ex | hackersguildco/flaggy | 5c52879f0f8541bba5603e6a678d9527e76507db | [
"MIT"
] | 1 | 2018-04-23T14:55:58.000Z | 2018-04-23T14:55:58.000Z | web/controllers/page_controller.ex | hackersguildco/flaggy | 5c52879f0f8541bba5603e6a678d9527e76507db | [
"MIT"
] | null | null | null | web/controllers/page_controller.ex | hackersguildco/flaggy | 5c52879f0f8541bba5603e6a678d9527e76507db | [
"MIT"
] | null | null | null | defmodule Flaggy.PageController do
use Flaggy.Web, :controller
def index(conn, _params) do
render conn, "index.html"
end
end
| 15.222222 | 34 | 0.722628 |
7393082869e9228a5341f35264f53fb9d6c7029b | 1,358 | ex | Elixir | lib/recipients.ex | terianil/sendgridex | e2a998fcea794cf71ab9df1ffb0b73198f916206 | [
"MIT"
] | null | null | null | lib/recipients.ex | terianil/sendgridex | e2a998fcea794cf71ab9df1ffb0b73198f916206 | [
"MIT"
] | null | null | null | lib/recipients.ex | terianil/sendgridex | e2a998fcea794cf71ab9df1ffb0b73198f916206 | [
"MIT"
] | null | null | null | defmodule SendGridEx.Recipients do
alias SendGridEx.Client
alias SendGridEx.Model.Recipient
alias SendGridEx.Model.RecipientId
@doc """
Adds a single recipient.
"""
def add(%Recipient{} = recipient) do
add([recipient])
end
@doc """
Adds multiple recipients.
"""
def add(recipients) when is_list(recipients) do
with {:ok, env} <-
Client.post(
"contactdb/recipients",
recipients,
opts: [expected_status_code: 201]
) do
{:ok, env.body}
end
end
@doc """
Updates a single recipient. If the recipient does not exist, it will be created.
"""
def update(%Recipient{} = recipient) do
update([recipient])
end
@doc """
Updates multiple recipients. If the recipient does not exist, it will be created.
"""
def update(recipients) when is_list(recipients) do
with {:ok, env} <-
Client.patch(
"contactdb/recipients",
recipients,
opts: [expected_status_code: 201]
) do
{:ok, env.body}
end
end
@doc """
Deletes a recipient.
"""
def delete(%RecipientId{} = r) do
with {:ok, _env} <-
Client.delete(
"contactdb/recipients/#{r.recipient_id}",
opts: [expected_status_code: 204]
) do
:ok
end
end
end
| 22.262295 | 83 | 0.579529 |
73933aecacdb02b5ec7ae46531767491eafbd795 | 45,889 | ex | Elixir | lib/ecto/query/planner.ex | jccf091/ecto | 42d47a6da0711f842e1a0e6724a89b318b9b2144 | [
"Apache-2.0"
] | null | null | null | lib/ecto/query/planner.ex | jccf091/ecto | 42d47a6da0711f842e1a0e6724a89b318b9b2144 | [
"Apache-2.0"
] | null | null | null | lib/ecto/query/planner.ex | jccf091/ecto | 42d47a6da0711f842e1a0e6724a89b318b9b2144 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Query.Planner do
# Normalizes a query and its parameters.
@moduledoc false
alias Ecto.Query.{BooleanExpr, DynamicExpr, JoinExpr, QueryExpr, SelectExpr}
# Compile-time guard: the planner destructures the full %Ecto.Query{}
# struct, so refuse to compile if the struct's field count has changed
# without this module being updated to match.
if map_size(%Ecto.Query{}) != 17 do
  raise "Ecto.Query match out of date in builder"
end
@doc """
Converts a query to a list of joins.

The from is moved as last join with the where conditions as its "on"
in order to keep proper binding order.
"""
def query_to_joins(qual, %{from: from, wheres: wheres, joins: joins}, position) do
  # Start from a trivially-true "on" expression and fold every where
  # clause into it; the result becomes the "on" of the from-turned-join.
  on = %QueryExpr{file: __ENV__.file, line: __ENV__.line, expr: true, params: []}

  on =
    Enum.reduce(wheres, on, fn %BooleanExpr{op: op, expr: expr, params: params}, acc ->
      merge_expr_and_params(op, acc, expr, params)
    end)

  join = %JoinExpr{qual: qual, source: from, file: __ENV__.file, line: __ENV__.line, on: on}
  # The from (binding 0) is appended after the existing joins.
  last = length(joins) + position

  # Remap source indices: binding 0 (the from) moves to the last slot;
  # the original join bindings 1..n shift to position..position+n-1.
  mapping = fn
    0 -> last
    ix -> ix + position - 1
  end

  for {%{on: on} = join, ix} <- Enum.with_index(joins ++ [join]) do
    %{join | on: rewrite_sources(on, mapping), ix: ix + position}
  end
end
# Merges the right-hand expression and params into the left-hand
# QueryExpr under the given boolean op (:and/:or).
defp merge_expr_and_params(op, %QueryExpr{expr: left_expr, params: left_params} = struct,
                           right_expr, right_params) do
  # Pinned parameters ({:^, _, [n]}) are positional within an
  # expression's params list; shift the right-hand side's indices past
  # the left-hand params before the two lists are concatenated.
  right_expr =
    case length(left_params) do
      0 ->
        right_expr
      prefix ->
        Macro.prewalk(right_expr, fn
          {:^, meta, [counter]} when is_integer(counter) -> {:^, meta, [prefix + counter]}
          other -> other
        end)
    end

  %{struct | expr: merge_expr(op, left_expr, right_expr), params: left_params ++ right_params}
end
# Joins two boolean AST fragments under `op`, dropping literal `true`
# operands (the identity element for these boolean joins).
defp merge_expr(op, left, right) do
  case {left, right} do
    {_, true} -> left
    {true, _} -> right
    _ -> {op, [], [left, right]}
  end
end
@doc """
Rewrites the given query expression sources using the given mapping.

Both the `{:&, _, [ix]}` nodes inside the expression AST and the source
indices embedded in the parameter list are remapped through `mapping`.
"""
def rewrite_sources(%{expr: expr, params: params} = part, mapping) do
  rewritten_expr =
    Macro.prewalk(expr, fn
      {:&, meta, [ix]} -> {:&, meta, [mapping.(ix)]}
      node -> node
    end)

  rewritten_params = Enum.map(params, &rewrite_param(&1, mapping))

  %{part | expr: rewritten_expr, params: rewritten_params}
end

# Remaps the source index embedded in one parameter entry, if present.
defp rewrite_param({val, {composite, {ix, field}}}, mapping) when is_integer(ix),
  do: {val, {composite, {mapping.(ix), field}}}

defp rewrite_param({val, {ix, field}}, mapping) when is_integer(ix),
  do: {val, {mapping.(ix), field}}

defp rewrite_param(val, _mapping), do: val
@doc """
Plans the query for execution.

Planning happens in multiple steps:

  1. First the query is prepared by retrieving
     its cache key, casting and merging parameters

  2. Then a cache lookup is done, if the query is
     cached, we are done

  3. If there is no cache, we need to actually
     normalize and validate the query, asking the
     adapter to prepare it

  4. The query is sent to the adapter to be generated

## Cache

All entries in the query, except the preload and sources
field, should be part of the cache key.

The cache value is the compiled query by the adapter
along-side the select expression.
"""
def query(query, operation, repo, adapter, counter) do
  {query, params, key} = prepare(query, operation, adapter, counter)

  case key do
    :nocache ->
      # Uncacheable queries are normalized and prepared on every call.
      {_, select, prepared} = query_without_cache(query, operation, adapter, counter)
      {build_meta(query, select), {:nocache, prepared}, params}

    _ ->
      query_with_cache(query, operation, repo, adapter, counter, key, params)
  end
end
defp query_with_cache(query, operation, repo, adapter, counter, key, params) do
case query_lookup(query, operation, repo, adapter, counter, key) do
{:nocache, select, prepared} ->
{build_meta(query, select), {:nocache, prepared}, params}
{_, :cached, select, cached} ->
reset = &cache_reset(repo, key, &1)
{build_meta(query, select), {:cached, reset, cached}, params}
{_, :cache, select, prepared} ->
update = &cache_update(repo, key, &1)
{build_meta(query, select), {:cache, update, prepared}, params}
end
end
defp query_lookup(query, operation, repo, adapter, counter, key) do
try do
:ets.lookup(repo, key)
rescue
ArgumentError ->
raise ArgumentError,
"repo #{inspect repo} is not started, please ensure it is part of your supervision tree"
else
[term] -> term
[] -> query_prepare(query, operation, adapter, counter, repo, key)
end
end
defp query_prepare(query, operation, adapter, counter, repo, key) do
case query_without_cache(query, operation, adapter, counter) do
{:cache, select, prepared} ->
elem = {key, :cache, select, prepared}
cache_insert(repo, key, elem)
{:nocache, _, _} = nocache ->
nocache
end
end
# Atomically registers `elem` under `key` in the repo's ETS cache table.
# If a concurrent process won the insert race, return the entry that is
# already stored instead of our own.
defp cache_insert(repo, key, elem) do
  if :ets.insert_new(repo, elem) do
    elem
  else
    [winner] = :ets.lookup(repo, key)
    winner
  end
end
# Promotes a :cache entry to :cached, storing the adapter's cached form.
# The result of update_element is ignored: a concurrently deleted entry
# is simply not updated.
defp cache_update(repo, key, cached) do
  _ = :ets.update_element(repo, key, [{2, :cached}, {4, cached}])
  :ok
end
# Demotes a :cached entry back to :cache with a freshly prepared query.
defp cache_reset(repo, key, prepared) do
  _ = :ets.update_element(repo, key, [{2, :cache}, {4, prepared}])
  :ok
end
# Full planning pipeline with no cache involvement: normalize the query,
# then ask the adapter to prepare it. The adapter returns whether the
# prepared query may be cached (:cache | :nocache).
defp query_without_cache(query, operation, adapter, counter) do
  {query, select} = normalize(query, operation, adapter, counter)
  {cache, prepared} = adapter.prepare(operation, query)
  {cache, select, prepared}
end
# Builds the execution metadata handed back alongside the prepared
# query: prefix, sources and preloads from the query plus the
# normalized select information.
defp build_meta(query, select) do
  %{prefix: prefix, sources: sources, preloads: preloads} = query
  %{prefix: prefix, select: select, preloads: preloads, sources: sources}
end
@doc """
Prepares the query for cache.

This means all the parameters from query expressions are
merged into a single value and their entries are pruned
from the query.

This function is called by the backend before invoking
any cache mechanism.
"""
def prepare(query, operation, adapter, counter) do
  query
  |> prepare_sources(adapter)
  |> prepare_assocs
  |> prepare_cache(operation, adapter, counter)
rescue
  e ->
    # Reraise errors so we ignore the planner inner stacktrace
    reraise e
end
@doc """
Prepare all sources, by traversing and expanding joins.
"""
def prepare_sources(%{from: from} = query, adapter) do
  from = from || error!(query, "query must have a from expression")
  from = prepare_source(query, from, adapter)
  # Association joins may expand into several joins/sources, so the
  # final sources tuple can be larger than the original join list.
  {joins, sources, tail_sources} = prepare_joins(query, [from], length(query.joins), adapter)
  %{query | from: from, joins: joins |> Enum.reverse,
            sources: (tail_sources ++ sources) |> Enum.reverse |> List.to_tuple()}
end
# Expands a single source. Subqueries are recursively planned (params
# merged, cache key computed, select normalized); `{nil, schema}` pairs
# get the table name from the schema; `{source, schema}` pairs and
# fragments pass through unchanged.
defp prepare_source(query, %Ecto.SubQuery{query: inner_query} = subquery, adapter) do
  try do
    {inner_query, params, key} = prepare(inner_query, :all, adapter, 0)
    assert_no_subquery_assocs!(inner_query)
    {inner_query, select} = inner_query |> returning(true) |> subquery_select(adapter)
    %{subquery | query: inner_query, params: params, cache: key, select: select}
  rescue
    # Wrap any planning failure so the user sees the outer query context.
    e -> raise Ecto.SubQueryError, query: query, exception: e
  end
end
defp prepare_source(_query, {nil, schema}, _adapter) when is_atom(schema) and schema != nil,
  do: {schema.__schema__(:source), schema}
defp prepare_source(_query, {source, schema}, _adapter) when is_binary(source) and is_atom(schema),
  do: {source, schema}
defp prepare_source(_query, {:fragment, _, _} = source, _adapter),
  do: source
# Preloads/assocs are not meaningful inside a subquery; reject early.
defp assert_no_subquery_assocs!(%{assocs: assocs, preloads: preloads} = query)
     when assocs != [] or preloads != [] do
  error!(query, "cannot preload associations in subquery")
end
defp assert_no_subquery_assocs!(query) do
  query
end
# Normalizes a subquery's select into a map (or struct-over-map) shape
# whose keys become the subquery's named columns, then re-collects the
# field/type metadata for the outer query. Returns {query, meta}.
defp subquery_select(%{select: %{expr: expr, take: take} = select} = query, adapter) do
  expr =
    case subquery_select(expr, take, query) do
      {nil, fields} ->
        {:%{}, [], fields}
      {struct, fields} ->
        {:%, [], [struct, {:%{}, [], fields}]}
    end
  query = put_in(query.select.expr, expr)
  {expr, _} = prewalk(expr, :select, query, select, 0, adapter)
  {meta, _fields, _from} = collect_fields(expr, [], :error, query, take)
  {query, meta}
end
# `merge/2`: combine both sides' fields (right wins on duplicate keys).
# Merging is only allowed when the struct modules are compatible.
defp subquery_select({:merge, _, [left, right]}, take, query) do
  {left_struct, left_fields} = subquery_select(left, take, query)
  {right_struct, right_fields} = subquery_select(right, take, query)
  struct =
    case {left_struct, right_struct} do
      {struct, struct} -> struct
      {_, nil} -> left_struct
      {nil, _} -> error!(query, "cannot merge because the left side is a map " <>
                                "and the right side is a #{inspect right_struct} struct")
      {_, _} -> error!(query, "cannot merge because the left side is a #{inspect left_struct} " <>
                              "and the right side is a #{inspect right_struct} struct")
    end
  {struct, Keyword.merge(left_fields, right_fields)}
end
# Explicit struct literal in select: keep the struct name, recurse on
# the inner map for the fields.
defp subquery_select({:%, _, [name, map]}, take, query) do
  {_, fields} = subquery_select(map, take, query)
  {name, fields}
end
# Map-update syntax `%{t | field: expr}`: expand the source's fields and
# then overlay the updated pairs, de-duplicating so each alias appears once.
defp subquery_select({:%{}, _, [{:|, _, [{:&, [], [ix]}, pairs]}]} = expr, take, query) do
  assert_subquery_fields!(query, expr, pairs)
  {source, _} = source_take!(:select, query, take, ix, ix)
  {struct, fields} = subquery_struct_and_fields(source)
  update_keys = Keyword.keys(pairs)
  case update_keys -- fields do
    [] -> :ok
    [key | _] -> error!(query, "invalid key `#{inspect key}` on map update in subquery")
  end
  # In case of map updates, we need to remove duplicated fields
  # at query time because we use the field names as aliases and
  # duplicate aliases will lead to invalid queries.
  kept_keys = fields -- update_keys
  {struct, subquery_fields(kept_keys, ix) ++ pairs}
end
# Plain map literal: validate keys/values and use the pairs as columns.
defp subquery_select({:%{}, _, pairs} = expr, _take, query) do
  assert_subquery_fields!(query, expr, pairs)
  {nil, pairs}
end
# Whole-source select `t`: expand to all (taken) fields of that source.
defp subquery_select({:&, _, [ix]}, take, query) do
  {source, _} = source_take!(:select, query, take, ix, ix)
  {struct, fields} = subquery_struct_and_fields(source)
  {struct, subquery_fields(fields, ix)}
end
# Single-field select `t.field`: a one-column subquery.
defp subquery_select({{:., _, [{:&, _, [ix]}, field]}, _, []}, _take, _query) do
  {nil, subquery_fields([field], ix)}
end
defp subquery_select(expr, _take, query) do
  error!(query, "subquery must select a source (t), a field (t.field) or a map, got: `#{Macro.to_string(expr)}`")
end
# Extracts the struct module (nil for plain maps) and the ordered field
# names from a subquery select description.
defp subquery_struct_and_fields({:source, {_source, schema}, types}) do
  {schema, Keyword.keys(types)}
end
defp subquery_struct_and_fields({:struct, name, types}) do
  {name, Keyword.keys(types)}
end
defp subquery_struct_and_fields({:map, types}) do
  {nil, Keyword.keys(types)}
end
# Builds the `{field, t.field}` selector pair for every field of the
# source at binding index `ix`.
defp subquery_fields(fields, ix) do
  Enum.map(fields, fn field ->
    {field, {{:., [], [{:&, [], [ix]}, field]}, [], []}}
  end)
end
# Returns the `{field, type}` keyword list a subquery's select exposes.
defp subquery_types(%{select: {:map, types}}), do: types
defp subquery_types(%{select: {:struct, _name, types}}), do: types
# Map selects in subqueries must use atom keys and scalar values, since
# every pair becomes a named column of the subquery.
defp assert_subquery_fields!(query, expr, pairs) do
  Enum.each(pairs, fn
    {key, _} when not is_atom(key) ->
      error!(query, "only atom keys are allowed when selecting a map in subquery, got: `#{Macro.to_string(expr)}`")
    {key, value} ->
      if valid_subquery_value?(value) do
        {key, value}
      else
        error!(query, "maps, lists, tuples and sources are not allowed as map values in subquery, got: `#{Macro.to_string(expr)}`")
      end
  end)
end
# Values of a map selected in a subquery must be scalar-ish expressions:
# two-element tuples, lists, and tuple/map/source containers are rejected.
defp valid_subquery_value?(value) do
  case value do
    {_, _} ->
      false
    list when is_list(list) ->
      false
    {container, _, args} when container in [:{}, :%{}, :&] and is_list(args) ->
      false
    _ ->
      true
  end
end
# Entry point for join expansion: walks all joins, starting the source
# counter at 1 (0 is the `from`).
defp prepare_joins(query, sources, offset, adapter) do
  prepare_joins(query.joins, query, [], sources, [], 1, offset, adapter)
end
# Association join (`join: c in assoc(p, :comments)`): expand it through
# the association's own joins_query, rewriting the child query's binding
# indexes into the parent's numbering.
defp prepare_joins([%JoinExpr{assoc: {ix, assoc}, qual: qual, on: on} = join|t],
                   query, joins, sources, tail_sources, counter, offset, adapter) do
  schema = schema_for_association_join!(query, join, Enum.fetch!(Enum.reverse(sources), ix))
  refl = schema.__schema__(:association, assoc)
  unless refl do
    error! query, join, "could not find association `#{assoc}` on schema #{inspect schema}"
  end
  # If we have the following join:
  #
  #     from p in Post,
  #       join: p in assoc(p, :comments)
  #
  # The callback below will return a query that contains only
  # joins in a way it starts with the Post and ends in the
  # Comment.
  #
  # This means we need to rewrite the joins below to properly
  # shift the &... identifier in a way that:
  #
  #    &0 -> becomes assoc ix
  #    &LAST_JOIN -> becomes counter
  #
  # All values in the middle should be shifted by offset,
  # all values after join are already correct.
  child = refl.__struct__.joins_query(refl)
  last_ix = length(child.joins)
  source_ix = counter
  {child_joins, child_sources, child_tail} =
    prepare_joins(child, [child.from], offset + last_ix - 1, adapter)
  # Rewrite joins indexes as mentioned above
  child_joins = Enum.map(child_joins, &rewrite_join(&1, qual, ix, last_ix, source_ix, offset))
  # Drop the last resource which is the association owner (it is reversed)
  child_sources = Enum.drop(child_sources, -1)
  [current_source|child_sources] = child_sources
  child_sources = child_tail ++ child_sources
  # The user-supplied `on` is ANDed onto the first expanded join.
  prepare_joins(t, query, attach_on(child_joins, on) ++ joins, [current_source|sources],
                child_sources ++ tail_sources, counter + 1, offset + length(child_sources), adapter)
end
# Join on an interpolated query (`join: c in ^query`): only bare
# where-filtered queries are allowed; they are converted into a join.
defp prepare_joins([%JoinExpr{source: %Ecto.Query{from: source} = join_query, qual: qual, on: on} = join|t],
                   query, joins, sources, tail_sources, counter, offset, adapter) do
  case join_query do
    %{order_bys: [], limit: nil, offset: nil, group_bys: [], joins: [],
      havings: [], preloads: [], assocs: [], distinct: nil, lock: nil} ->
      source = prepare_source(query, source, adapter)
      [join] = attach_on(query_to_joins(qual, %{join_query | from: source}, counter), on)
      prepare_joins(t, query, [join|joins], [source|sources], tail_sources, counter + 1, offset, adapter)
    _ ->
      error! query, join, "queries in joins can only have `where` conditions"
  end
end
# Plain join on a schema/table/fragment/subquery source.
defp prepare_joins([%JoinExpr{source: source} = join|t],
                   query, joins, sources, tail_sources, counter, offset, adapter) do
  source = prepare_source(query, source, adapter)
  join = %{join | source: source, ix: counter}
  prepare_joins(t, query, [join|joins], [source|sources], tail_sources, counter + 1, offset, adapter)
end
defp prepare_joins([], _query, joins, sources, tail_sources, _counter, _offset, _adapter) do
  {joins, sources, tail_sources}
end
# ANDs the user-written `on` condition onto the first join of the list,
# merging its parameters as well.
defp attach_on([%{on: on} = h | t], %{expr: expr, params: params}) do
  [%{h | on: merge_expr_and_params(:and, on, expr, params)} | t]
end
# Rewrites one join taken from an association's child query: every `&ix`
# reference in its `on` expression and its own index are remapped into
# the parent query's binding space (see rewrite_ix/5), and the join
# qualifier is replaced by the one the user requested.
defp rewrite_join(%{on: on, ix: join_ix} = join, qual, ix, last_ix, source_ix, inc_ix) do
  on = update_in on.expr, fn expr ->
    Macro.prewalk expr, fn
      # NOTE: `join_ix` here is a fresh pattern binding (no pin), so this
      # clause matches *any* `&n` node, not just the join's own index.
      {:&, meta, [join_ix]} ->
        {:&, meta, [rewrite_ix(join_ix, ix, last_ix, source_ix, inc_ix)]}
      other ->
        other
    end
  end
  %{join | on: on, qual: qual,
           ix: rewrite_ix(join_ix, ix, last_ix, source_ix, inc_ix)}
end
# Maps a binding index from an association's child query into the parent
# query's numbering:
#   * index 0 is the association owner and becomes the parent source `ix`
#   * the child's last index becomes this join's own position `source_ix`
#   * indexes beyond the child query are already parent-relative
#   * everything in between is shifted by `inc_ix`
defp rewrite_ix(0, ix, _last_ix, _source_ix, _inc_ix) do
  ix
end
defp rewrite_ix(last_ix, _ix, last_ix, source_ix, _inc_ix) do
  source_ix
end
defp rewrite_ix(join_ix, _ix, last_ix, _source_ix, _inc_ix) when join_ix > last_ix do
  join_ix
end
defp rewrite_ix(join_ix, _ix, _last_ix, _source_ix, inc_ix) do
  join_ix + inc_ix
end
# Resolves the schema module behind the source an association join
# hangs off of. Association joins require a schema to look up the
# association reflection, so schemaless sources and fragments error out.
defp schema_for_association_join!(query, join, source) do
  case source do
    {source, nil} ->
      error! query, join, "cannot perform association join on #{inspect source} " <>
                          "because it does not have a schema"
    {_, schema} ->
      schema
    %Ecto.SubQuery{select: {:struct, schema, _}} ->
      # Subqueries only carry a schema when they select a full struct.
      schema
    %Ecto.SubQuery{} ->
      error! query, join, "can only perform association joins on subqueries " <>
                          "that return a source with schema in select"
    _ ->
      error! query, join, "can only perform association joins on sources with a schema"
  end
end
@doc """
Prepare the parameters by merging and casting them according to sources.
"""
def prepare_cache(query, operation, adapter, counter) do
  # Walk all query expressions, accumulating {cache_key_parts, params}.
  # The expressions themselves are left untouched (&3 is returned as-is).
  {query, {cache, params}} =
    traverse_exprs(query, operation, {[], []}, &{&3, merge_cache(&1, &2, &3, &4, adapter)})
  {query, Enum.reverse(params), finalize_cache(query, operation, cache, counter)}
end
# Folds the `from` source into the cache key. Subquery sources may be
# uncacheable, which poisons the whole key.
defp merge_cache(:from, _query, expr, {cache, params}, _adapter) do
  {key, params} = source_cache(expr, params)
  {merge_cache(key, cache, key != :nocache), params}
end
# Single-expression clauses: select, distinct, limit, offset.
defp merge_cache(kind, query, expr, {cache, params}, adapter)
     when kind in ~w(select distinct limit offset)a do
  if expr do
    {params, cacheable?} = cast_and_merge_params(kind, query, expr, params, adapter)
    {merge_cache({kind, expr_to_cache(expr)}, cache, cacheable?), params}
  else
    {cache, params}
  end
end
# List-of-expression clauses: where, update, group_by, having, order_by.
defp merge_cache(kind, query, exprs, {cache, params}, adapter)
     when kind in ~w(where update group_by having order_by)a do
  {expr_cache, {params, cacheable?}} =
    Enum.map_reduce exprs, {params, true}, fn expr, {params, cacheable?} ->
      {params, current_cacheable?} = cast_and_merge_params(kind, query, expr, params, adapter)
      {expr_to_cache(expr), {params, cacheable? and current_cacheable?}}
    end
  case expr_cache do
    [] -> {cache, params}
    _ -> {merge_cache({kind, expr_cache}, cache, cacheable?), params}
  end
end
# Joins contribute their qualifier, source cache key and `on` AST; both
# the join's and the on-expression's params are merged.
defp merge_cache(:join, query, exprs, {cache, params}, adapter) do
  {expr_cache, {params, cacheable?}} =
    Enum.map_reduce exprs, {params, true}, fn
      %JoinExpr{on: on, qual: qual, source: source} = join, {params, cacheable?} ->
        {key, params} = source_cache(source, params)
        {params, join_cacheable?} = cast_and_merge_params(:join, query, join, params, adapter)
        {params, on_cacheable?} = cast_and_merge_params(:join, query, on, params, adapter)
        {{qual, key, on.expr},
         {params, cacheable? and join_cacheable? and on_cacheable? and key != :nocache}}
    end
  case expr_cache do
    [] -> {cache, params}
    _ -> {merge_cache({:join, expr_cache}, cache, cacheable?), params}
  end
end
# Only the expression AST (plus op for boolean exprs) goes into the key.
defp expr_to_cache(%BooleanExpr{op: op, expr: expr}), do: {op, expr}
defp expr_to_cache(%QueryExpr{expr: expr}), do: expr
defp expr_to_cache(%SelectExpr{expr: expr}), do: expr
# Casts each of the expression's params and prepends them (reversed) to
# the accumulated list. An `{:in, values}` expansion makes the query
# uncacheable since its arity depends on the runtime list length.
defp cast_and_merge_params(kind, query, expr, params, adapter) do
  Enum.reduce expr.params, {params, true}, fn {v, type}, {acc, cacheable?} ->
    case cast_param(kind, query, expr, v, type, adapter) do
      {:in, v} ->
        {Enum.reverse(v, acc), false}
      v ->
        {[v|acc], cacheable?}
    end
  end
end
# Folds one cache-key fragment into the accumulated key. Any uncacheable
# fragment (cacheable? == false) or an already-poisoned accumulator
# turns the entire key into :nocache.
defp merge_cache(_left, _right, false) do
  :nocache
end
defp merge_cache(_left, :nocache, true) do
  :nocache
end
defp merge_cache(left, right, true) do
  [left | right]
end
# Assembles the final cache key. Everything that affects the generated
# statement (operation, param count, take/assocs/prefix/lock and the
# per-clause fragments collected before) must be part of the key.
defp finalize_cache(_query, _operation, :nocache, _counter) do
  :nocache
end
defp finalize_cache(%{assocs: assocs, prefix: prefix, lock: lock, select: select},
                    operation, cache, counter) do
  cache =
    case select do
      %{take: take} when take != %{} ->
        [take: take] ++ cache
      _ ->
        cache
    end
  cache =
    cache
    |> prepend_if(assocs != [], [assocs: assocs])
    |> prepend_if(prefix != nil, [prefix: prefix])
    |> prepend_if(lock != nil, [lock: lock])
  [operation, counter | cache]
end
# Conditionally prefixes `prepend` onto the cache-key list.
defp prepend_if(cache, flag, prepend) do
  case flag do
    true -> prepend ++ cache
    false -> cache
  end
end
# Cache key contribution of a source. Schemas include their structure
# hash so schema recompilation invalidates cached queries; subqueries
# contribute their own precomputed key and merge their params in.
defp source_cache({_, nil} = source, params),
  do: {source, params}
defp source_cache({bin, schema}, params),
  do: {{bin, schema, schema.__schema__(:hash)}, params}
defp source_cache({:fragment, _, _} = source, params),
  do: {source, params}
defp source_cache(%Ecto.SubQuery{params: inner, cache: key}, params),
  do: {key, Enum.reverse(inner, params)}
# Dynamics and keyword lists cannot appear as interpolated values at
# this depth; give targeted errors instead of a generic cast failure.
defp cast_param(_kind, query, expr, %DynamicExpr{}, _type, _value) do
  error! query, expr, "dynamic expressions can only be interpolated inside other " <>
                      "dynamic expressions or at the top level of where, having, update or a join's on"
end
defp cast_param(_kind, query, expr, [{_, _} | _], _type, _value) do
  error! query, expr, "keyword lists can only be interpolated at the top level of " <>
                      "where, having, distinct, order_by, update or a join's on"
end
# Resolves the field's type and casts/dumps the runtime value, turning
# cast failures into query errors with full context.
defp cast_param(kind, query, expr, v, type, adapter) do
  type = field_type!(kind, query, expr, type)
  try do
    case cast_param(kind, type, v, adapter) do
      {:ok, v} -> v
      {:error, error} -> error! query, expr, error
    end
  catch
    :error, %Ecto.QueryError{} = e ->
      raise Ecto.Query.CastError, value: v, type: type, message: Exception.message(e)
  end
end
# cast + adapter dump pipeline; any step short-circuits with {:error, _}.
defp cast_param(kind, type, v, adapter) do
  with {:ok, type} <- normalize_param(kind, type, v),
       {:ok, v} <- cast_param(kind, type, v),
       do: dump_param(adapter, type, v)
end
@doc """
Prepare association fields found in the query.
"""
def prepare_assocs(query) do
  prepare_assocs(query, 0, query.assocs)
  query
end
defp prepare_assocs(_query, _ix, []), do: :ok
# Validates each `preload: [assoc: binding]` entry: the field must be a
# real association on the parent schema and the bound join must be of a
# qualifier that cannot multiply/lose rows unexpectedly.
defp prepare_assocs(query, ix, assocs) do
  # We validate the schema exists when preparing joins above
  {_, parent_schema} = get_source!(:preload, query, ix)
  Enum.each assocs, fn {assoc, {child_ix, child_assocs}} ->
    refl = parent_schema.__schema__(:association, assoc)
    unless refl do
      error! query, "field `#{inspect parent_schema}.#{assoc}` " <>
                    "in preload is not an association"
    end
    case find_source_expr(query, child_ix) do
      %JoinExpr{qual: qual} when qual in [:inner, :left, :inner_lateral, :left_lateral] ->
        :ok
      %JoinExpr{qual: qual} ->
        error! query, "association `#{inspect parent_schema}.#{assoc}` " <>
                      "in preload requires an inner, left or lateral join, got #{qual} join"
      _ ->
        :ok
    end
    # Recurse into nested preloads rooted at the child binding.
    prepare_assocs(query, child_ix, child_assocs)
  end
end
# Returns the expression owning binding index `ix`: the `from` for 0,
# otherwise the join whose `ix` matches (nil when absent).
defp find_source_expr(%{from: from}, 0) do
  from
end
defp find_source_expr(%{joins: joins}, ix) do
  Enum.find(joins, fn join -> join.ix == ix end)
end
@doc """
Used for customizing the query returning result.
"""
def returning(%{select: select} = query, _fields) when select != nil do
  # An explicit select always wins over :returning.
  query
end
def returning(%{select: nil}, []) do
  raise ArgumentError, ":returning expects at least one field to be given, got an empty list"
end
# Field list: select those fields off the root binding (&0).
def returning(%{select: nil} = query, fields) when is_list(fields) do
  %{query | select: %SelectExpr{expr: {:&, [], [0]}, take: %{0 => {:any, fields}},
                                line: __ENV__.line, file: __ENV__.file}}
end
# `true`: select the whole root binding.
def returning(%{select: nil} = query, true) do
  %{query | select: %SelectExpr{expr: {:&, [], [0]}, line: __ENV__.line, file: __ENV__.file}}
end
def returning(%{select: nil} = query, false) do
  query
end
@doc """
Asserts there is no select statement in the given query.
"""
def assert_no_select!(%{select: nil} = query, _operation) do
  query
end
def assert_no_select!(%{select: _} = query, operation) do
  raise Ecto.QueryError,
    query: query,
    message: "`select` clause is not supported in `#{operation}`, " <>
             "please pass the :returning option instead"
end
@doc """
Normalizes the query.

After the query was prepared and there is no cache
entry, we need to update its interpolations and check
its fields and associations exist and are valid.
"""
def normalize(query, operation, adapter, counter) do
  query
  |> normalize_query(operation, adapter, counter)
  |> elem(0)
  |> normalize_select()
rescue
  e ->
    # Reraise errors so we ignore the planner inner stacktrace
    reraise e
end
# Per-operation sanity checks, then re-number every `^` placeholder and
# validate field references by traversing all expressions.
defp normalize_query(query, operation, adapter, counter) do
  case operation do
    :all ->
      assert_no_update!(query, operation)
    :update_all ->
      assert_update!(query, operation)
      assert_only_filter_expressions!(query, operation)
    :delete_all ->
      assert_no_update!(query, operation)
      assert_only_filter_expressions!(query, operation)
  end
  traverse_exprs(query, operation, counter,
                 &validate_and_increment(&1, &2, &3, &4, operation, adapter))
end
# Subqueries in `from` are only valid for reads.
defp validate_and_increment(:from, query, %Ecto.SubQuery{}, _counter, kind, _adapter) when kind != :all do
  error! query, "`#{kind}` does not allow subqueries in `from`"
end
defp validate_and_increment(:from, query, expr, counter, _kind, adapter) do
  prewalk_source(expr, :from, query, expr, counter, adapter)
end
# Single-expression clauses.
defp validate_and_increment(kind, query, expr, counter, _operation, adapter)
     when kind in ~w(select distinct limit offset)a do
  if expr do
    prewalk(kind, query, expr, counter, adapter)
  else
    {nil, counter}
  end
end
# Multi-expression clauses; empty expressions are dropped.
defp validate_and_increment(kind, query, exprs, counter, _operation, adapter)
     when kind in ~w(where group_by having order_by update)a do
  {exprs, counter} =
    Enum.reduce(exprs, {[], counter}, fn
      %{expr: []}, {list, acc} ->
        {list, acc}
      expr, {list, acc} ->
        {expr, acc} = prewalk(kind, query, expr, acc, adapter)
        {[expr|list], acc}
    end)
  {Enum.reverse(exprs), counter}
end
# Joins: both the source (may hold a subquery/fragment) and the `on`
# expression are walked; expression-level params are pruned afterwards.
defp validate_and_increment(:join, query, exprs, counter, _operation, adapter) do
  Enum.map_reduce exprs, counter, fn join, acc ->
    {source, acc} = prewalk_source(join.source, :join, query, join, acc, adapter)
    {on, acc} = prewalk(:join, query, join.on, acc, adapter)
    {%{join | on: on, source: source, params: nil}, acc}
  end
end
# Walks a source expression. Fragments have their interior expressions
# renumbered; subqueries are fully normalized (their select fields get
# zipped with the column aliases); everything else passes through.
defp prewalk_source({:fragment, meta, fragments}, kind, query, expr, acc, adapter) do
  {fragments, acc} = prewalk(fragments, kind, query, expr, acc, adapter)
  {{:fragment, meta, fragments}, acc}
end
defp prewalk_source(%Ecto.SubQuery{query: inner_query} = subquery, _kind, query, _expr, counter, adapter) do
  try do
    {inner_query, counter} = normalize_query(inner_query, :all, adapter, counter)
    {inner_query, _} = normalize_select(inner_query)
    # Pair each normalized select field with its alias so the adapter can
    # emit `expr AS alias` columns for the subquery.
    keys = subquery |> subquery_types() |> Keyword.keys()
    inner_query = update_in(inner_query.select.fields, &Enum.zip(keys, &1))
    {%{subquery | query: inner_query}, counter}
  rescue
    e -> raise Ecto.SubQueryError, query: query, exception: e
  end
end
defp prewalk_source(source, _kind, _query, _expr, acc, _adapter) do
  {source, acc}
end
# prewalk/5 walks a whole query expression, renumbering `^` placeholders
# with a running counter, resolving field sources and tagging types.
# The expression's own params are pruned (set to nil) once walked.
defp prewalk(:update, query, expr, counter, adapter) do
  source = get_source!(:update, query, 0)
  {inner, acc} =
    # expr.expr is a keyword of update ops (:set, :inc, ...) to field lists.
    Enum.map_reduce expr.expr, counter, fn {op, kw}, counter ->
      {kw, acc} =
        Enum.map_reduce kw, counter, fn {field, value}, counter ->
          {value, acc} = prewalk(value, :update, query, expr, counter, adapter)
          {{field_source(source, field), value}, acc}
        end
      {{op, kw}, acc}
    end
  {%{expr | expr: inner, params: nil}, acc}
end
defp prewalk(kind, query, expr, counter, adapter) do
  {inner, acc} = prewalk(expr.expr, kind, query, expr, counter, adapter)
  {%{expr | expr: inner, params: nil}, acc}
end
# prewalk/6 walks the raw AST.
# `x in ^list`: expands into a variable-length placeholder.
defp prewalk({:in, in_meta, [left, {:^, meta, [param]}]}, kind, query, expr, acc, adapter) do
  {left, acc} = prewalk(left, kind, query, expr, acc, adapter)
  {right, acc} = validate_in(meta, expr, param, acc, adapter)
  {{:in, in_meta, [left, right]}, acc}
end
# `t.field`: rewrite the field through the schema's source mapping.
defp prewalk({{:., dot_meta, [{:&, amp_meta, [ix]}, field]}, meta, []},
             kind, query, _expr, acc, _adapter) do
  field = field_source(get_source!(kind, query, ix), field)
  {{{:., dot_meta, [{:&, amp_meta, [ix]}, field]}, meta, []}, acc}
end
# `^n`: replace the per-expression index with the global running counter.
defp prewalk({:^, meta, [ix]}, _kind, _query, _expr, acc, _adapter) when is_integer(ix) do
  {{:^, meta, [acc]}, acc + 1}
end
# `type(expr, type)`: resolve and attach the type as a Tagged node.
defp prewalk({:type, _, [arg, type]}, kind, query, expr, acc, adapter) do
  {arg, acc} = prewalk(arg, kind, query, expr, acc, adapter)
  type = field_type!(kind, query, expr, type)
  {%Ecto.Query.Tagged{value: arg, tag: type, type: Ecto.Type.type(type)}, acc}
end
# Already-tagged literals: base types stay as-is, custom types get the
# value dumped to the adapter's representation.
defp prewalk(%Ecto.Query.Tagged{value: v, type: type} = tagged, kind, query, expr, acc, adapter) do
  if Ecto.Type.base?(type) do
    {tagged, acc}
  else
    {dump_param(kind, query, expr, v, type, adapter), acc}
  end
end
defp prewalk({left, right}, kind, query, expr, acc, adapter) do
  {left, acc} = prewalk(left, kind, query, expr, acc, adapter)
  {right, acc} = prewalk(right, kind, query, expr, acc, adapter)
  {{left, right}, acc}
end
defp prewalk({left, meta, args}, kind, query, expr, acc, adapter) do
  {left, acc} = prewalk(left, kind, query, expr, acc, adapter)
  {args, acc} = prewalk(args, kind, query, expr, acc, adapter)
  {{left, meta, args}, acc}
end
defp prewalk(list, kind, query, expr, acc, adapter) when is_list(list) do
  Enum.map_reduce(list, acc, &prewalk(&1, kind, query, expr, &2, adapter))
end
# Literals and anything else pass through untouched.
defp prewalk(other, _kind, _query, _expr, acc, _adapter) do
  {other, acc}
end
# Resolves the field type and dumps `v` through the adapter, raising a
# query error with a cast hint when dumping fails.
defp dump_param(kind, query, expr, v, type, adapter) do
  type = field_type!(kind, query, expr, type)
  case dump_param(kind, type, v, adapter) do
    {:ok, v} ->
      v
    {:error, error} ->
      error = error <> ". Or the value is incompatible or it must be " <>
                       "interpolated (using ^) so it may be cast accordingly"
      error! query, expr, error
  end
end
defp dump_param(kind, type, v, adapter) do
  with {:ok, type} <- normalize_param(kind, type, v),
       do: dump_param(adapter, type, v)
end
# Turns an `x in ^list` placeholder into `{:^, meta, [start, length]}`.
# The counter advances by the list length unless the adapter dumps the
# whole list as a single :in parameter.
defp validate_in(meta, expr, param, acc, adapter) do
  {v, t} = Enum.fetch!(expr.params, param)
  length = length(v)
  case adapter.dumpers(t, t) do
    [{:in, _} | _] -> {{:^, meta, [acc, length]}, acc + length}
    _ -> {{:^, meta, [acc, length]}, acc + 1}
  end
end
# Computes the flat field list the adapter must fetch, plus the select
# metadata (pre/postprocess instructions) the repo uses to rebuild the
# user's requested shape from the returned rows.
defp normalize_select(%{select: nil} = query) do
  {query, nil}
end
defp normalize_select(query) do
  %{assocs: assocs, preloads: preloads, select: select} = query
  %{take: take, expr: expr} = select
  {tag, from_take} = Map.get(take, 0, {:any, []})
  source = get_source!(:select, query, 0)
  # In from, if there is a schema and we have a map tag with preloads,
  # it needs to be converted to a map in a later pass.
  {take, from_tag} =
    case tag do
      :map when is_tuple(source) and elem(source, 1) != nil and preloads != [] ->
        {Map.put(take, 0, {:struct, from_take}), :map}
      _ ->
        {take, :any}
    end
  {postprocess, fields, from} =
    collect_fields(expr, [], :error, query, take)
  {fields, preprocess, postprocess} =
    case from do
      {:ok, from_pre, from_taken} ->
        # The root binding is selected: its fields come first, followed by
        # all fields needed to build the preloaded associations.
        {assoc_exprs, assoc_fields} = collect_assocs([], [], query, tag, from_take, assocs)
        fields = from_taken ++ Enum.reverse(assoc_fields, Enum.reverse(fields))
        preprocess = [from_pre | Enum.reverse(assoc_exprs)]
        {fields, preprocess, {:from, from_tag, postprocess}}
      :error when preloads != [] or assocs != [] ->
        error! query, "the binding used in `from` must be selected in `select` when using `preload`"
      :error ->
        {Enum.reverse(fields), [], postprocess}
    end
  select = %{preprocess: preprocess, postprocess: postprocess, take: from_take, assocs: assocs}
  {put_in(query.select.fields, fields), select}
end
# Walks the select AST and returns {postprocess_descriptor, fields, from}:
# the descriptor tells the repo how to rebuild the selected shape, `fields`
# accumulates (reversed) the expressions the adapter must fetch, and `from`
# tracks whether/with-what the root binding (&0) was selected.
#
# Root binding selected for the first time: expand its take.
defp collect_fields({:&, _, [0]}, fields, :error, query, take) do
  {expr, taken} = source_take!(:select, query, take, 0, 0)
  {{:source, :from}, fields, {:ok, expr, taken}}
end
# Root binding selected again: reuse the previous expansion.
defp collect_fields({:&, _, [0]}, fields, from, _query, _take) do
  {{:source, :from}, fields, from}
end
# Any other binding: expand its (taken) fields inline.
defp collect_fields({:&, _, [ix]}, fields, from, query, take) do
  {expr, taken} = source_take!(:select, query, take, ix, ix)
  {expr, Enum.reverse(taken, fields), from}
end
# Aggregates over a schema field: derive the result type from the
# aggregate (count -> integer) or the underlying field's type.
defp collect_fields({agg, _, [{{:., _, [{:&, _, [ix]}, field]}, _, []} | _]} = expr,
                    fields, from, %{select: select} = query, _take)
     when agg in ~w(count avg min max sum)a do
  type =
    # TODO: Support the :number type
    case agg do
      :count -> :integer
      :avg -> :any
      :sum -> :any
      _ -> source_type!(:select, query, select, ix, field)
    end
  {{:value, type}, [expr | fields], from}
end
# Plain `t.field`: type comes from the source's schema/subquery types.
defp collect_fields({{:., _, [{:&, _, [ix]}, field]}, _, []} = expr,
                    fields, from, %{select: select} = query, _take) do
  type = source_type!(:select, query, select, ix, field)
  {{:value, type}, [expr | fields], from}
end
defp collect_fields({left, right}, fields, from, query, take) do
  {args, fields, from} = collect_args([left, right], fields, from, query, take, [])
  {{:tuple, args}, fields, from}
end
defp collect_fields({:{}, _, args}, fields, from, query, take) do
  {args, fields, from} = collect_args(args, fields, from, query, take, [])
  {{:tuple, args}, fields, from}
end
# Map-update `%{data | k: v}` and plain map literals.
defp collect_fields({:%{}, _, [{:|, _, [data, args]}]}, fields, from, query, take) do
  {data, fields, from} = collect_fields(data, fields, from, query, take)
  {args, fields, from} = collect_kv(args, fields, from, query, take, [])
  {{:map, data, args}, fields, from}
end
defp collect_fields({:%{}, _, args}, fields, from, query, take) do
  {args, fields, from} = collect_kv(args, fields, from, query, take, [])
  {{:map, args}, fields, from}
end
# Struct literals: struct!/2 is called for its side effect only — it
# raises at planning time if `name` is not a struct or a key is invalid.
defp collect_fields({:%, _, [name, {:%{}, _, [{:|, _, [data, args]}]}]}, fields, from, query, take) do
  {data, fields, from} = collect_fields(data, fields, from, query, take)
  {args, fields, from} = collect_kv(args, fields, from, query, take, [])
  struct!(name, args)
  {{:struct, name, data, args}, fields, from}
end
defp collect_fields({:%, _, [name, {:%{}, _, args}]}, fields, from, query, take) do
  {args, fields, from} = collect_kv(args, fields, from, query, take, [])
  struct!(name, args)
  {{:struct, name, args}, fields, from}
end
defp collect_fields({:merge, _, args}, fields, from, query, take) do
  {[left, right], fields, from} = collect_args(args, fields, from, query, take, [])
  {{:merge, left, right}, fields, from}
end
# Interval arithmetic: result type follows the first argument, falling
# back to date/naive_datetime when the argument type is unknown.
defp collect_fields({:date_add, _, [arg | _]} = expr, fields, from, query, take) do
  case collect_fields(arg, fields, from, query, take) do
    {{:value, :any}, _, _} -> {{:value, :date}, [expr | fields], from}
    {type, _, _} -> {type, [expr | fields], from}
  end
end
defp collect_fields({:datetime_add, _, [arg | _]} = expr, fields, from, query, take) do
  case collect_fields(arg, fields, from, query, take) do
    {{:value, :any}, _, _} -> {{:value, :naive_datetime}, [expr | fields], from}
    {type, _, _} -> {type, [expr | fields], from}
  end
end
defp collect_fields(args, fields, from, query, take) when is_list(args) do
  {args, fields, from} = collect_args(args, fields, from, query, take, [])
  {{:list, args}, fields, from}
end
# Compile-time literals need nothing fetched from the database.
defp collect_fields(expr, fields, from, _query, _take)
     when is_atom(expr) or is_binary(expr) or is_number(expr) do
  {expr, fields, from}
end
defp collect_fields(%Ecto.Query.Tagged{tag: tag} = expr, fields, from, _query, _take) do
  {{:value, tag}, [expr | fields], from}
end
# Any other expression is fetched as an untyped value.
defp collect_fields(expr, fields, from, _query, _take) do
  {{:value, :any}, [expr | fields], from}
end
# Collects the fields of a key-value list (map/struct pairs), threading
# the `fields`/`from` accumulators through collect_fields for both sides.
defp collect_kv([{key, value} | elems], fields, from, query, take, acc) do
  {key, fields, from} = collect_fields(key, fields, from, query, take)
  {value, fields, from} = collect_fields(value, fields, from, query, take)
  collect_kv(elems, fields, from, query, take, [{key, value} | acc])
end
defp collect_kv([], fields, from, _query, _take, acc) do
  {Enum.reverse(acc), fields, from}
end
# Same, for a positional argument list (tuples, lists, merge args).
defp collect_args([elem | elems], fields, from, query, take, acc) do
  {elem, fields, from} = collect_fields(elem, fields, from, query, take)
  collect_args(elems, fields, from, query, take, [elem | acc])
end
defp collect_args([], fields, from, _query, _take, acc) do
  {Enum.reverse(acc), fields, from}
end
# Collects the preprocess expressions and fetch fields for every
# preloaded association (depth-first: children before siblings).
# Only schema-backed sources can be preloaded.
defp collect_assocs(exprs, fields, query, tag, take, [{assoc, {ix, children}}|tail]) do
  case get_source!(:preload, query, ix) do
    {_, schema} = source when schema != nil ->
      {fetch, take_children} = fetch_assoc(tag, take, assoc)
      {expr, taken} = take!(source, query, fetch, assoc, ix)
      exprs = [expr | exprs]
      fields = Enum.reverse(taken, fields)
      {exprs, fields} = collect_assocs(exprs, fields, query, tag, take_children, children)
      {exprs, fields} = collect_assocs(exprs, fields, query, tag, take, tail)
      {exprs, fields}
    _ ->
      error! query, "can only preload sources with a schema " <>
                    "(fragments, binary and subqueries are not supported)"
  end
end
defp collect_assocs(exprs, fields, _query, _tag, _take, []) do
  {exprs, fields}
end
# Looks up the take specification for `assoc`, tagging a hit with the
# parent's take tag. A miss means the full association struct is loaded
# and children have no take filter.
defp fetch_assoc(tag, take, assoc) do
  with {:ok, fields} <- Access.fetch(take, assoc) do
    {{:ok, {tag, fields}}, fields}
  else
    :error -> {:error, []}
  end
end
# Expands the take specification of the source bound at `ix` into the
# postprocess descriptor and the list of field expressions to fetch.
defp source_take!(kind, query, take, field, ix) do
  source = get_source!(kind, query, ix)
  take!(source, query, Access.fetch(take, field), field, ix)
end
# `fetched` is the Access.fetch result of the take spec for this source:
# {:ok, {:struct | :map | :any, fields}} for an explicit take, :error for
# "take everything".
defp take!(source, query, fetched, field, ix) do
  case {fetched, source} do
    {{:ok, {_, []}}, {_, _}} ->
      error! query, "at least one field must be selected for binding `#{field}`, got an empty list"
    {{:ok, {:struct, _}}, {_, nil}} ->
      error! query, "struct/2 in select expects a source with a schema"
    {{:ok, {kind, fields}}, {source, schema}} ->
      # Schemaless sources have no dump mapping; a :map take drops the
      # struct so the repo builds a plain map.
      dumper = if schema, do: schema.__schema__(:dump), else: %{}
      schema = if kind == :map, do: nil, else: schema
      {types, fields} = select_dump(List.wrap(fields), dumper, ix)
      {{:source, {source, schema}, types}, fields}
    {{:ok, {_, _}}, {:fragment, _, _}} ->
      error! query, "it is not possible to return a map/struct subset of a fragment, " <>
                    "you must explicitly return the desired individual fields"
    {{:ok, {_, _}}, %Ecto.SubQuery{}} ->
      error! query, "it is not possible to return a map/struct subset of a subquery, " <>
                    "you must explicitly select the whole subquery or individual fields only"
    {:error, {_, nil}} ->
      # No take, schemaless source: fetch the whole row as a map.
      {{:value, :map}, [{:&, [], [ix]}]}
    {:error, {_, schema}} ->
      # No take, schema source: fetch all schema fields.
      {types, fields} = select_dump(schema.__schema__(:fields), schema.__schema__(:dump), ix)
      {{:source, source, types}, fields}
    {:error, {:fragment, _, _}} ->
      {{:value, :map}, [{:&, [], [ix]}]}
    {:error, %Ecto.SubQuery{select: select} = subquery} ->
      # Subqueries expose their select's typed columns as fields.
      fields = for {field, _} <- subquery_types(subquery), do: select_field(field, ix)
      {select, fields}
  end
end
# Resolves each requested field through the schema's dump mapping,
# returning the {field, type} pairs and the `&ix.source_field` fetch
# expressions. Non-atom entries are silently skipped. The input is
# reversed first so the reduce yields both lists in original order.
defp select_dump(fields, dumper, ix) do
  fields
  |> Enum.reverse
  |> Enum.reduce({[], []}, fn
    field, {types, exprs} when is_atom(field) ->
      # Fields absent from the dump mapping keep their name and type :any.
      {source, type} = Map.get(dumper, field, {field, :any})
      {[{field, type} | types], [select_field(source, ix) | exprs]}
    _field, acc ->
      acc
  end)
end
# Builds the AST node for fetching `field` from the source bound at `ix`
# (i.e. the quoted form of `&ix.field`).
defp select_field(field, ix) do
  binding = {:&, [], [ix]}
  {{:., [], [binding, field]}, [], []}
end
# Fetches the source tuple for binding index `ix`, turning the
# out-of-range ArgumentError from elem/2 into a descriptive query error.
defp get_source!(where, %{sources: sources} = query, ix) do
  elem(sources, ix)
rescue
  ArgumentError ->
    error! query, "cannot prepare query because it has specified more bindings than " <>
                  "bindings available in `#{where}` (look for `unknown_binding!` in " <>
                  "the printed query below)"
end
## Helpers

# Clause-kind -> query-struct-key mapping, in traversal order.
@exprs [distinct: :distinct, select: :select, from: :from, join: :joins,
        where: :wheres, group_by: :group_bys, having: :havings,
        order_by: :order_bys, limit: :limit, offset: :offset]
# Traverse all query components with expressions.
# Therefore from, preload, assocs and lock are not traversed.
#
# `fun.(kind, query, exprs, acc)` must return {new_exprs, new_acc};
# updates are only traversed for :update_all.
defp traverse_exprs(query, operation, acc, fun) do
  extra =
    case operation do
      :update_all -> [update: :updates]
      _ -> []
    end
  Enum.reduce extra ++ @exprs, {query, acc}, fn {kind, key}, {query, acc} ->
    {traversed, acc} = fun.(kind, query, Map.fetch!(query, key), acc)
    {Map.put(query, key, traversed), acc}
  end
end
# Resolves a field-reference type placeholder.
#
# `{ix, field}` (integer `ix`) refers to `field` on the source at binding
# `ix` and is resolved through `type!/6`; a `{composite, {ix, field}}`
# tuple resolves its inner placeholder while keeping the composite tag.
# Anything else is already a concrete type and is returned as is.
defp field_type!(kind, query, expr, {composite, {ix, field}}) when is_integer(ix) do
  {composite, type!(kind, :type, query, expr, ix, field)}
end

defp field_type!(kind, query, expr, {ix, field}) when is_integer(ix) do
  type!(kind, :type, query, expr, ix, field)
end

defp field_type!(_kind, _query, _expr, type) do
  type
end
# Like `field_type!/4` but resolves via the :source_type lookup (the
# adapter-facing type of the field) instead of :type.
defp source_type!(kind, query, expr, ix, field) do
  type!(kind, :source_type, query, expr, ix, field)
end
# Looks up the type of `field` (via `lookup`, either :type or :source_type)
# for the given binding index or schema module.
#
# A nil source (no schema) yields :any.  Integer indexes are resolved to
# their source first: schemas recurse into the schema clause, fragments
# carry no type information, and subqueries resolve against their own
# select types.  Unknown fields raise a query error.
defp type!(_kind, _lookup, _query, _expr, nil, _field), do: :any

defp type!(kind, lookup, query, expr, ix, field) when is_integer(ix) do
  case get_source!(kind, query, ix) do
    {_, schema} ->
      type!(kind, lookup, query, expr, schema, field)

    {:fragment, _, _} ->
      :any

    %Ecto.SubQuery{} = subquery ->
      case Keyword.fetch(subquery_types(subquery), field) do
        {:ok, {:value, type}} ->
          type

        {:ok, _} ->
          # Non-value selections in the subquery cannot be typed here.
          :any

        :error ->
          error!(query, expr, "field `#{field}` does not exist in subquery")
      end
  end
end

defp type!(kind, lookup, query, expr, schema, field) when is_atom(schema) do
  if type = schema.__schema__(lookup, field) do
    type
  else
    error! query, expr, "field `#{field}` in `#{kind}` does not exist in schema #{inspect schema}"
  end
end
# Normalizes a parameter type before casting.
#
# `{:out, type}` marks values that must be part of an array (e.g. `in`
# expressions): arrays unwrap to their element type, :any passes through,
# and any other inner type is an error.  Plain types are returned as is.
defp normalize_param(kind, type, value) do
  case type do
    {:out, {:array, inner}} ->
      {:ok, inner}

    {:out, :any} ->
      {:ok, :any}

    {:out, other} ->
      {:error, "value `#{inspect value}` in `#{kind}` expected to be part of an array " <>
               "but matched type is #{inspect other}"}

    plain ->
      {:ok, plain}
  end
end
# Casts a runtime parameter value to `type`, returning a descriptive error
# message (for the caller to raise) when the value cannot be cast.
defp cast_param(kind, type, v) do
  case Ecto.Type.cast(type, v) do
    {:ok, v} ->
      {:ok, v}

    :error ->
      {:error, "value `#{inspect v}` in `#{kind}` cannot be cast to type #{inspect type}"}
  end
end
# Dumps an already-cast value to its adapter representation.  The :any
# error branch gets its own message since dumping then goes through
# Ecto.DataType rather than a concrete type.
defp dump_param(adapter, type, v) do
  case Ecto.Type.adapter_dump(adapter, type, v) do
    {:ok, v} ->
      {:ok, v}

    :error when type == :any ->
      {:error, "value `#{inspect v}` cannot be dumped with Ecto.DataType"}

    :error ->
      {:error, "value `#{inspect v}` cannot be dumped to type #{inspect type}"}
  end
end
# Translates a schema field into the column name it is stored under.
# If the field is not found we return the field itself, which will be
# checked and raise later.  Sources without a schema keep the field as is.
defp field_source({_source, schema}, field) when not is_nil(schema) do
  case schema.__schema__(:field_source, field) do
    nil -> field
    source -> source
  end
end

defp field_source(_source, field), do: field
# Ensures an update query sets at least one field and sets no field twice
# across all of its update expressions.
defp assert_update!(%Ecto.Query{updates: updates} = query, operation) do
  changes =
    Enum.reduce(updates, %{}, fn update, acc ->
      Enum.reduce(update.expr, acc, fn {_op, kw}, acc ->
        Enum.reduce(kw, acc, fn {k, v}, acc ->
          # Map.update/4 only calls the fun when the key already exists,
          # so reaching it means `k` was set by an earlier expression.
          Map.update(acc, k, v, fn _ ->
            error! query, "duplicate field `#{k}` for `#{operation}`"
          end)
        end)
      end)
    end)

  if changes == %{} do
    error! query, "`#{operation}` requires at least one field to be updated"
  end
end
# Rejects queries carrying `update` expressions for operations (such as
# plain selects/deletes) that do not support them; returns the query
# untouched otherwise.
defp assert_no_update!(%Ecto.Query{updates: []} = query, _operation), do: query

defp assert_no_update!(query, operation) do
  error! query, "`#{operation}` does not allow `update` expressions"
end
# Restricts `operation` (e.g. update_all/delete_all) to queries that carry
# only `where`/`join` expressions; any other component present raises.
defp assert_only_filter_expressions!(query, operation) do
  case query do
    %Ecto.Query{order_bys: [], limit: nil, offset: nil, group_bys: [],
                havings: [], preloads: [], assocs: [], distinct: nil, lock: nil} ->
      query

    _ ->
      error! query, "`#{operation}` allows only `where` and `join` expressions. " <>
                    "You can exclude unwanted expressions from a query by using " <>
                    "Ecto.Query.exclude/2. Error found"
  end
end
# Re-raises `exception` with this module's own frames stripped from the
# stacktrace so the error points at user code.
#
# NOTE(review): `System.stacktrace/0` is deprecated on Elixir >= 1.7 in
# favor of `__STACKTRACE__`, which is only available inside rescue/catch —
# confirm the supported Elixir version before changing this call site.
defp reraise(exception) do
  reraise exception, Enum.reject(System.stacktrace, &match?({__MODULE__, _, _, _}, &1))
end
# Raises an `Ecto.QueryError` for `query` with the given message.
defp error!(query, message) do
  raise Ecto.QueryError, message: message, query: query
end

# Same as `error!/2` but attributes the error to the file/line recorded
# on the query expression `expr`.
defp error!(query, expr, message) do
  raise Ecto.QueryError, message: message, query: query, file: expr.file, line: expr.line
end
end
| 36.047918 | 133 | 0.617926 |
73933c7d24833026458910ef196610d81437b5ad | 889 | ex | Elixir | lib/chat_api/slack_authorizations/settings.ex | ZmagoD/papercups | dff9a5822b809edc4fd8ecf198566f9b14ab613f | [
"MIT"
] | 4,942 | 2020-07-20T22:35:28.000Z | 2022-03-31T15:38:51.000Z | lib/chat_api/slack_authorizations/settings.ex | ZmagoD/papercups | dff9a5822b809edc4fd8ecf198566f9b14ab613f | [
"MIT"
] | 552 | 2020-07-22T01:39:04.000Z | 2022-02-01T00:26:35.000Z | lib/chat_api/slack_authorizations/settings.ex | ZmagoD/papercups | dff9a5822b809edc4fd8ecf198566f9b14ab613f | [
"MIT"
] | 396 | 2020-07-22T19:27:48.000Z | 2022-03-31T05:25:24.000Z | defmodule ChatApi.SlackAuthorizations.Settings do
use Ecto.Schema
import Ecto.Changeset
@type t :: %__MODULE__{
sync_all_incoming_threads: boolean(),
sync_by_emoji_tagging: boolean(),
sync_trigger_emoji: String.t(),
forward_synced_messages_to_reply_channel: boolean()
}
embedded_schema do
field(:sync_all_incoming_threads, :boolean, default: true)
field(:sync_by_emoji_tagging, :boolean, default: true)
field(:sync_trigger_emoji, :string, default: "eyes")
field(:forward_synced_messages_to_reply_channel, :boolean, default: true)
end
@spec changeset(any(), map()) :: Ecto.Changeset.t()
def changeset(schema, params) do
schema
|> cast(params, [
:sync_all_incoming_threads,
:sync_by_emoji_tagging,
:sync_trigger_emoji,
:forward_synced_messages_to_reply_channel
])
end
end
| 29.633333 | 77 | 0.701912 |
739350710aa2ab277f5bb470214a9ed72d516a25 | 1,117 | exs | Elixir | tooling/pbt/config/config.exs | sfat/programming-elixir-exercises | 19e62e3f3344ec044e1eb1b39b195f4dad3dff1c | [
"Apache-2.0"
] | 1 | 2019-02-17T11:54:17.000Z | 2019-02-17T11:54:17.000Z | tooling/pbt/config/config.exs | sfat/programming-elixir-exercises | 19e62e3f3344ec044e1eb1b39b195f4dad3dff1c | [
"Apache-2.0"
] | null | null | null | tooling/pbt/config/config.exs | sfat/programming-elixir-exercises | 19e62e3f3344ec044e1eb1b39b195f4dad3dff1c | [
"Apache-2.0"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :pbt, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:pbt, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env()}.exs"
| 36.032258 | 73 | 0.748433 |
73935670beb910ca23c28ab35375fe214e89e1b5 | 5,639 | exs | Elixir | apps/raptor/test/unit/raptor_web/authorize_controller_test.exs | Datastillery/smartcitiesdata | f4ab7e6877cfd68cd4bdac15cc879d930d11f270 | [
"Apache-2.0"
] | 18 | 2020-11-13T15:38:24.000Z | 2021-05-26T00:40:08.000Z | apps/raptor/test/unit/raptor_web/authorize_controller_test.exs | Datastillery/smartcitiesdata | f4ab7e6877cfd68cd4bdac15cc879d930d11f270 | [
"Apache-2.0"
] | 365 | 2020-09-21T12:31:40.000Z | 2021-09-25T14:54:21.000Z | apps/raptor/test/unit/raptor_web/authorize_controller_test.exs | Datastillery/smartcitiesdata | f4ab7e6877cfd68cd4bdac15cc879d930d11f270 | [
"Apache-2.0"
] | 3 | 2020-10-06T16:17:49.000Z | 2021-09-03T17:11:41.000Z | defmodule RaptorWeb.AuthorizeControllerTest do
use RaptorWeb.ConnCase
use Placebo
alias Raptor.Services.Auth0Management
alias Raptor.Services.DatasetStore
alias Raptor.Services.UserOrgAssocStore
@authorized_call [
%{
"email_verified" => true,
"user_id" => "penny"
}
]
@multiple_users_call [
%{
"email_verified" => true
},
%{
"email_verified" => true
}
]
@unverified_email_call [
%{
"email_verified" => false
}
]
@unauthorized_call []
describe "authorization checks" do
test "returns true when there is one valid user that has the given api key", %{conn: conn} do
api_key = "enterprise"
system_name = "system__name"
org_id = "dog_stats"
user = @authorized_call |> List.first()
user_id = user["user_id"]
expected = %{"is_authorized" => true}
expect(Auth0Management.get_users_by_api_key(api_key), return: {:ok, @authorized_call})
expect(DatasetStore.get(system_name),
return: %{dataset_id: "wags", system_name: system_name, org_id: org_id}
)
expect(UserOrgAssocStore.get(user_id, org_id),
return: %{user_id: user_id, org_id: org_id, email: "[email protected]"}
)
actual =
conn
|> get("/api/authorize?apiKey=#{api_key}&systemName=#{system_name}")
|> json_response(200)
assert actual == expected
end
test "returns false when the dataset org does not match the user org", %{conn: conn} do
api_key = "enterprise"
system_name = "system__name"
dataset_org_id = "dataset_org"
user = @authorized_call |> List.first()
user_id = user["user_id"]
expected = %{"is_authorized" => false}
expect(Auth0Management.get_users_by_api_key(api_key), return: {:ok, @authorized_call})
expect(DatasetStore.get(system_name),
return: %{dataset_id: "wags", system_name: system_name, org_id: dataset_org_id}
)
expect(UserOrgAssocStore.get(user_id, dataset_org_id),
return: %{}
)
actual =
conn
|> get("/api/authorize?apiKey=#{api_key}&systemName=#{system_name}")
|> json_response(200)
assert actual == expected
end
test "returns false when the system name does not match an existing dataset", %{conn: conn} do
api_key = "enterprise"
system_name = "invalid_system__name"
expected = %{"is_authorized" => false}
expect(Auth0Management.get_users_by_api_key(api_key), return: {:ok, @authorized_call})
expect(DatasetStore.get(system_name),
return: %{}
)
actual =
conn
|> get("/api/authorize?apiKey=#{api_key}&systemName=#{system_name}")
|> json_response(200)
assert actual == expected
end
test "returns an error when the apiKey is not passed", %{conn: conn} do
expected = %{"message" => "apiKey is a required parameter."}
actual =
conn
|> get("/api/authorize?systemName=systemName")
|> json_response(400)
assert actual == expected
end
end
describe "invalid input checks" do
test "returns an error when the systemName is not passed", %{conn: conn} do
expected = %{"message" => "systemName is a required parameter."}
actual =
conn
|> get("/api/authorize?apiKey=apiKey")
|> json_response(400)
assert actual == expected
end
test "returns an error when the apiKey and the systemName are not passed", %{conn: conn} do
expected = %{"message" => "apiKey and systemName are required parameters."}
actual =
conn
|> get("/api/authorize")
|> json_response(400)
assert actual == expected
end
end
describe "authentication checks" do
test "returns false when there is one valid user that has the given api key but their email is not validated",
%{conn: conn} do
api_key = "enterprise"
expected = %{"is_authorized" => false}
expect(Auth0Management.get_users_by_api_key(api_key),
return: {:ok, @unverified_email_call}
)
actual =
conn
|> get("/api/authorize?apiKey=#{api_key}&systemName=system__name")
|> json_response(200)
assert actual == expected
end
test "returns false when there is no valid user with the given api key", %{conn: conn} do
api_key = "intrepid"
expected = %{"is_authorized" => false}
expect(Auth0Management.get_users_by_api_key(api_key),
return: {:ok, @unauthorized_call}
)
actual =
conn
|> get("/api/authorize?apiKey=#{api_key}&systemName=system__name")
|> json_response(200)
assert actual == expected
end
test "returns false when there are multiple users with the given api key", %{conn: conn} do
api_key = "intrepid"
expected = %{"is_authorized" => false}
expect(Auth0Management.get_users_by_api_key(api_key),
return: {:ok, @multiple_users_call}
)
actual =
conn
|> get("/api/authorize?apiKey=#{api_key}&systemName=system__name")
|> json_response(200)
assert actual == expected
end
test "returns false if the auth0 management api returns an error", %{conn: conn} do
api_key = "intrepid"
expected = %{"is_authorized" => false}
expect(Auth0Management.get_users_by_api_key(api_key), return: {:error, []})
actual =
conn
|> get("/api/authorize?apiKey=#{api_key}&systemName=system__name")
|> json_response(200)
assert actual == expected
end
end
end
| 28.054726 | 114 | 0.625466 |
73937a9e1f278c592c64108ec9f38b5b9b0d94aa | 4,008 | ex | Elixir | apps/omg_utils/lib/omg_utils/http_rpc/response.ex | karmonezz/elixir-omg | 3b26fc072fa553992277e1b9c4bad37b3d61ec6a | [
"Apache-2.0"
] | 1 | 2020-05-01T12:30:09.000Z | 2020-05-01T12:30:09.000Z | apps/omg_utils/lib/omg_utils/http_rpc/response.ex | karmonezz/elixir-omg | 3b26fc072fa553992277e1b9c4bad37b3d61ec6a | [
"Apache-2.0"
] | null | null | null | apps/omg_utils/lib/omg_utils/http_rpc/response.ex | karmonezz/elixir-omg | 3b26fc072fa553992277e1b9c4bad37b3d61ec6a | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Utils.HttpRPC.Response do
  @moduledoc """
  Serializes the response into expected result/data format.
  """
  alias OMG.Utils.HttpRPC.Encoding

  # Short git SHA resolved at compile time; appended to the reported version
  # so responses identify the exact build.
  @sha String.replace(elem(System.cmd("git", ["rev-parse", "--short=7", "HEAD"]), 0), "\n", "")

  @type response_t :: %{version: binary(), success: boolean(), data: map()}

  # Serializes `data` and attaches pagination info under `:data_paging`.
  def serialize_page(data, data_paging) do
    data
    |> serialize()
    |> Map.put(:data_paging, data_paging)
  end

  @doc """
  Append result of operation to the response data forming standard api response structure
  """
  @spec serialize(any()) :: response_t()
  # Error payloads (tagged with `object: :error`) skip sanitization and are
  # wrapped with `success: false`.
  def serialize(%{object: :error} = error) do
    to_response(error, :error)
    |> add_version()
    |> add_service_name()
  end

  def serialize(data) do
    data
    |> sanitize()
    |> to_response(:success)
    |> add_version()
    |> add_service_name()
  end

  @doc """
  Removes or encodes fields in response that cannot be serialized to api response.
  By default, it:
   * encodes to hex all binary values
   * removes metadata fields

  Provides standard data structure for API response
  """
  @spec sanitize(any()) :: any()
  def sanitize(response)

  def sanitize(list) when is_list(list) do
    list |> Enum.map(&sanitize/1)
  end

  def sanitize(map_or_struct) when is_map(map_or_struct) do
    map_or_struct
    |> to_map()
    |> do_filter()
    |> sanitize_map()
  end

  # Raw binaries become hex-encoded strings.
  def sanitize(bin) when is_binary(bin), do: Encoding.to_hex(bin)
  # Explicit opt-out wrapper: return the binary untouched.
  def sanitize({:skip_hex_encode, bin}), do: bin
  def sanitize({{key, value}, _}), do: Map.put_new(%{}, key, value)
  def sanitize({key, value}), do: Map.put_new(%{}, key, value)
  def sanitize(value), do: value

  # Drops not-loaded Ecto associations, but only when Ecto is compiled in
  # (this module is shared by apps with and without Ecto loaded).
  defp do_filter(map_or_struct) do
    if :code.is_loaded(Ecto) do
      Enum.filter(map_or_struct, fn
        {_, %{__struct__: Ecto.Association.NotLoaded}} -> false
        _ -> true
      end)
      |> Map.new()
    else
      map_or_struct
    end
  end

  # Allows skipping sanitization for the keys listed under :skip_hex_encode.
  defp sanitize_map(map) do
    {skip_keys, map} = Map.pop(map, :skip_hex_encode, [])
    skip_keys = MapSet.new(skip_keys)

    map
    |> Enum.map(fn {k, v} ->
      case MapSet.member?(skip_keys, k) do
        true -> {k, v}
        false -> {k, sanitize(v)}
      end
    end)
    |> Map.new()
  end

  # Strips struct metadata so structs serialize like plain maps.
  defp to_map(struct), do: Map.drop(struct, [:__struct__, :__meta__])

  # Wraps data in the standard success/data envelope.
  defp to_response(data, result),
    do: %{
      success: result == :success,
      data: data
    }

  # not the most beautiful way of doing this but
  # because our "response serializer" is in utils there's no other way:
  # detect which RPC app is loaded and read its version.
  defp add_version(response) do
    vsn =
      case :code.is_loaded(OMG.ChildChainRPC) do
        {:file, _} ->
          {:ok, vsn} = :application.get_key(:omg_child_chain_rpc, :vsn)
          vsn

        _ ->
          {:ok, vsn} = :application.get_key(:omg_watcher_rpc, :vsn)
          vsn
      end

    Map.merge(response, %{version: List.to_string(vsn) <> "+" <> @sha})
  end

  # Same detection trick as `add_version/1`: the loaded RPC app determines
  # which service name to report.
  defp add_service_name(response) do
    service_name = service_name()
    # Inject it into the response
    Map.merge(response, %{service_name: service_name})
  end

  defp service_name do
    case :code.is_loaded(OMG.ChildChainRPC) do
      {:file, _} ->
        "child_chain"

      _ ->
        "watcher"
    end
  end
end
73937c1cbe39e4e0e486880361bbfa5fc9df082c | 3,637 | exs | Elixir | test/entice/coordination_test.exs | hoodaly/entity | e325231c7c15375e45889e8bd0b71d48c60974ca | [
"WTFPL"
] | null | null | null | test/entice/coordination_test.exs | hoodaly/entity | e325231c7c15375e45889e8bd0b71d48c60974ca | [
"WTFPL"
] | null | null | null | test/entice/coordination_test.exs | hoodaly/entity | e325231c7c15375e45889e8bd0b71d48c60974ca | [
"WTFPL"
] | null | null | null | defmodule Entice.Logic.CoordinationTest do
use ExUnit.Case, async: true
alias Entice.Entity
alias Entice.Entity.{Coordination, Test.Spy}
defmodule TestAttr1, do: defstruct foo: 1337, bar: "lol"
defmodule TestAttr2, do: defstruct baz: false
defmodule TestAttr3, do: defstruct crux: "hello"
setup do
{:ok, eid, _pid} = Entity.start
eid |> Coordination.register(__MODULE__)
Entity.put_attribute(eid, %TestAttr1{})
Entity.put_attribute(eid, %TestAttr2{})
Coordination.register_observer(self, __MODULE__)
{:ok, [entity_id: eid]}
end
test "entity notification", %{entity_id: eid} do
Spy.register(eid, self())
assert :ok = Coordination.notify(eid, :something)
assert_receive %{sender: ^eid, event: :something}
end
test "notification of all entities" do
{:ok, id1, e1} = Entity.start
{:ok, id2, e2} = Entity.start
{:ok, id3, e3} = Entity.start
Coordination.register(e1, __MODULE__)
Coordination.register(e2, __MODULE__)
Coordination.register(e3, __MODULE__)
Spy.register(e1, self())
Spy.register(e2, self())
Spy.register(e3, self())
Coordination.notify_all(__MODULE__, :test_message)
assert_receive %{sender: ^id1, event: :test_message}
assert_receive %{sender: ^id2, event: :test_message}
assert_receive %{sender: ^id3, event: :test_message}
end
test "notification of all entities local to an entity", %{entity_id: eid} do
{:ok, id1, e1} = Entity.start
{:ok, id2, e2} = Entity.start
{:ok, id3, e3} = Entity.start
Coordination.register(e1, __MODULE__)
Coordination.register(e2, __MODULE__)
Coordination.register(e3, __MODULE__)
Spy.register(e1, self())
Spy.register(e2, self())
Spy.register(e3, self())
Coordination.notify_locally(eid, :test_message)
assert_receive %{sender: ^id1, event: :test_message}
assert_receive %{sender: ^id2, event: :test_message}
assert_receive %{sender: ^id3, event: :test_message}
end
test "observer registry", %{entity_id: eid} do
assert_receive {:entity_join, %{
entity_id: ^eid,
attributes: %{
TestAttr1 => %TestAttr1{},
TestAttr2 => %TestAttr2{}}}}
end
test "add attributes", %{entity_id: eid} do
Entity.put_attribute(eid, %TestAttr3{})
assert_receive {:entity_change, %{
entity_id: ^eid,
added: %{TestAttr3 => %TestAttr3{}},
changed: %{},
removed: %{}}}
end
test "change attributes", %{entity_id: eid} do
Entity.put_attribute(eid, %TestAttr1{foo: 42})
assert_receive {:entity_change, %{
entity_id: ^eid,
added: %{},
changed: %{TestAttr1 => %TestAttr1{foo: 42}},
removed: %{}}}
end
test "delete attributes", %{entity_id: eid} do
Entity.remove_attribute(eid, TestAttr1)
assert_receive {:entity_change, %{
entity_id: ^eid,
added: %{},
changed: %{},
removed: %{TestAttr1 => %TestAttr1{}}}}
end
test "entity join" do
{:ok, eid2, _pid} = Entity.start_plain()
Coordination.register(eid2, __MODULE__)
assert_receive {:entity_join, %{
entity_id: ^eid2,
attributes: %{}}}
end
test "entity leave", %{entity_id: eid} do
Entity.stop(eid)
assert_receive {:entity_leave, %{
entity_id: ^eid,
attributes: %{
TestAttr1 => %TestAttr1{},
TestAttr2 => %TestAttr2{}}}}
end
test "gracefully stopping of channels" do
assert :ok = Coordination.stop_channel(__MODULE__)
assert :ok = Coordination.stop_channel(:non_existing_channel)
assert :error = Coordination.notify_all(:non_existing_channel, :blubb)
end
end
| 27.345865 | 78 | 0.653561 |
739395d7082fb154b95aa9ff77d91e422e634862 | 479 | exs | Elixir | test/10_parse/30_example_functions/params_test.exs | marick/ecto_test_dsl | 6d460af093367098b7c78db709753deb45904d77 | [
"Unlicense"
] | 4 | 2021-02-09T17:26:34.000Z | 2021-08-08T01:42:52.000Z | test/10_parse/30_example_functions/params_test.exs | marick/transformer_test_support | 6d460af093367098b7c78db709753deb45904d77 | [
"Unlicense"
] | null | null | null | test/10_parse/30_example_functions/params_test.exs | marick/transformer_test_support | 6d460af093367098b7c78db709753deb45904d77 | [
"Unlicense"
] | null | null | null | defmodule Parse.ExampleFunctions.ParamsTest do
use EctoTestDSL.Case
use T.Drink.AndParse
use T.Parse.Exports
describe "creation" do
test "without eens" do
assert params(id: 5, age: 3) == {:params, Pnode.Params.parse(%{id: 5, age: 3})}
end
test "with eens" do
input = [id: 5, other_id: id_of(:other)]
expected = %{id: 5, other_id: id_of(:other)}
assert params(input) == {:params, Pnode.Params.parse(expected)}
end
end
end
| 25.210526 | 85 | 0.634656 |
7393ad5a786f26b84c245993a1bb28869ef1e6a7 | 413 | ex | Elixir | test/support/factory.ex | jesseshieh/chess | 80e6854d5f0a05420e5eea6deee9e41f03445b5a | [
"MIT"
] | null | null | null | test/support/factory.ex | jesseshieh/chess | 80e6854d5f0a05420e5eea6deee9e41f03445b5a | [
"MIT"
] | null | null | null | test/support/factory.ex | jesseshieh/chess | 80e6854d5f0a05420e5eea6deee9e41f03445b5a | [
"MIT"
] | null | null | null | defmodule Chess.Factory do
alias Chess.Auth.User
alias Chess.Store.Game
alias Chess.Repo
def create_user(username \\ "zelda", password \\ "password") do
User.changeset(
%User{},
%{username: username, password: password}
)
|> Repo.insert!
end
def create_game_for(user) do
Game.create_changeset(
%Game{},
%{user_id: user.id}
)
|> Repo.insert!
end
end
| 18.772727 | 65 | 0.624697 |
7393bd6a2bad61f4210f5604fb88dc34f561ffc2 | 1,125 | exs | Elixir | config/config.exs | kor-and-m/exproto | 64fcacd898b7a7947ec224e799caf9feacf78928 | [
"MIT"
] | 4 | 2018-03-19T13:34:35.000Z | 2018-07-11T19:20:34.000Z | config/config.exs | kor-and-m/exproto | 64fcacd898b7a7947ec224e799caf9feacf78928 | [
"MIT"
] | 1 | 2021-06-28T12:46:27.000Z | 2021-06-28T12:46:27.000Z | config/config.exs | k10pr0f/exproto | 3ae86c1308527a723465a14236849c7ab9ba949c | [
"MIT"
] | 1 | 2019-05-27T11:46:01.000Z | 2019-05-27T11:46:01.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :protobuf, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:protobuf, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.290323 | 73 | 0.752 |
7393c8372a177ea2f30dcaf95e0373f2016ee843 | 11,137 | exs | Elixir | test/ex_force/oauth_test.exs | gabrielpra1/ex_force | dcfad966649769b7fc581e9be770e895f5bcd12f | [
"MIT"
] | null | null | null | test/ex_force/oauth_test.exs | gabrielpra1/ex_force | dcfad966649769b7fc581e9be770e895f5bcd12f | [
"MIT"
] | null | null | null | test/ex_force/oauth_test.exs | gabrielpra1/ex_force | dcfad966649769b7fc581e9be770e895f5bcd12f | [
"MIT"
] | 2 | 2020-10-15T23:11:22.000Z | 2020-10-16T14:33:26.000Z | defmodule ExForce.OAuthTest do
use ExUnit.Case, async: true
doctest(ExForce.OAuth)
alias ExForce.{OAuth, OAuthResponse}
alias Plug.Conn
@unreachable_url "http://257.0.0.0:0"
setup do
with bypass <- Bypass.open(),
client <- OAuth.build_client(bypass_url(bypass)) do
{:ok, bypass: bypass, client: client}
end
end
def bypass_url(bypass), do: "http://127.0.0.1:#{bypass.port}"
defp assert_form_body(conn, expected) do
["application/x-www-form-urlencoded" <> _] = Conn.get_req_header(conn, "content-type")
{:ok, raw, conn} = Conn.read_body(conn)
assert URI.decode_query(raw) == expected
conn
end
defp to_issued_at(string) do
{:ok, issued_at, 0} = DateTime.from_iso8601(string)
issued_at
end
test "authorize_url/2 returns URL for response_type=code" do
assert OAuth.authorize_url(
"https://login.salesforce.com",
response_type: :code,
client_id: "client_id_foo",
redirect_uri: "http://127.0.0.1:8080/callback",
scope: "api refresh_token"
) ==
"https://login.salesforce.com/services/oauth2/authorize?response_type=code&client_id=client_id_foo&redirect_uri=http%3A%2F%2F127.0.0.1%3A8080%2Fcallback&scope=api+refresh_token"
end
test "authorize_url/2 returns URL for response_type=token" do
assert OAuth.authorize_url(
"https://login.salesforce.com",
response_type: :token,
client_id: "client_id_foo",
redirect_uri: "http://127.0.0.1:8080/callback",
scope: "api refresh_token"
) ==
"https://login.salesforce.com/services/oauth2/authorize?response_type=token&client_id=client_id_foo&redirect_uri=http%3A%2F%2F127.0.0.1%3A8080%2Fcallback&scope=api+refresh_token"
end
test "get_token/2 - authorization_code - success", %{bypass: bypass, client: client} do
Bypass.expect_once(bypass, "POST", "/services/oauth2/token", fn conn ->
conn
|> assert_form_body(%{
"grant_type" => "authorization_code",
"client_id" => "client_id_foo",
"client_secret" => "client_secret_bar",
"code" => "code_foo",
"redirect_uri" => "http://127.0.0.1:8080/callback"
})
|> Conn.put_resp_content_type("application/json")
|> Conn.resp(200, """
{
"access_token": "access_token_foo",
"refresh_token": "refresh_token_foo",
"signature": "RNy9G2E/bedQgdKoiqPGFgeIaxH0NR774kf1fwJvo8Y=",
"scope": "refresh_token api",
"instance_url": "https://example.com",
"id": "https://example.com/id/fakeid",
"token_type": "Bearer",
"issued_at": "1505149885697"
}
""")
end)
assert OAuth.get_token(
client,
grant_type: :authorization_code,
client_id: "client_id_foo",
client_secret: "client_secret_bar",
code: "code_foo",
redirect_uri: "http://127.0.0.1:8080/callback"
) ==
{:ok,
%OAuthResponse{
access_token: "access_token_foo",
refresh_token: "refresh_token_foo",
signature: "RNy9G2E/bedQgdKoiqPGFgeIaxH0NR774kf1fwJvo8Y=",
scope: "refresh_token api",
instance_url: "https://example.com",
id: "https://example.com/id/fakeid",
token_type: "Bearer",
issued_at: to_issued_at("2017-09-11T17:11:25.697Z")
}}
end
test "get_token/2 - authorization_code - invalid_grant", %{bypass: bypass, client: client} do
Bypass.expect_once(bypass, "POST", "/services/oauth2/token", fn conn ->
conn
|> Conn.put_resp_content_type("application/json")
|> Conn.resp(200, """
{
"access_token": "access_token_foo",
"refresh_token": "refresh_token_foo",
"signature": "badsignature",
"scope": "refresh_token api",
"instance_url": "https://example.com",
"id": "https://example.com/id/fakeid",
"token_type": "Bearer",
"issued_at": "1505149885697"
}
""")
end)
assert OAuth.get_token(
client,
grant_type: :authorization_code,
client_id: "client_id_foo",
client_secret: "client_secret_bar",
code: "code_foo",
redirect_uri: "http://127.0.0.1:8080/callback"
) == {:error, :invalid_signature}
end
test "get_token/2 - authorization_code - expired", %{bypass: bypass, client: client} do
Bypass.expect_once(bypass, "POST", "/services/oauth2/token", fn conn ->
conn
|> Conn.put_resp_content_type("application/json")
|> Conn.resp(400, """
{
"error": "invalid_grant",
"error_description": "expired authorization code"
}
""")
end)
assert OAuth.get_token(
client,
grant_type: :authorization_code,
client_id: "client_id_foo",
client_secret: "client_secret_bar",
code: "code_foo",
redirect_uri: "http://127.0.0.1:8080/callback"
) ==
{:error,
%{
"error" => "invalid_grant",
"error_description" => "expired authorization code"
}}
end
test "get_token/2 - refresh_token - success", %{bypass: bypass, client: client} do
Bypass.expect_once(bypass, "POST", "/services/oauth2/token", fn conn ->
conn
|> assert_form_body(%{
"grant_type" => "refresh_token",
"client_id" => "client_id_foo",
"client_secret" => "client_secret_bar",
"refresh_token" => "refresh_token_foo"
})
|> Conn.put_resp_content_type("application/json")
|> Conn.resp(200, """
{
"access_token": "access_token_foo",
"signature": "RNy9G2E/bedQgdKoiqPGFgeIaxH0NR774kf1fwJvo8Y=",
"scope": "refresh_token api",
"instance_url": "https://example.com",
"id": "https://example.com/id/fakeid",
"token_type": "Bearer",
"issued_at": "1505149885697"
}
""")
end)
assert OAuth.get_token(
client,
grant_type: :refresh_token,
client_id: "client_id_foo",
client_secret: "client_secret_bar",
refresh_token: "refresh_token_foo"
) ==
{:ok,
%OAuthResponse{
access_token: "access_token_foo",
signature: "RNy9G2E/bedQgdKoiqPGFgeIaxH0NR774kf1fwJvo8Y=",
scope: "refresh_token api",
instance_url: "https://example.com",
id: "https://example.com/id/fakeid",
token_type: "Bearer",
issued_at: to_issued_at("2017-09-11T17:11:25.697Z")
}}
end
test "get_token/2 - refresh_token - expired", %{bypass: bypass, client: client} do
Bypass.expect_once(bypass, "POST", "/services/oauth2/token", fn conn ->
conn
|> Conn.put_resp_content_type("application/json")
|> Conn.resp(400, """
{
"error": "invalid_grant",
"error_description": "expired access/refresh token"
}
""")
end)
assert OAuth.get_token(
client,
grant_type: :refresh_token,
client_id: "client_id_foo",
client_secret: "client_secret_bar",
refresh_token: "refresh_token_foo"
) ==
{:error,
%{
"error" => "invalid_grant",
"error_description" => "expired access/refresh token"
}}
end
test "get_token/2 - password - success", %{bypass: bypass, client: client} do
Bypass.expect_once(bypass, "POST", "/services/oauth2/token", fn conn ->
conn
|> assert_form_body(%{
"grant_type" => "password",
"client_id" => "client_id_foo",
"client_secret" => "client_secret_bar",
"username" => "[email protected]",
"password" => "a0!#$%-_=+<>"
})
|> Conn.put_resp_content_type("application/json")
|> Conn.resp(200, """
{
"access_token": "access_token_foo",
"instance_url": "https://example.com",
"id": "https://example.com/id/fakeid",
"token_type": "Bearer",
"issued_at": "1505149885697",
"signature": "RNy9G2E/bedQgdKoiqPGFgeIaxH0NR774kf1fwJvo8Y="
}
""")
end)
assert OAuth.get_token(
client,
grant_type: :password,
client_id: "client_id_foo",
client_secret: "client_secret_bar",
username: "[email protected]",
password: "a0!#$%-_=+<>"
) ==
{:ok,
%OAuthResponse{
access_token: "access_token_foo",
instance_url: "https://example.com",
id: "https://example.com/id/fakeid",
token_type: "Bearer",
issued_at: to_issued_at("2017-09-11T17:11:25.697Z"),
signature: "RNy9G2E/bedQgdKoiqPGFgeIaxH0NR774kf1fwJvo8Y="
}}
end
test "get_token/2 - password - failure", %{bypass: bypass, client: client} do
Bypass.expect_once(bypass, "POST", "/services/oauth2/token", fn conn ->
conn
|> Conn.put_resp_content_type("application/json")
|> Conn.resp(400, """
{
"error": "invalid_grant",
"error_description": "authentication failure"
}
""")
end)
assert OAuth.get_token(
client,
grant_type: :password,
client_id: "client_id_foo",
client_secret: "client_secret_bar",
username: "[email protected]",
password: "a0!#$%-_=+<>"
) ==
{:error,
%{
"error" => "invalid_grant",
"error_description" => "authentication failure"
}}
end
test "get_token/2 with url", %{bypass: bypass} do
Bypass.expect_once(bypass, "POST", "/services/oauth2/token", fn conn ->
conn
|> Conn.put_resp_content_type("application/json")
|> Conn.resp(400, """
{
"error": "invalid_grant",
"error_description": "authentication failure"
}
""")
end)
assert OAuth.get_token(
bypass_url(bypass),
grant_type: :password,
client_id: "client_id_foo",
client_secret: "client_secret_bar",
username: "[email protected]",
password: "a0!#$%-_=+<>"
) ==
{:error,
%{
"error" => "invalid_grant",
"error_description" => "authentication failure"
}}
end
test "get_token/2 with bad url" do
assert OAuth.get_token(
@unreachable_url,
grant_type: :password,
client_id: "client_id_foo",
client_secret: "client_secret_bar",
username: "[email protected]",
password: "a0!#$%-_=+<>"
) == {:error, :econnrefused}
end
end
| 33.851064 | 191 | 0.556254 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.