hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
030f41e4041c701cc1b2efe7b26c22cc6a28a4e5 | 280 | ex | Elixir | lib/fragment/structured_text/span/label.ex | TheRealReal/prismic-elixir | 0850b922be6d2f2d541add8426fe27f8ce5f63eb | [
"MIT"
] | 10 | 2018-01-12T18:52:53.000Z | 2022-02-17T06:00:21.000Z | lib/fragment/structured_text/span/label.ex | TheRealReal/prismic-elixir | 0850b922be6d2f2d541add8426fe27f8ce5f63eb | [
"MIT"
] | 12 | 2018-02-14T23:08:08.000Z | 2021-11-22T15:55:41.000Z | lib/fragment/structured_text/span/label.ex | TheRealReal/prismic-elixir | 0850b922be6d2f2d541add8426fe27f8ce5f63eb | [
"MIT"
] | 8 | 2018-05-08T14:02:21.000Z | 2021-12-15T08:19:55.000Z | alias Prismic.Fragment.StructuredText.Span
defmodule Span.Label do
defstruct [:label, :start, :end]
@type t :: %__MODULE__{label: String.t(), start: Integer.t(), end: Integer.t()}
end
defimpl Span, for: Span.Label do
# TODO
def serialize(_, _link_resolver), do: ""
end
| 21.538462 | 81 | 0.696429 |
030f72faf4f7e7c982b560af2eaf5d78338f9a9d | 1,826 | ex | Elixir | test/support/model_case.ex | Symbolk/social_network | 11df1ba9bc19fd140b630ac2abbd4b13b42def92 | [
"MIT"
] | 17 | 2017-01-02T10:38:28.000Z | 2021-02-28T22:16:54.000Z | test/support/model_case.ex | Symbolk/social_network | 11df1ba9bc19fd140b630ac2abbd4b13b42def92 | [
"MIT"
] | null | null | null | test/support/model_case.ex | Symbolk/social_network | 11df1ba9bc19fd140b630ac2abbd4b13b42def92 | [
"MIT"
] | 2 | 2017-01-09T13:02:13.000Z | 2018-06-16T22:01:53.000Z | defmodule SocialNetwork.ModelCase do
@moduledoc """
This module defines the test case to be used by
model tests.
You may define functions here to be used as helpers in
your model tests. See `errors_on/2`'s definition as reference.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias SocialNetwork.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import SocialNetwork.ModelCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(SocialNetwork.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(SocialNetwork.Repo, {:shared, self()})
end
:ok
end
@doc """
Helper for returning list of errors in a struct when given certain data.
## Examples
Given a User schema that lists `:name` as a required field and validates
`:password` to be safe, it would return:
iex> errors_on(%User{}, %{password: "password"})
[password: "is unsafe", name: "is blank"]
You could then write your assertion like:
assert {:password, "is unsafe"} in errors_on(%User{}, %{password: "password"})
You can also create the changeset manually and retrieve the errors
field directly:
iex> changeset = User.changeset(%User{}, password: "password")
iex> {:password, "is unsafe"} in changeset.errors
true
"""
def errors_on(struct, data) do
struct.__struct__.changeset(struct, data)
|> Ecto.Changeset.traverse_errors(&SocialNetwork.ErrorHelpers.translate_error/1)
|> Enum.flat_map(fn {key, errors} -> for msg <- errors, do: {key, msg} end)
end
end
| 27.666667 | 84 | 0.691128 |
030f7ab0257c078778a4ca84c321b7a9e102186b | 354 | exs | Elixir | priv/repo/seeds.exs | holandes22/rotterdam | d8b56079638c15a8492c08a6859ed14413163e62 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | holandes22/rotterdam | d8b56079638c15a8492c08a6859ed14413163e62 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | holandes22/rotterdam | d8b56079638c15a8492c08a6859ed14413163e62 | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Rotterdam.Repo.insert!(%Rotterdam.SomeModel{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.5 | 61 | 0.70904 |
030f8748d4119df25cbb2799a75ff921dd95650a | 1,470 | ex | Elixir | apps/blockchain/lib/mix/tasks/blockchain_tests.run.ex | wolflee/mana | db66dac85addfaad98d40da5bd4082b3a0198bb1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 152 | 2018-10-27T04:52:03.000Z | 2022-03-26T10:34:00.000Z | apps/blockchain/lib/mix/tasks/blockchain_tests.run.ex | wolflee/mana | db66dac85addfaad98d40da5bd4082b3a0198bb1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 270 | 2018-04-14T07:34:57.000Z | 2018-10-25T18:10:45.000Z | apps/blockchain/lib/mix/tasks/blockchain_tests.run.ex | wolflee/mana | db66dac85addfaad98d40da5bd4082b3a0198bb1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 25 | 2018-10-27T12:15:13.000Z | 2022-01-25T20:31:14.000Z | defmodule Mix.Tasks.BlockchainTests.Run do
use Mix.Task
require Logger
alias EthCommonTest.BlockchainTestRunner
@shortdoc "Runs a single blockchain common test"
@moduledoc """
Runs a single blockchain common test.
## Example
From the blockchain app,
```
mix blockchain_tests.run "stSpecialTest/failed_tx" --fork "SpuriousDragon"
```
## Command line options
* `--fork`, `-f` - the name of the hardfork to run (optional)
"""
@preferred_cli_env :test
@switches [test: :string, fork: :string]
@aliases [hardfork: :fork]
def run(args) do
{opts, [test_name | _]} = OptionParser.parse!(args, switches: @switches, aliases: @aliases)
hardfork = Keyword.get(opts, :fork, :all)
test_name
|> find_full_name()
|> BlockchainTestRunner.run(hardfork)
|> Enum.map(&log_result/1)
end
defp log_result({:pass, {fork, name, _ex, _act}}) do
Mix.shell().info("[#{fork}] #{name} passed")
end
defp log_result({:fail, {fork, name, expected, actual}}) do
message = """
[#{fork}] #{name} failed:
Expected: #{Base.encode16(expected, case: :lower)},
Actual: #{Base.encode16(actual, case: :lower)}
"""
Mix.shell().error(message)
end
defp find_full_name(name) do
EthCommonTest.Helpers.ethereum_common_tests_path()
|> Path.join("/BlockchainTests/**/*.json")
|> Path.wildcard()
|> Enum.find(fn full_name ->
String.contains?(full_name, name)
end)
end
end
| 22.615385 | 95 | 0.64966 |
030f9a058475892e9c0d6631f3df4072227efdf0 | 1,425 | ex | Elixir | example_applications/web_app/lib/web_app_web/router.ex | andersonmcook/prom_ex | 4913b15be186db29ee9fe800bf6baf6807e1902d | [
"MIT"
] | null | null | null | example_applications/web_app/lib/web_app_web/router.ex | andersonmcook/prom_ex | 4913b15be186db29ee9fe800bf6baf6807e1902d | [
"MIT"
] | null | null | null | example_applications/web_app/lib/web_app_web/router.ex | andersonmcook/prom_ex | 4913b15be186db29ee9fe800bf6baf6807e1902d | [
"MIT"
] | null | null | null | defmodule WebAppWeb.Router do
use WebAppWeb, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_live_flash
plug :put_root_layout, {WebAppWeb.LayoutView, :root}
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", WebAppWeb do
pipe_through :browser
live "/", PageLive, :index
end
scope "/", WebAppWeb do
pipe_through :api
get "/users", UserController, :index
get "/users/:id", UserController, :show
post "/users/:id/action/level-up", UserController, :level_up
post "/users", UserController, :create
delete "/users/:id", UserController, :delete
end
# Other scopes may use custom stacks.
# scope "/api", WebAppWeb do
# pipe_through :api
# end
# Enables LiveDashboard only for development
#
# If you want to use the LiveDashboard in production, you should put
# it behind authentication and allow only admins to access it.
# If your application does not have an admins-only section yet,
# you can use Plug.BasicAuth to set up some basic authentication
# as long as you are also using SSL (which you should anyway).
if Mix.env() in [:dev, :test] do
import Phoenix.LiveDashboard.Router
scope "/" do
pipe_through :browser
live_dashboard "/dashboard", metrics: WebAppWeb.Telemetry
end
end
end
| 26.388889 | 70 | 0.691228 |
030fcdfba875e21a6708365f60afbdf11110fb74 | 1,057 | exs | Elixir | test/elixir/test/batch_save_test.exs | mtenrero/couchdb-vetcontrol | b7ede3ededdf0072c73f08d8f1217cb723b03f7a | [
"Apache-2.0"
] | 1 | 2017-07-05T18:50:12.000Z | 2017-07-05T18:50:12.000Z | test/elixir/test/batch_save_test.exs | mtenrero/couchdb-vetcontrol | b7ede3ededdf0072c73f08d8f1217cb723b03f7a | [
"Apache-2.0"
] | 1 | 2018-10-01T11:37:24.000Z | 2018-10-03T08:57:30.000Z | test/elixir/test/batch_save_test.exs | garrensmith/couchdb | 25838d078b1cf8ef5554f41c0b51d8628ca712ba | [
"Apache-2.0"
] | 1 | 2018-02-13T16:17:04.000Z | 2018-02-13T16:17:04.000Z | defmodule BatchSaveTest do
use CouchTestCase
@moduletag :batch_save
@moduledoc """
Test CouchDB batch save
This is a port of batch_save.js
"""
@doc_count 100
@tag :with_db
test "batch put", context do
path_fun = &"/#{&1}/#{&2}"
run(&Couch.put/2, path_fun, context[:db_name], @doc_count)
end
@tag :with_db
test "batch post", context do
path_fun = fn db_name, _ -> "/#{db_name}" end
run(&Couch.post/2, path_fun, context[:db_name], @doc_count)
end
@tag :with_db
test "batch put with identical doc ids", context do
path_fun = fn db_name, _ -> "/#{db_name}/foo" end
run(&Couch.put/2, path_fun, context[:db_name], 1)
end
defp run(req_fun, path_fun, db_name, expected_doc_count) do
for i <- 1..@doc_count do
opts = [body: %{a: i, b: i}, query: %{batch: "ok"}]
resp = req_fun.(path_fun.(db_name, i), opts)
assert resp.body["ok"] and resp.status_code == 202
end
retry_until(fn ->
Couch.get("/#{db_name}").body["doc_count"] == expected_doc_count
end)
end
end
| 24.581395 | 70 | 0.630085 |
0310ef1ed2d76584e1308088b491d770a4003a7b | 1,119 | exs | Elixir | config/config.exs | suddenrushofsushi/epoxy | 9d4f101b6221d10f3eb7cdf836d3f1b1fe8c5e02 | [
"Apache-2.0"
] | 1 | 2017-09-18T13:56:24.000Z | 2017-09-18T13:56:24.000Z | config/config.exs | suddenrushofsushi/epoxy | 9d4f101b6221d10f3eb7cdf836d3f1b1fe8c5e02 | [
"Apache-2.0"
] | null | null | null | config/config.exs | suddenrushofsushi/epoxy | 9d4f101b6221d10f3eb7cdf836d3f1b1fe8c5e02 | [
"Apache-2.0"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :epoxy, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:epoxy, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.096774 | 73 | 0.75067 |
0310f568350d448b604523a218173275b82460c7 | 6,235 | ex | Elixir | clients/service_user/lib/google_api/service_user/v1/model/metric_descriptor.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/service_user/lib/google_api/service_user/v1/model/metric_descriptor.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/service_user/lib/google_api/service_user/v1/model/metric_descriptor.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.ServiceUser.V1.Model.MetricDescriptor do
@moduledoc """
Defines a metric type and its schema. Once a metric descriptor is created, deleting or altering it stops data collection and makes the metric type's existing data unusable.
## Attributes
- description (String): A detailed description of the metric, which can be used in documentation. Defaults to: `null`.
- displayName (String): A concise name for the metric, which can be displayed in user interfaces. Use sentence case without an ending period, for example \"Request count\". Defaults to: `null`.
- labels (List[LabelDescriptor]): The set of labels that can be used to describe a specific instance of this metric type. For example, the `appengine.googleapis.com/http/server/response_latencies` metric type has a label for the HTTP response code, `response_code`, so you can look at latencies for successful responses or just for responses that failed. Defaults to: `null`.
- metricKind (String): Whether the metric records instantaneous values, changes to a value, etc. Some combinations of `metric_kind` and `value_type` might not be supported. Defaults to: `null`.
- Enum - one of [METRIC_KIND_UNSPECIFIED, GAUGE, DELTA, CUMULATIVE]
- name (String): The resource name of the metric descriptor. Depending on the implementation, the name typically includes: (1) the parent resource name that defines the scope of the metric type or of its data; and (2) the metric's URL-encoded type, which also appears in the `type` field of this descriptor. For example, following is the resource name of a custom metric within the GCP project `my-project-id`: \"projects/my-project-id/metricDescriptors/custom.googleapis.com%2Finvoice%2Fpaid%2Famount\" Defaults to: `null`.
- type (String): The metric type, including its DNS name prefix. The type is not URL-encoded. All user-defined custom metric types have the DNS name `custom.googleapis.com`. Metric types should use a natural hierarchical grouping. For example: \"custom.googleapis.com/invoice/paid/amount\" \"appengine.googleapis.com/http/server/response_latencies\" Defaults to: `null`.
- unit (String): The unit in which the metric value is reported. It is only applicable if the `value_type` is `INT64`, `DOUBLE`, or `DISTRIBUTION`. The supported units are a subset of [The Unified Code for Units of Measure](http://unitsofmeasure.org/ucum.html) standard: **Basic units (UNIT)** * `bit` bit * `By` byte * `s` second * `min` minute * `h` hour * `d` day **Prefixes (PREFIX)** * `k` kilo (10**3) * `M` mega (10**6) * `G` giga (10**9) * `T` tera (10**12) * `P` peta (10**15) * `E` exa (10**18) * `Z` zetta (10**21) * `Y` yotta (10**24) * `m` milli (10**-3) * `u` micro (10**-6) * `n` nano (10**-9) * `p` pico (10**-12) * `f` femto (10**-15) * `a` atto (10**-18) * `z` zepto (10**-21) * `y` yocto (10**-24) * `Ki` kibi (2**10) * `Mi` mebi (2**20) * `Gi` gibi (2**30) * `Ti` tebi (2**40) **Grammar** The grammar includes the dimensionless unit `1`, such as `1/s`. The grammar also includes these connectors: * `/` division (as an infix operator, e.g. `1/s`). * `.` multiplication (as an infix operator, e.g. `GBy.d`) The grammar for a unit is as follows: Expression = Component { \".\" Component } { \"/\" Component } ; Component = [ PREFIX ] UNIT [ Annotation ] | Annotation | \"1\" ; Annotation = \"{\" NAME \"}\" ; Notes: * `Annotation` is just a comment if it follows a `UNIT` and is equivalent to `1` if it is used alone. For examples, `{requests}/s == 1/s`, `By{transmitted}/s == By/s`. * `NAME` is a sequence of non-blank printable ASCII characters not containing '{' or '}'. Defaults to: `null`.
- valueType (String): Whether the measurement is an integer, a floating-point number, etc. Some combinations of `metric_kind` and `value_type` might not be supported. Defaults to: `null`.
- Enum - one of [VALUE_TYPE_UNSPECIFIED, BOOL, INT64, DOUBLE, STRING, DISTRIBUTION, MONEY]
"""
defstruct [
:"description",
:"displayName",
:"labels",
:"metricKind",
:"name",
:"type",
:"unit",
:"valueType"
]
end
defimpl Poison.Decoder, for: GoogleApi.ServiceUser.V1.Model.MetricDescriptor do
import GoogleApi.ServiceUser.V1.Deserializer
def decode(value, options) do
value
|> deserialize(:"labels", :list, GoogleApi.ServiceUser.V1.Model.LabelDescriptor, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ServiceUser.V1.Model.MetricDescriptor do
def encode(value, options) do
GoogleApi.ServiceUser.V1.Deserializer.serialize_non_nil(value, options)
end
end
| 97.421875 | 2,267 | 0.678268 |
03113281a7755d29212166ca640dfc4b9b26d33c | 683 | exs | Elixir | config/test.exs | vinolivae/banking_api | ba91a3396776ea279e648f99120f415df43bd168 | [
"Apache-2.0"
] | 1 | 2021-06-10T03:29:16.000Z | 2021-06-10T03:29:16.000Z | config/test.exs | vinolivae/banking_api | ba91a3396776ea279e648f99120f415df43bd168 | [
"Apache-2.0"
] | null | null | null | config/test.exs | vinolivae/banking_api | ba91a3396776ea279e648f99120f415df43bd168 | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
config :banking_api, BankingApi.Repo,
username: "postgres",
password: "postgres",
database: "banking_api_test#{System.get_env("MIX_TEST_PARTITION")}",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :banking_api_web, BankingApiWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 29.695652 | 70 | 0.756955 |
03114eb80029b83c2b780b3ef4777f1e169045fe | 367 | exs | Elixir | .lab/config/config.exs | afaur/elixir-plug-api | 86001074f4cc0310c8f918a1dc7fa3083e7f4f53 | [
"Unlicense"
] | 1 | 2018-09-11T16:18:09.000Z | 2018-09-11T16:18:09.000Z | .lab/config/config.exs | afaur/elixir-plug-api | 86001074f4cc0310c8f918a1dc7fa3083e7f4f53 | [
"Unlicense"
] | null | null | null | .lab/config/config.exs | afaur/elixir-plug-api | 86001074f4cc0310c8f918a1dc7fa3083e7f4f53 | [
"Unlicense"
] | null | null | null | use Mix.Config
scheme = :http
port = 8085
if (scheme == :https) do
config :app, plug_options: [
password: "cowboy",
keyfile: Path.expand("../../conf/ssl/server.key", __DIR__),
certfile: Path.expand("../../conf/ssl/server.cer", __DIR__),
port: port,
]
else
config :app, plug_options: [ port: port ]
end
config :app, plug_scheme: scheme
| 20.388889 | 64 | 0.629428 |
031153d2659629102334a2757c99408938927d26 | 497 | ex | Elixir | lib/turtle_tube_web/views/error_view.ex | ConnorRigby/turtletube_web | 0e6618395918234584edbb74ccceb589fc79bc68 | [
"MIT"
] | null | null | null | lib/turtle_tube_web/views/error_view.ex | ConnorRigby/turtletube_web | 0e6618395918234584edbb74ccceb589fc79bc68 | [
"MIT"
] | null | null | null | lib/turtle_tube_web/views/error_view.ex | ConnorRigby/turtletube_web | 0e6618395918234584edbb74ccceb589fc79bc68 | [
"MIT"
] | null | null | null | defmodule TurtleTubeWeb.ErrorView do
use TurtleTubeWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
def template_not_found(template, _assigns) do
Phoenix.Controller.status_message_from_template(template)
end
end
| 29.235294 | 61 | 0.738431 |
03116de71206a34ab32423e1fe3f2154d45737c8 | 4,174 | ex | Elixir | lib/phoenix/live_dashboard/live/applications_live.ex | drozdzynski/phoenix_live_dashboard | cb8d3fe1a1733318597d591542b23d2218366f97 | [
"MIT"
] | 2 | 2020-05-07T23:49:26.000Z | 2020-05-10T13:56:03.000Z | lib/phoenix/live_dashboard/live/applications_live.ex | dkuku/phoenix_live_dashboard | fcdc98abf1b7c4718d080bfe4c891057a7878aa9 | [
"MIT"
] | null | null | null | lib/phoenix/live_dashboard/live/applications_live.ex | dkuku/phoenix_live_dashboard | fcdc98abf1b7c4718d080bfe4c891057a7878aa9 | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveDashboard.ApplicationsLive do
use Phoenix.LiveDashboard.Web, :live_view
import Phoenix.LiveDashboard.TableHelpers
alias Phoenix.LiveDashboard.SystemInfo
@sort_by ~w(name state)
@sort_dir ~w(asc desc)
@temporary_assigns [applications: [], total: 0]
@impl true
def mount(%{"node" => _} = params, session, socket) do
{:ok, assign_mount(socket, :applications, params, session, true),
temporary_assigns: @temporary_assigns}
end
@impl true
def handle_params(params, _url, socket) do
{:noreply,
socket
|> assign_table_params(params, @sort_by, @sort_dir)
|> fetch_applications()}
end
defp fetch_applications(%{assigns: %{params: params, menu: menu}} = socket) do
%{search: search, sort_by: sort_by, sort_dir: sort_dir, limit: limit} = params
{applications, total} =
SystemInfo.fetch_applications(menu.node, search, sort_by, sort_dir, limit)
assign(socket, applications: applications, total: total)
end
@impl true
def render(assigns) do
~L"""
<div class="tabular-page">
<h5 class="card-title">Applications</h5>
<div class="tabular-search">
<form phx-change="search" phx-submit="search" class="form-inline">
<div class="form-row align-items-center">
<div class="col-auto">
<input type="search" name="search" class="form-control form-control-sm" value="<%= @params.search %>" placeholder="Search" phx-debounce="300">
</div>
</div>
</form>
</div>
<form phx-change="select_limit" class="form-inline">
<div class="form-row align-items-center">
<div class="col-auto">Showing at most</div>
<div class="col-auto">
<div class="input-group input-group-sm">
<select name="limit" class="custom-select" id="limit-select">
<%= options_for_select(limit_options(), @params.limit) %>
</select>
</div>
</div>
<div class="col-auto">
applications out of <%= @total %>
</div>
</div>
</form>
<div class="card tabular-card mb-4 mt-4">
<div class="card-body p-0">
<div class="dash-table-wrapper">
<table class="table table-hover mt-0 dash-table">
<thead>
<tr>
<th class="pl-4">
<%= sort_link(@socket, @live_action, @menu, @params, :name, "Name") %>
</th>
<th>Description</th>
<th>
<%= sort_link(@socket, @live_action, @menu, @params, :state, "State") %>
</th>
<th class="px-4">Version</th>
</tr>
</thead>
<tbody>
<%= for application <- @applications do %>
<tr class="<%= if application[:state] == :loaded, do: "text-muted" %>">
<td class="pl-4"><%= application[:name] %></td>
<td><%= application[:description] %></td>
<td><%= application[:state] %></td>
<td class="px-4"><%= application[:version] %></td>
</tr>
<% end %>
</tbody>
</table>
</div>
</div>
</div>
</div>
"""
end
@impl true
def handle_info({:node_redirect, node}, socket) do
{:noreply, push_redirect(socket, to: self_path(socket, node, socket.assigns.params))}
end
def handle_info(:refresh, socket) do
{:noreply, socket}
end
@impl true
def handle_event("search", %{"search" => search}, socket) do
%{menu: menu, params: params} = socket.assigns
{:noreply, push_patch(socket, to: self_path(socket, menu.node, %{params | search: search}))}
end
def handle_event("select_limit", %{"limit" => limit}, socket) do
%{menu: menu, params: params} = socket.assigns
{:noreply, push_patch(socket, to: self_path(socket, menu.node, %{params | limit: limit}))}
end
defp self_path(socket, node, params) do
live_dashboard_path(socket, :applications, node, params)
end
end
| 33.66129 | 156 | 0.556061 |
03117e44e127bf2cc59fff25bcda1e28d2e76281 | 312 | ex | Elixir | lib/post_register/web/controllers/logs_controller.ex | myobie/post_register | 924a832d7fa1693d655c34b0295affa8b3275f26 | [
"MIT"
] | null | null | null | lib/post_register/web/controllers/logs_controller.ex | myobie/post_register | 924a832d7fa1693d655c34b0295affa8b3275f26 | [
"MIT"
] | null | null | null | lib/post_register/web/controllers/logs_controller.ex | myobie/post_register | 924a832d7fa1693d655c34b0295affa8b3275f26 | [
"MIT"
] | null | null | null | defmodule PostRegister.Web.LogsController do
use PostRegister.Web, :controller
alias PostRegister.Blogs
def show(conn, %{"id" => id}) do
log = Blogs.find_log(id: id)
posts = Blogs.list_posts(log)
conn
|> assign(:log, log)
|> assign(:posts, posts)
|> render("show.html")
end
end
| 20.8 | 44 | 0.647436 |
031183f735b0a1f143f9a1a29a01513369577d6e | 1,767 | exs | Elixir | test/lexer/element_test.exs | lpil/jot | d4d0b3852db54a7e5c201c8a68ffa7894b2f0d42 | [
"MIT"
] | 1 | 2016-08-20T14:41:55.000Z | 2016-08-20T14:41:55.000Z | test/lexer/element_test.exs | lpil/jot | d4d0b3852db54a7e5c201c8a68ffa7894b2f0d42 | [
"MIT"
] | 26 | 2016-05-29T00:29:33.000Z | 2018-04-12T13:53:50.000Z | test/lexer/element_test.exs | lpil/jot | d4d0b3852db54a7e5c201c8a68ffa7894b2f0d42 | [
"MIT"
] | null | null | null | defmodule Jot.Lexer.ElementTest do
use ExUnit.Case, async: true
alias Jot.Lexer.Element
doctest Element
defmacro template ~> tokens do
quote do
assert unquote(tokens) == Element.tokenize!(unquote(template))
end
end
test "tags" do
"h1" ~> [
name: 'h1',
]
end
test "ids" do
"#foo" ~> [
hash: '#',
name: 'foo',
]
"#xx#yy" ~> [
hash: '#',
name: 'xx',
hash: '#',
name: 'yy',
]
"#who-what_slimSHADY" ~> [
hash: '#',
name: 'who-what_slimSHADY',
]
end
test "class literal" do
".bar" ~> [
dot: '.',
name: 'bar',
]
".x.y" ~> [
dot: '.',
name: 'x',
dot: '.',
name: 'y',
]
".WHAT-where__when" ~> [
dot: '.',
name: 'WHAT-where__when',
]
end
test "tags with text content" do
"div Hello, world!" ~> [
name: 'div',
ws: ' ',
word: 'Hello,',
ws: ' ',
word: 'world!',
]
"div I'm spartacus" ~> [
name: 'div',
ws: ' ',
word: 'I\'m',
ws: ' ',
name: 'spartacus',
]
end
test "whitespace" do
"div \t Hi\t \t" ~> [
name: 'div',
ws: ' \t ',
name: 'Hi',
ws: '\t \t',
]
end
test "attrs" do
~s[a(href="foo")] ~> [
name: 'a',
"(": '(',
name: 'href',
eq: '=',
string: 'foo',
")": ')',
]
~s[a(class="button" href="/beep") Clicky] ~> [
name: 'a',
"(": '(',
name: 'class',
eq: '=',
string: 'button',
ws: ' ',
name: 'href',
eq: '=',
string: '/beep',
")": ')',
ws: ' ',
name: 'Clicky'
]
end
end
| 16.990385 | 68 | 0.381437 |
03119d575c8e973aab5c278a0c88f45fe1c55c2a | 1,064 | ex | Elixir | lib/live_view_example/count.ex | RyoWakabayashi/phoenix-liveview-example | 1f7020d142a88fb2f68c4e58fd7635133cbaf8bc | [
"MIT"
] | null | null | null | lib/live_view_example/count.ex | RyoWakabayashi/phoenix-liveview-example | 1f7020d142a88fb2f68c4e58fd7635133cbaf8bc | [
"MIT"
] | null | null | null | lib/live_view_example/count.ex | RyoWakabayashi/phoenix-liveview-example | 1f7020d142a88fb2f68c4e58fd7635133cbaf8bc | [
"MIT"
] | null | null | null | defmodule LiveViewExample.Count do
@moduledoc """
カウンターのバックエンド処理を定義する
"""
use GenServer
alias Phoenix.PubSub
@name :count_server
@start_value 0
# ------- External API (runs in client process) -------
def topic do
"count"
end
def start_link(_opts) do
GenServer.start_link(__MODULE__, @start_value, name: @name)
end
def incr do
GenServer.call(@name, :incr)
end
def decr do
GenServer.call(@name, :decr)
end
def current do
GenServer.call(@name, :current)
end
def init(start_count) do
{:ok, start_count}
end
# ------- Implementation (Runs in GenServer process) -------
def handle_call(:current, _from, count) do
{:reply, count, count}
end
def handle_call(:incr, _from, count) do
make_change(count, +1)
end
def handle_call(:decr, _from, count) do
make_change(count, -1)
end
defp make_change(count, change) do
new_count = count + change
PubSub.broadcast(LiveViewExample.PubSub, topic(), {:count, new_count})
{:reply, new_count, new_count}
end
end
| 18.033898 | 74 | 0.654135 |
0311ff86304dfb1bece6e47a4783ce9971247cc5 | 488 | ex | Elixir | Chapter11/todo_web/lib/todo/process_registry.ex | benjamindburke/elixir-studies | 65231b5af83dcf701041cae2879107c3bd3e5078 | [
"Unlicense"
] | null | null | null | Chapter11/todo_web/lib/todo/process_registry.ex | benjamindburke/elixir-studies | 65231b5af83dcf701041cae2879107c3bd3e5078 | [
"Unlicense"
] | null | null | null | Chapter11/todo_web/lib/todo/process_registry.ex | benjamindburke/elixir-studies | 65231b5af83dcf701041cae2879107c3bd3e5078 | [
"Unlicense"
] | null | null | null | # Todo ProcessRegistry [supervisor]
# This module supervises and registers all Todo services
defmodule Todo.ProcessRegistry do
# ---------
# Supervisor hook functions
# ---------
def start_link do
Registry.start_link(keys: :unique, name: __MODULE__)
end
def via_tuple(key) do
{:via, Registry, {__MODULE__, key}}
end
def child_spec(_) do
Supervisor.child_spec(
Registry,
id: __MODULE__,
start: {__MODULE__, :start_link, []}
)
end
end
| 20.333333 | 56 | 0.655738 |
0312008e10b8d71d8458a2165e894032040827ad | 120 | exs | Elixir | test/varnishex_test.exs | b3k/varnishex | accb623f9a165a5ec6f4a7e4331be3c14972e28d | [
"MIT"
] | null | null | null | test/varnishex_test.exs | b3k/varnishex | accb623f9a165a5ec6f4a7e4331be3c14972e28d | [
"MIT"
] | null | null | null | test/varnishex_test.exs | b3k/varnishex | accb623f9a165a5ec6f4a7e4331be3c14972e28d | [
"MIT"
] | null | null | null | defmodule VarnishexTest do
use ExUnit.Case
doctest Varnishex
test "the truth" do
assert 1 + 1 == 2
end
end
| 13.333333 | 26 | 0.683333 |
0312068f2ed05c97aa560bf83e2e1c047edd6b1b | 444 | ex | Elixir | lib/cayenne/lpp/type/barometric_pressure.ex | janpieper/cayenne_lpp | 432003d00553cc05a4faa80dddaa2429e2cf5b6b | [
"MIT"
] | 1 | 2020-08-24T08:14:09.000Z | 2020-08-24T08:14:09.000Z | lib/cayenne/lpp/type/barometric_pressure.ex | janpieper/cayenne_lpp | 432003d00553cc05a4faa80dddaa2429e2cf5b6b | [
"MIT"
] | null | null | null | lib/cayenne/lpp/type/barometric_pressure.ex | janpieper/cayenne_lpp | 432003d00553cc05a4faa80dddaa2429e2cf5b6b | [
"MIT"
] | null | null | null | defmodule Cayenne.LPP.Type.BarometricPressure do
@moduledoc """
Cayenne LPP type for barometric pressure
### Example
```elixir
alias Cayenne.LPP.{Buffer, Encoder}
alias Cayenne.LPP.Type.BarometricPressure
buffer =
1017.8
|> DigitalInput.new()
|> Encoder.encode()
Buffer.to_string(buffer) # "7327C2"
Buffer.size(buffer) # 3
```
"""
use Cayenne.LPP.Type,
id: 0x73,
size: 2,
multiplier: 10
end
| 17.076923 | 48 | 0.655405 |
03121caf5b9e137e3104130b3609cbf608d3a1a4 | 300 | ex | Elixir | lib/parse_utility.ex | vikram7/sec_company_filings_rss_feed_parser | eddedeb0b5c0e4cc278876df5215db551a39ae48 | [
"MIT"
] | 6 | 2016-03-04T17:25:16.000Z | 2021-11-08T11:20:49.000Z | lib/parse_utility.ex | vikram7/sec_company_filings_rss_feed_parser | eddedeb0b5c0e4cc278876df5215db551a39ae48 | [
"MIT"
] | 13 | 2016-03-03T23:28:39.000Z | 2018-10-15T14:28:00.000Z | lib/parse_utility.ex | vikram7/sec_company_filings_rss_feed_parser | eddedeb0b5c0e4cc278876df5215db551a39ae48 | [
"MIT"
] | 2 | 2017-12-29T16:45:36.000Z | 2018-10-14T17:34:47.000Z | defmodule ParseUtility do
@moduledoc "a module to hold common helper functions"
  # Injects `extract_last_item/1` into the using module: returns nil for an
  # empty list, otherwise destructures the 3-tuple at the head of the list
  # ({_, _, [last_item | _]}) and returns its first nested element.
  defmacro __using__(_opts) do
    quote do
      def extract_last_item([]), do: nil
      def extract_last_item([tuple | _]) do
        {_, _, [last_item | _]} = tuple
        last_item
      end
    end
  end
end
| 18.75 | 55 | 0.626667 |
03123c429d3e547dd91092c8fa49d1e7a1490b6b | 7,471 | ex | Elixir | lib/ecto/repo/model.ex | aforward/ecto | 74bf4528abf198b25ff261d127fe7cf54a1ff044 | [
"Apache-2.0"
] | null | null | null | lib/ecto/repo/model.ex | aforward/ecto | 74bf4528abf198b25ff261d127fe7cf54a1ff044 | [
"Apache-2.0"
] | null | null | null | lib/ecto/repo/model.ex | aforward/ecto | 74bf4528abf198b25ff261d127fe7cf54a1ff044 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Repo.Model do
# The module invoked by user defined repos
# for model related functionality.
@moduledoc false
alias Ecto.Query.Planner
alias Ecto.Model.Callbacks
alias Ecto.Changeset
@doc """
Implementation for `Ecto.Repo.insert/2`.
"""
  def insert(repo, adapter, %Changeset{} = changeset, opts) when is_list(opts) do
    struct   = struct_from_changeset!(changeset)
    model    = struct.__struct__
    fields   = model.__schema__(:fields)
    source   = struct.__meta__.source
    return   = model.__schema__(:read_after_writes)
    id_types = adapter.id_types(repo)
    # On insert, we always merge the whole struct into the
    # changeset as changes, except the primary key if it is nil.
    changeset = %{changeset | repo: repo}
    changeset = merge_into_changeset(struct, fields, changeset)
    changeset = merge_autogenerate(changeset, model)
    {autogen, changeset} = merge_autogenerate_id(changeset, model)
    # before_insert/after_insert run inside an adapter transaction when the
    # adapter supports one (see with_transactions_if_callbacks/6).
    with_transactions_if_callbacks repo, adapter, model, opts,
                                   ~w(before_insert after_insert)a, fn ->
      changeset = Callbacks.__apply__(model, :before_insert, changeset)
      changes = validate_changes(:insert, changeset, model, fields, id_types)
      {:ok, values} = adapter.insert(repo, source, changes, autogen, return, opts)
      # Load DB-generated values (autogenerated id, read_after_writes columns)
      # back into the model before after_insert sees it.
      changeset = load_into_changeset(changeset, model, values, id_types)
      Callbacks.__apply__(model, :after_insert, changeset).model
    end
  end
  # Bare struct: wrap it in an always-valid changeset and re-dispatch.
  def insert(repo, adapter, %{__struct__: _} = struct, opts) do
    insert(repo, adapter, %Changeset{model: struct, valid?: true}, opts)
  end
@doc """
Implementation for `Ecto.Repo.update/2`.
"""
  def update(repo, adapter, %Changeset{} = changeset, opts) when is_list(opts) do
    struct   = struct_from_changeset!(changeset)
    model    = struct.__struct__
    fields   = model.__schema__(:fields)
    source   = struct.__meta__.source
    return   = model.__schema__(:read_after_writes)
    id_types = adapter.id_types(repo)
    # Differently from insert, update does not copy the struct
    # fields into the changeset. All changes must be in the
    # changeset before hand.
    changeset = %{changeset | repo: repo}
    autogen   = get_autogenerate_id(changeset, model)
    with_transactions_if_callbacks repo, adapter, model, opts,
                                   ~w(before_update after_update)a, fn ->
      changeset = Callbacks.__apply__(model, :before_update, changeset)
      changes = validate_changes(:update, changeset, model, fields, id_types)
      # Target the row by primary key; a stale (missing) row raises.
      filters = add_pk_filter!(changeset.filters, struct)
      filters = Planner.fields(:update, model, filters, id_types)
      values =
        if changes != [] do
          case adapter.update(repo, source, changes, filters, autogen, return, opts) do
            {:ok, values} ->
              values
            {:error, :stale} ->
              raise Ecto.StaleModelError, model: struct, action: :update
          end
        else
          # Nothing changed: skip the database round-trip entirely.
          []
        end
      changeset = load_into_changeset(changeset, model, values, id_types)
      Callbacks.__apply__(model, :after_update, changeset).model
    end
  end
  # Bare struct: treat every non-primary-key field as a change and re-dispatch.
  def update(repo, adapter, %{__struct__: model} = struct, opts) do
    changes = Map.take(struct, model.__schema__(:fields))
    # Remove all primary key fields from the list of changes.
    changes =
      Enum.reduce model.__schema__(:primary_key), changes, &Map.delete(&2, &1)
    changeset = %Changeset{model: struct, valid?: true, changes: changes}
    update(repo, adapter, changeset, opts)
  end
@doc """
Implementation for `Ecto.Repo.delete/2`.
"""
  def delete(repo, adapter, %Changeset{} = changeset, opts) when is_list(opts) do
    struct  = struct_from_changeset!(changeset)
    model   = struct.__struct__
    source  = struct.__meta__.source
    # There are no field changes on delete
    changeset = %{changeset | repo: repo}
    autogen   = get_autogenerate_id(changeset, model)
    with_transactions_if_callbacks repo, adapter, model, opts,
                                   ~w(before_delete after_delete)a, fn ->
      changeset = Callbacks.__apply__(model, :before_delete, changeset)
      # Target the row by primary key; a stale (missing) row raises.
      filters = add_pk_filter!(changeset.filters, struct)
      filters = Planner.fields(:delete, model, filters, adapter.id_types(repo))
      case adapter.delete(repo, source, filters, autogen, opts) do
        {:ok, _} -> nil
        {:error, :stale} ->
          raise Ecto.StaleModelError, model: struct, action: :delete
      end
      # Return the callback-processed model flagged as deleted.
      model = Callbacks.__apply__(model, :after_delete, changeset).model
      put_in model.__meta__.state, :deleted
    end
  end
  # Bare struct: wrap it in an always-valid changeset and re-dispatch.
  def delete(repo, adapter, %{__struct__: _} = struct, opts) do
    delete(repo, adapter, %Changeset{model: struct, valid?: true}, opts)
  end
## Helpers
defp struct_from_changeset!(%{valid?: false}),
do: raise(ArgumentError, "cannot insert/update an invalid changeset")
defp struct_from_changeset!(%{model: nil}),
do: raise(ArgumentError, "cannot insert/update a changeset without a model")
defp struct_from_changeset!(%{model: struct}),
do: struct
  # Applies the pending changes to the changeset's model struct, then layers
  # the adapter-returned `values` on top via do_load/4.
  defp load_into_changeset(%{changes: changes} = changeset, model, values, id_types) do
    update_in changeset.model, &do_load(struct(&1, changes), model, values, id_types)
  end
  # Loads adapter-returned key/values into the struct, casting each value
  # through its Ecto type, and marks the model's meta state as :loaded.
  # NOTE: `model` (the schema module) is rebound to the resulting struct by
  # the reduce below — only `model.__changeset__` uses the module itself.
  defp do_load(struct, model, kv, id_types) do
    types = model.__changeset__
    model = Enum.reduce(kv, struct, fn
      {k, v}, acc ->
        value =
          types
          |> Map.fetch!(k)
          |> Ecto.Type.normalize(id_types)
          |> Ecto.Type.load!(v)
        Map.put(acc, k, value)
    end)
    put_in model.__meta__.state, :loaded
  end
defp merge_into_changeset(struct, fields, changeset) do
changes = Map.take(struct, fields)
update_in changeset.changes, &Map.merge(changes, &1)
end
  # Splits the autogenerated primary key out of the changes. When the key is
  # absent or nil it is removed from the changes (so the adapter generates a
  # value); when the user supplied a value, the changes are kept untouched.
  # Returns {{key, id_type, value_or_nil}, updated_changeset} — or
  # {nil, changeset} when the model has no autogenerated id.
  defp merge_autogenerate_id(changeset, model) do
    case model.__schema__(:autogenerate_id) do
      {key, id} ->
        get_and_update_in changeset.changes, fn changes ->
          case Map.pop(changes, key) do
            {nil, changes} -> {{key, id, nil}, changes}
            {value, _} -> {{key, id, value}, changes}
          end
        end
      nil ->
        {nil, changeset}
    end
  end
  # Returns {key, id_type, current-value-from-changes} for the model's
  # autogenerated primary key, or nil when the model declares none.
  defp get_autogenerate_id(changeset, model) do
    case model.__schema__(:autogenerate_id) do
      {key, id} -> {key, id, Map.get(changeset.changes, key)}
      nil -> nil
    end
  end
defp merge_autogenerate(changeset, model) do
update_in changeset.changes, fn changes ->
Enum.reduce model.__schema__(:autogenerate), changes, fn {k, v}, acc ->
if Map.get(acc, k) == nil do
Map.put(acc, k, v.generate())
else
acc
end
end
end
end
  # Restricts the changes to schema fields and dumps them for the adapter
  # through the query planner.
  defp validate_changes(kind, changeset, model, fields, id_types) do
    Planner.fields(kind, model, Map.take(changeset.changes, fields), id_types)
  end
  # Adds the struct's primary key field(s) to the filters so update/delete
  # target exactly one row; raises if any primary key value is nil.
  defp add_pk_filter!(filters, struct) do
    Enum.reduce Ecto.Model.primary_key!(struct), filters, fn
      {_k, nil}, _acc ->
        raise Ecto.MissingPrimaryKeyError, struct: struct
      {k, v}, acc ->
        Map.put(acc, k, v)
    end
  end
  # Runs `fun` inside an adapter transaction — but only when the model defines
  # at least one of the given callbacks AND the adapter exposes transaction/3.
  # Otherwise `fun` is invoked directly, avoiding needless transactions.
  defp with_transactions_if_callbacks(repo, adapter, model, opts, callbacks, fun) do
    if Enum.any?(callbacks, &function_exported?(model, &1, 1)) and
       function_exported?(adapter, :transaction, 3) do
      {:ok, value} = adapter.transaction(repo, opts, fun)
      value
    else
      fun.()
    end
  end
end
| 33.653153 | 87 | 0.654665 |
031246fc7d48373f02af69744f90f213e1fa2004 | 1,610 | ex | Elixir | lib/weber/session/session_manager.ex | elixir-web/weber | 1c8caa43681cc432813dff33b2c6d08ca1d61f29 | [
"MIT"
] | 124 | 2015-01-03T16:48:21.000Z | 2022-02-02T21:13:11.000Z | lib/weber/session/session_manager.ex | elixir-web/weber | 1c8caa43681cc432813dff33b2c6d08ca1d61f29 | [
"MIT"
] | 2 | 2015-03-08T05:29:36.000Z | 2015-07-19T15:31:19.000Z | lib/weber/session/session_manager.ex | elixir-web/weber | 1c8caa43681cc432813dff33b2c6d08ca1d61f29 | [
"MIT"
] | 12 | 2015-02-23T02:09:27.000Z | 2016-08-07T13:50:38.000Z | defmodule Weber.Session.SessionManager do
@moduledoc """
Session manager process. Create new session's process,
manages all sessions.
"""
use GenServer
  # Internal server state: holds the config proplist given to start_link/1.
  defmodule SessionManager do
    defstruct config: nil
  end
@doc """
Start session manager.
"""
  # Starts the manager registered locally under the :session_manager name.
  def start_link(config) do
    :gen_server.start_link({:local, :session_manager}, __MODULE__, [config], [])
  end
@doc """
gen_server init/1 callback.
"""
  # Creates the shared :cookie_storage ETS bag (named, public). The table is
  # owned by this process, so it is destroyed if the manager crashes.
  def init([config]) do
    :ets.new(:cookie_storage, [:named_table, :public, :bag])
    { :ok, %SessionManager{config: config} }
  end
@doc """
Create new session.
"""
  # Spawns a Weber.Session process for `session_id` and forwards the session
  # configuration to it. NOTE(review): :lists.keyfind returns `false` on a
  # missing key, so absent :session or :max_age config crashes with a
  # MatchError here — presumably intentional (fail fast on bad config).
  def handle_cast({:create_new_session, session_id, pid}, state) do
    {_, session_config} = :lists.keyfind(:session, 1, state.config)
    {_, max_age} = :lists.keyfind(:max_age, 1, session_config)
    {:ok, session_pid} = Weber.Session.start_link(max_age, session_id)
    send(session_pid, {:create_new_session, session_id, pid, session_config, max_age, state.config})
    {:noreply, state}
  end
@doc """
Check cookie in ets.
"""
  # Registers the cookie in ETS if unseen, tagging it with the default locale
  # ("en_US" unless a :localization/:default_locale config entry exists).
  # The lookup-then-insert pair is not atomic, but all writes funnel through
  # this single GenServer, which serializes them.
  def handle_cast({:check_cookie, cookie, pid}, state) do
    case :ets.match_object(:cookie_storage, {cookie, :_, :_}) do
      [] ->
        locale = case :lists.keyfind(:localization, 1, state.config) do
          false ->
            "en_US"
          {:localization, localization_config} ->
            {_, default_locale} = :lists.keyfind(:default_locale, 1, localization_config)
            default_locale
        end
        :ets.insert(:cookie_storage, {cookie, pid, [locale: locale]})
      _ ->
        :ok
    end
    {:noreply, state}
  end
end
| 25.967742 | 100 | 0.636025 |
0312667e72fcdc2e324927e3dde4ca9cd5814c0c | 240 | exs | Elixir | config/config.exs | iwarshak/ex_queb | 3aab53ac21cb9b9292614abadfaeb96c10e1f74b | [
"MIT"
] | 1 | 2020-09-02T19:15:53.000Z | 2020-09-02T19:15:53.000Z | config/config.exs | iwarshak/ex_queb | 3aab53ac21cb9b9292614abadfaeb96c10e1f74b | [
"MIT"
] | null | null | null | config/config.exs | iwarshak/ex_queb | 3aab53ac21cb9b9292614abadfaeb96c10e1f74b | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# Override defaults by adding these into your config.exs file.
# config :ex_queb, filter_param: :q
| 30 | 63 | 0.775 |
0312c70b9d24f30329b79b98fcb7d5d4d1cd4bd1 | 3,591 | ex | Elixir | clients/service_user/lib/google_api/service_user/v1/model/auth_provider.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/service_user/lib/google_api/service_user/v1/model/auth_provider.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/service_user/lib/google_api/service_user/v1/model/auth_provider.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.ServiceUser.V1.Model.AuthProvider do
  @moduledoc """
  Configuration for an authentication provider, including support for [JSON Web Token (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32).
  ## Attributes
  - audiences (String.t): The list of JWT [audiences](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-4.1.3) that are allowed to access. A JWT containing any of these audiences will be accepted. When this setting is absent, only JWTs with audience \"https://Service_name/API_name\" will be accepted. For example, if no audiences are in the setting, LibraryService API will only accept JWTs with the following audience \"https://library-example.googleapis.com/google.example.library.v1.LibraryService\". Example: audiences: bookstore_android.apps.googleusercontent.com, bookstore_web.apps.googleusercontent.com Defaults to: `null`.
  - authorizationUrl (String.t): Redirect URL if JWT token is required but no present or is expired. Implement authorizationUrl of securityDefinitions in OpenAPI spec. Defaults to: `null`.
  - id (String.t): The unique identifier of the auth provider. It will be referred to by `AuthRequirement.provider_id`. Example: \"bookstore_auth\". Defaults to: `null`.
  - issuer (String.t): Identifies the principal that issued the JWT. See https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-4.1.1 Usually a URL or an email address. Example: https://securetoken.google.com Example: [email protected] Defaults to: `null`.
  - jwksUri (String.t): URL of the provider's public key set to validate signature of the JWT. See [OpenID Discovery](https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata). Optional if the key set document: - can be retrieved from [OpenID Discovery](https://openid.net/specs/openid-connect-discovery-1_0.html of the issuer. - can be inferred from the email domain of the issuer (e.g. a Google service account). Example: https://www.googleapis.com/oauth2/v1/certs Defaults to: `null`.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :audiences => any(),
          :authorizationUrl => any(),
          :id => any(),
          :issuer => any(),
          :jwksUri => any()
        }
  field(:audiences)
  field(:authorizationUrl)
  field(:id)
  field(:issuer)
  field(:jwksUri)
end
# Delegates JSON decoding to the generated model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.ServiceUser.V1.Model.AuthProvider do
  def decode(value, options) do
    GoogleApi.ServiceUser.V1.Model.AuthProvider.decode(value, options)
  end
end
# Delegates JSON encoding to the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.ServiceUser.V1.Model.AuthProvider do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 59.85 | 695 | 0.743247 |
0312e497482516234ecf025c1bd145930181a769 | 2,710 | ex | Elixir | lib/mix/lib/mix/tasks/app.tree.ex | kevsmith/elixir | 74825645e8cac770708f45139e651fd9d4e4264c | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/app.tree.ex | kevsmith/elixir | 74825645e8cac770708f45139e651fd9d4e4264c | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/app.tree.ex | kevsmith/elixir | 74825645e8cac770708f45139e651fd9d4e4264c | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.App.Tree do
use Mix.Task
@shortdoc "Prints the application tree"
@recursive true
@moduledoc """
Prints the application tree.
mix app.tree --exclude logger --exclude elixir
If no application is given, it uses the current application defined
in the `mix.exs` file.
## Command line options
* `--exclude` - exclude applications which you do not want to see printed.
`kernel`, `stdlib` and `compiler` are always excluded from the tree.
* `--format` - Can be set to one of either:
* `pretty` - use Unicode codepoints for formatting the tree.
This is the default except on Windows.
* `plain` - do not use Unicode codepoints for formatting the tree.
This is the default on Windows.
* `dot` - produces a DOT graph description of the application tree
in `app_tree.dot` in the current directory.
Warning: this will override any previously generated file.
"""
@default_excluded [:kernel, :stdlib, :compiler]
@spec run(OptionParser.argv) :: :ok
  def run(args) do
    Mix.Task.run "compile"
    # Either an explicit app was passed, or fall back to the current project.
    {app, opts} =
      case OptionParser.parse!(args, strict: [exclude: :keep, format: :string]) do
        {opts, []} ->
          app = Mix.Project.config[:app] || Mix.raise("no application given and none found in mix.exs file")
          {app, opts}
        {opts, [app]} ->
          {String.to_atom(app), opts}
      end
    excluded = Keyword.get_values(opts, :exclude) |> Enum.map(&String.to_atom/1)
    excluded = @default_excluded ++ excluded
    # Callback invoked per node: loads the app, then returns the node label
    # plus its children (regular and included applications).
    callback = fn {type, app} ->
      load(app)
      {{app, type(type)}, children_for(app, excluded)}
    end
    if opts[:format] == "dot" do
      Mix.Utils.write_dot_graph!("app_tree.dot", "application tree",
        [{:normal, app}], callback, opts)
      """
      Generated "app_tree.dot" in the current directory. To generate a PNG:
          dot -Tpng app_tree.dot -o app_tree.png
      For more options see http://www.graphviz.org/.
      """
      |> String.trim_trailing
      |> Mix.shell.info
    else
      Mix.Utils.print_tree([{:normal, app}], callback, opts)
    end
  end
defp load(app) do
case Application.load(app) do
:ok -> :ok
{:error, {:already_loaded, ^app}} -> :ok
_ -> Mix.raise("could not find application #{app}")
end
end
  # Returns the app's dependency children as {:normal, app} / {:included, app}
  # tuples, with the excluded applications filtered out of both lists.
  defp children_for(app, excluded) do
    apps = Application.spec(app, :applications) -- excluded
    included_apps = Application.spec(app, :included_applications) -- excluded
    Enum.map(apps, &{:normal, &1}) ++ Enum.map(included_apps, &{:included, &1})
  end
  # Node annotation shown next to the app name; normal apps get none.
  defp type(:normal), do: nil
  defp type(:included), do: "(included)"
end
| 29.78022 | 108 | 0.62583 |
0312edac6a3e3956df8a6311bb5903bd71f25e75 | 860 | exs | Elixir | mix.exs | gialib/memcache_ex | 69a413d66c535e948b6d6ef63ae5535cb79f94eb | [
"MIT"
] | null | null | null | mix.exs | gialib/memcache_ex | 69a413d66c535e948b6d6ef63ae5535cb79f94eb | [
"MIT"
] | null | null | null | mix.exs | gialib/memcache_ex | 69a413d66c535e948b6d6ef63ae5535cb79f94eb | [
"MIT"
] | null | null | null | defmodule Memcache.Client.Mixfile do
use Mix.Project
  # Mix project definition for the :memcache_ex package.
  def project do
    [app: :memcache_ex,
     version: "1.2.0",
     elixir: "~> 1.0",
     description: description(),
     package: package(),
     build_embedded: Mix.env == :prod,
     start_permanent: Mix.env == :prod,
     deps: deps()]
  end
def application do
[applications: [:logger, :poolboy],
mod: {Memcache.Client, []}]
end
  # Doc tooling (:earmark/:ex_doc) is dev-only; the rest are runtime deps.
  defp deps do
    [
      {:earmark, "~> 0.2.0", only: :dev},
      {:ex_doc, "~> 0.11.4", only: :dev},
      {:poison, "~> 2.2.0"},
      {:poolboy, "~> 1.5.1"},
      {:connection, "~> 1.0.2"}
    ]
  end
defp description do
"""
Memcache client library Elixir.
"""
end
  # Hex package metadata: maintainers, license, and source link.
  defp package do
    [
      maintainers: ["happy"],
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/gialib/memcache_ex"}
    ]
  end
end
| 19.545455 | 67 | 0.534884 |
031304b6dc0e60b3e0180e8e286eeef5740be793 | 5,243 | exs | Elixir | test/credo/code/sigils_test.exs | codeclimate-community/credo | b960a25d604b4499a2577321f9d61b39dc4b0437 | [
"MIT"
] | null | null | null | test/credo/code/sigils_test.exs | codeclimate-community/credo | b960a25d604b4499a2577321f9d61b39dc4b0437 | [
"MIT"
] | null | null | null | test/credo/code/sigils_test.exs | codeclimate-community/credo | b960a25d604b4499a2577321f9d61b39dc4b0437 | [
"MIT"
] | null | null | null | defmodule Credo.Code.SigilsTest do
use Credo.TestHelper
alias Credo.Code.Sigils
test "it should return the source without string literals 3" do
source = """
x = ~c|a b c|
x = ~s"a b c"
x = ~r'a b c'
x = ~w(a b c)
x = ~c[a b c]
x = ~s{a b c}
x = ~r<a b c>
x = ~W|a b c|
x = ~C"a b c"
x = ~S'a b c'
x = ~R(a b c)
x = ~W[a b c]
x = ~C{a b c}
x = ~S<a b c>
"~S( i am not a sigil! )"
"""
expected = """
x = ~c| |
x = ~s" "
x = ~r' '
x = ~w( )
x = ~c[ ]
x = ~s{ }
x = ~r< >
x = ~W| |
x = ~C" "
x = ~S' '
x = ~R( )
x = ~W[ ]
x = ~C{ }
x = ~S< >
"~S( i am not a sigil! )"
"""
result = source |> Sigils.replace_with_spaces()
assert expected == result
end
test "it should return the source without string literals 4" do
source = """
x = Regex.match?(~r/^\\d{1,2}\\/\\d{1,2}\\/\\d{4}$/, value)
"""
expected = """
x = Regex.match?(~r/ /, value)
"""
result = source |> Sigils.replace_with_spaces()
assert expected == result
end
test "it should not crash and burn" do
source = """
defmodule Credo.CLI.Command.List do
defp print_help do
x = ~w(remove me)
\"\"\"
Arrows (↑ ↗ → ↘ ↓) hint at the importance of the object being looked at.
\"\"\"
|> UI.puts
# ↑
# ~r/abc/
end
end
"""
expected = """
defmodule Credo.CLI.Command.List do
defp print_help do
x = ~w( )
\"\"\"
Arrows (↑ ↗ → ↘ ↓) hint at the importance of the object being looked at.
\"\"\"
|> UI.puts
# ↑
# ~r/abc/
end
end
"""
result = source |> Sigils.replace_with_spaces()
assert expected == result
end
test "it should remove sigils with interpolation 2" do
source = ~S"""
defmodule CredoSampleModule do
def some_function(parameter1, parameter2) do
values = ~s{ #{"}"} }
end
end
"""
expected = """
defmodule CredoSampleModule do
def some_function(parameter1, parameter2) do
values = ~s{}
end
end
"""
assert expected == source |> Sigils.replace_with_spaces("")
end
test "it should remove sigils with interpolation 222" do
source = ~S"""
defmodule CredoSampleModule do
def some_function(parameter1, parameter2) do
values = ~s{ #{"x"} }
end
end
"""
expected = """
defmodule CredoSampleModule do
def some_function(parameter1, parameter2) do
values = ~s{}
end
end
"""
assert expected == source |> Sigils.replace_with_spaces("")
end
test "it should remove sigils with interpolation 3" do
source = ~S"""
defmodule CredoSampleModule do
def some_function(parameter1, parameter2) do
values = ~s{(#{Enum.map_join(fields, ", ", "e_name/1)}) } <>
~s{VALUES (#{Enum.map_join(1..length(fields), ", ", fn (_) -> "?" end)})}
end
end
"""
expected = """
defmodule CredoSampleModule do
def some_function(parameter1, parameter2) do
values = ~s{} <>
~s{}
end
end
"""
assert expected == source |> Sigils.replace_with_spaces("")
end
@tag :to_be_implemented
test "it should NOT replace interpolations in strings" do
source = ~S"""
def foo(a) do
"#{a} #{a}"
end
def bar do
" )"
end
"""
expected = ~S"""
def foo(a) do
"#{a} #{a}"
end
def bar do
" )"
end
"""
assert expected == Sigils.replace_with_spaces(source, "")
end
test "it should not modify commented out code" do
source = """
defmodule Foo do
defmodule Bar do
# @doc \"\"\"
# Reassign a student to a discussion group.
# This will un-assign student from the current discussion group
# \"\"\"
# def assign_group(leader = %User{}, student = %User{}) do
# cond do
# leader.role == :student ->
# {:error, :invalid}
#
# student.role != :student ->
# {:error, :invalid}
#
# true ->
# Repo.transaction(fn ->
# {:ok, _} = unassign_group(student)
#
# %Group{}
# |> Group.changeset(%{})
# |> put_assoc(:leader, leader)
# |> put_assoc(:student, student)
# |> Repo.insert!()
# end)
# end
# end
def baz, do: 123
end
end
"""
expected = source
assert expected == Sigils.replace_with_spaces(source, "")
end
  # Idempotency check against a real fixture file: stripping sigils twice must
  # equal stripping once, and the result must still parse as valid Elixir.
  @tag slow: :disk_io
  test "it should produce valid code /2" do
    example_code = File.read!("test/fixtures/example_code/nested_escaped_heredocs.ex")
    result = Sigils.replace_with_spaces(example_code)
    result2 = Sigils.replace_with_spaces(result)
    assert result == result2, "Sigils.replace_with_spaces/2 should be idempotent"
    assert match?({:ok, _}, Code.string_to_quoted(result))
  end
end
| 22.995614 | 90 | 0.504101 |
03131a3546c2ab717aea47daa6e9c7f29bd49389 | 6,284 | ex | Elixir | lib/simplestatex.ex | Tyler-pierce/simplestatex | 1847eaddf3dc6de08672e055235c43e5a4bd492d | [
"MIT"
] | 15 | 2018-01-14T00:19:20.000Z | 2021-02-05T00:20:36.000Z | lib/simplestatex.ex | Tyler-pierce/simplestatex | 1847eaddf3dc6de08672e055235c43e5a4bd492d | [
"MIT"
] | null | null | null | lib/simplestatex.ex | Tyler-pierce/simplestatex | 1847eaddf3dc6de08672e055235c43e5a4bd492d | [
"MIT"
] | null | null | null | defmodule SimpleStatEx do
@moduledoc """
SimpleStatEx is a lightweight library that supports logging simple statistics for any elixir project, including
the Phoenix Framework. Stats are stored via ecto to your data store or in memory. They are rolled up by category
and time window and can be queried conveniently. SimpleStatEx provides the recommended interface to your stats.
"""
alias SimpleStatEx.{SimpleStat, SimpleStatHolder, SimpleStatQuery}
alias SimpleStatEx.Util.{HandleTime, DataAccess}
alias SimpleStatEx.Query.Stat
@doc """
Generate a stat model based on passed arguments
## Examples
iex> SimpleStatEx.stat("index visit", :daily)
%SimpleStat{category: "index visit", period: "daily", count: 1, ...}
"""
  # stat/1 defaults the period to :daily; both clauses round the current time
  # down to the period window and return {:ok, %SimpleStat{}} or the
  # {:error, reason} produced by HandleTime.round/2.
  def stat(category) when is_binary(category) do
    case HandleTime.round(:daily, Timex.now()) do
      {:ok, time} ->
        {:ok, %SimpleStat{category: category, period: HandleTime.period_to_string!(:daily), time: time}}
      {:error, reason} ->
        {:error, reason}
    end
  end
  def stat(category, period, count \\ 1) when is_binary(category) do
    case HandleTime.round(period, Timex.now()) do
      {:ok, time} ->
        {:ok, %SimpleStat{category: category, period: HandleTime.period_to_string!(period), count: count, time: time}}
      {:error, reason} ->
        {:error, reason}
    end
  end
@doc """
Attempt to transform any simple stat operation into using memory instead of repository. Meant for use in piping from
other parts of this interface such as `stat` and `query`.
## Example
iex> SimpleStatEx.stat("mongol visit") |> SimpleStatEx.memory() |> SimpleStatEx.save()
iex> SimpleStatEx.query("mongol visit") |> SimpleStatEx.memory() |> SimpleStatEx.get()
"""
  # Wraps a stat (or stat + query) in a SimpleStatHolder bound to the
  # in-memory bucket process for its category, switching later save/get
  # calls from the repository to memory.
  def memory({:ok, %SimpleStat{} = simple_stat}) do
    pid = DataAccess.lookup_bucket(simple_stat)
    {:ok, %SimpleStatHolder{simple_stat: simple_stat, category_bucket_pid: pid}}
  end
  def memory({:ok, %SimpleStat{} = simple_stat, %SimpleStatQuery{} = simple_stat_query}) do
    pid = DataAccess.lookup_bucket(simple_stat)
    {:ok, %SimpleStatHolder{simple_stat: simple_stat, category_bucket_pid: pid}, simple_stat_query}
  end
@doc """
Save a stat or stat container to the datastore or to state. If within the time and period of a stat of the same
category, updates the counter, incrementing by your new stat's count.
## Example
iex> SimpleStatEx.stat("index visit") |> SimpleStatEx.save()
{:ok,
%SimpleStatEx.SimpleStat{__meta__: #Ecto.Schema.Metadata<:loaded, "simplestats">,
category: "index visit", count: 1, id: 1,
inserted_at: ~N[2018-01-10 05:50:35.225979], period: "daily",
time: #DateTime<2018-01-10 00:00:00Z>,
updated_at: ~N[2018-01-10 05:50:35.225986]}}
"""
  # Persists the stat (repo or memory, depending on the holder). The
  # catch-all clause passes error tuples from earlier pipeline steps through
  # untouched, so `stat(...) |> save()` short-circuits on failure.
  def save({:ok, simple_stat}) do
    Stat.insert(simple_stat)
  end
  def save(error_reason) do
    error_reason
  end
@doc """
Build a stat query that can be used to obtain results from the database or stat set. You are free to query
using Ecto in any way you like, Simple Stats helpers simple give you an easy interface to query in the
suggested way, and are compatible with the Stat Sets held in memory.
## Example
iex> SimpleStatEx.query("index visit", :daily) |> SimpleStatEx.limit(10) |> SimpleStatEx.get()
"""
  # Builds a {:ok, %SimpleStat{}, %SimpleStatQuery{}} triple for the pipeline
  # helpers (limit/offset/get). query/1 accepts either an existing stat or a
  # category string (defaulting the period to :daily).
  def query(category, period) when is_binary(category) do
    case HandleTime.period_to_string(period) do
      {:ok, period_string} ->
        {:ok, %SimpleStat{category: category, period: period_string}, %SimpleStatQuery{}}
      {:error, reason} ->
        {:error, reason}
    end
  end
  def query(%SimpleStat{category: category, period: period}) do
    query(category, period)
  end
  def query(category) when is_binary(category) do
    query(category, :daily)
  end
@doc """
Add a limit to a stat query, overriding the default `1`
## Example
iex> SimpleStatEx.query("index visit") |> SimpleStatEx.limit(50) |> SimpleStatEx.get()
"""
def limit({:ok, simple_stat, %SimpleStatQuery{} = simple_stat_query}, limit) do
{:ok, simple_stat, %{simple_stat_query | limit: limit}}
end
def limit(error_reason, _) do
error_reason
end
@doc """
Add an offset to a stat query, overriding the default `0`
## Example
# Get 1 day stats from 50 days ago
iex> SimpleStatEx.query("index visit") |> SimpleStatEx.offset(50) |> Simple StatEx.get()
"""
def offset({:ok, simple_stat, %SimpleStatQuery{} = simple_stat_query}, offset) do
{:ok, simple_stat, %{simple_stat_query | offset: offset}}
end
def offset(error_reason, _) do
error_reason
end
@doc """
Retrieve a stat using simple stat query builder helpers. This is usually called via pipe from
SimpleStatEx.query.
## Example
iex> SimpleStatEx.get(%SimpleStat{category: "mongol visit", period: :daily}, %SimpleStatQuery{limit: 7, offset: 7})
{:ok,
[%{category: "mongol visit", period: "daily", time: ~N[2018-01-10 00:00:00.000000],
updated_at: ~N[2018-01-10 05:26:03.562011]}]}
iex> SimpleStatEx.query("mongol visit") |> SimpleStatEx.limit(7) |> SimpleStatEx.offset(7) |> SimpleStatEx.get()
{:ok,
[%{category: "test", period: "daily", time: ~N[2018-01-10 00:00:00.000000],
updated_at: ~N[2018-01-10 05:26:03.562011]}]}
"""
  # Executes the built query. Accepts the tagged triple from query/limit/offset
  # (or an untagged pair), and propagates {:error, reason} unchanged.
  def get({simple_stat, %SimpleStatQuery{} = simple_stat_query}) do
    get({:ok, simple_stat, simple_stat_query})
  end
  def get({:ok, simple_stat, %SimpleStatQuery{} = simple_stat_query}) do
    Stat.retrieve(simple_stat, simple_stat_query)
  end
  def get({:error, reason}) do
    {:error, reason}
  end
  # Bang variant: unwraps {:ok, result}, raising MatchError on {:error, _}.
  def get!(stat_query_tuple) do
    {:ok, result} = get(stat_query_tuple)
    result
  end
@doc """
See get/1 above but only return one result with no list structure
## Example
iex> SimpleStatEx.get(%SimpleStatQuery{category: "mongol visit", period: :daily}, :single)
{:ok,
%{category: "test", period: "daily", time: ~N[2018-01-10 00:00:00.000000],
updated_at: ~N[2018-01-10 05:26:03.562011]}}
"""
  # Single-result variants: return the first row without list wrapping.
  # NOTE(review): both clauses raise a MatchError when the result list is
  # empty — presumably intended "must exist" semantics; confirm with callers.
  def get(stat_query_tuple, :single) do
    {:ok, [result|_]} = get(stat_query_tuple)
    {:ok, result}
  end
  def get!(stat_query_tuple, :single) do
    [result|_] = get!(stat_query_tuple)
    result
  end
end
| 31.898477 | 119 | 0.677276 |
0313247e0f9f926745060dab14d82cbd35a3336f | 712 | ex | Elixir | lib/elixir_playground/router.ex | slogsdon/elixir_playground | cabab7a377a814a8741a08fe7b1948b559343050 | [
"MIT"
] | 3 | 2015-01-28T06:08:59.000Z | 2015-11-05T02:40:18.000Z | lib/elixir_playground/router.ex | slogsdon/elixir_playground | cabab7a377a814a8741a08fe7b1948b559343050 | [
"MIT"
] | null | null | null | lib/elixir_playground/router.ex | slogsdon/elixir_playground | cabab7a377a814a8741a08fe7b1948b559343050 | [
"MIT"
] | null | null | null | defmodule Router do
alias ElixirPlayground.Controllers.Main
use Sugar.Router, plugs: [
{ Plugs.HotCodeReload, [] },
{ Plugs.StaticFiles, url: "/static", path: "priv/static" },
# Uncomment the following line for session store
# { Plugs.Session, name: "_sugar_session", adapter: Plugs.Session.Adapters.Ets },
# Uncomment the following line for request logging,
# and add 'applications: [:exlager],' to the application
# Keyword list in your mix.exs
# { Plugs.Logger, [] }
]
# Define your routes here
# Main Routes
get "/", Main, :index
get "/s/:key", Main, :show
# Api Routes
post "/run", Api, :run
post "/save", Api, :save
end
| 29.666667 | 87 | 0.622191 |
03135371e12b55c3db3e5dbdfdcd48ef0297e4da | 4,096 | ex | Elixir | lib/mix/tasks/hoplon.fetch.ex | nietaki/hoplon | 505808fee0a1e5cbb72d1b3b55df598e707b13ea | [
"Apache-2.0"
] | 28 | 2018-04-15T19:29:18.000Z | 2021-07-08T10:24:07.000Z | lib/mix/tasks/hoplon.fetch.ex | nietaki/hoplon | 505808fee0a1e5cbb72d1b3b55df598e707b13ea | [
"Apache-2.0"
] | 16 | 2018-04-14T22:27:25.000Z | 2019-04-09T22:03:47.000Z | lib/mix/tasks/hoplon.fetch.ex | nietaki/hoplon | 505808fee0a1e5cbb72d1b3b55df598e707b13ea | [
"Apache-2.0"
] | 1 | 2018-11-30T08:59:16.000Z | 2018-11-30T08:59:16.000Z | defmodule Mix.Tasks.Hoplon.Fetch do
use Mix.Task
alias Hoplon.CLI.GenericTask
alias Hoplon.CLI.Prompt
alias Hoplon.Crypto
alias Hoplon.CLI.Tools
alias Hoplon.CLI.ConfigFile
require Hoplon.Data
alias Hoplon.Data.Encoder
alias Hoplon.Data
@behaviour GenericTask
@shortdoc "fetch audits from the trusted keys from the server"
@option_docs [
"`--mix-lock-file` - uses a different lockfile than the main one for the project to look for used packages"
]
@moduledoc """
Fetches audits for the used packages, from the chosen server.
Only fetches audits linked and signed by one of your trusted keys.
## Example
mix hoplon.fetch
"""
@moduledoc GenericTask.generate_moduledoc(@moduledoc, @option_docs)
@impl Mix.Task
  # Mix entry point: delegates argument handling to the shared GenericTask
  # driver, which calls back into this module's behaviour callbacks.
  def run(argv, opts \\ []) do
    GenericTask.run(__MODULE__, argv, opts)
  end
@impl GenericTask
  # nil means this task takes no action argument.
  # TODO show and download
  def valid_actions(), do: nil
@impl GenericTask
  # OptionParser configuration: only --mix-lock-file (string) is accepted.
  def option_parser_config() do
    [
      strict: [
        mix_lock_file: :string
      ],
      aliases: []
    ]
  end
@impl GenericTask
  # No switch defaults for this task.
  def default_switch_values() do
    []
  end
@impl GenericTask
# Main body of the task: reads the project's mix.lock, determines the set of
# trusted public keys, and fetches/writes audits for every locked package.
# Raises if the lockfile cannot be read. The third element of the argument
# list must be empty — this task takes no positional arguments.
def do_task(switches, [] = _args, opts) do
  mix_lock_path = Keyword.get(switches, :mix_lock_file)
  env_path = Tools.print_and_get_env_path(switches, opts)
  config_file_path = Tools.config_file_path(env_path)
  config = ConfigFile.read_or_create!(config_file_path)

  # `extract_or_raise` converts a failed lockfile read into a raised error
  # with a human-readable message.
  packages =
    Hoplon.Utils.get_packages_from_mix_lock(mix_lock_path)
    |> Tools.extract_or_raise("could not read the mix.lock file from #{mix_lock_path}")

  # TODO change to false maybe?
  # NOTE(review): the meaning of the third (boolean) argument is defined in
  # Mix.Tasks.Hoplon.Status; the TODO above suggests `true` may be wrong here.
  trusted_keys = Mix.Tasks.Hoplon.Status.get_trusted_public_keys(env_path, config, true)

  package_names_and_hashes =
    Enum.map(packages, fn package -> {"#{package.hex_name}", package.hash} end)

  # Fetch/write audits per package. `Enum.map` (not `each`) is deliberate-looking:
  # the list of per-package results becomes the task's return value.
  Enum.map(package_names_and_hashes, fn name_and_hash ->
    fetch_and_write_audits(env_path, config, name_and_hash, trusted_keys, opts)
  end)
end
# Queries the audit server for audits on a single {package_name, package_hash},
# restricted to the trusted key fingerprints, then verifies and saves each
# returned audit. Failures to fetch are reported to the user, not raised.
defp fetch_and_write_audits(env_path, config, {package_name, package_hash}, trusted_keys, opts) do
  fingerprints = Map.keys(trusted_keys)
  params = %{fingerprints: fingerprints}
  base = Map.get(config, :api_base_url, Hoplon.ApiClient.default_base_url())

  # Assertive matches: crash rather than build a URL with path-traversal
  # characters smuggled in via the package name or hash.
  false = String.contains?(package_name, "/")
  false = String.contains?(package_hash, "/")
  path = "audits/fetch/hexpm/#{package_name}/#{package_hash}"

  case Hoplon.ApiClient.post(base, path, [], params) do
    {:ok, {200, _headers, %{"audits" => audits}}} ->
      Enum.each(audits, fn
        audit ->
          verify_and_save_audit(env_path, audit, trusted_keys, opts)
      end)

    # Any non-200 / transport error: tell the user and move on to the next package.
    other ->
      Prompt.puts(
        "could not fetch audits for #{package_name}/#{package_hash}: #{inspect(other)}",
        opts
      )
  end
end
# Decodes one audit payload (hex-encoded audit + signature), verifies the
# signature against the trusted key named by the audit's fingerprint, and
# writes the raw audit and signature files into the local audit directory.
# Every step uses assertive matching: a bad payload or signature crashes
# instead of silently storing an unverified audit.
def verify_and_save_audit(
      env_path,
      %{"encoded_audit" => audit_hex, "signature" => signature_hex},
      keys,
      opts
    ) do
  {:ok, audit_binary} = Crypto.hex_decode(audit_hex)
  {:ok, signature_binary} = Crypto.hex_decode(signature_hex)
  {:ok, audit} = Encoder.decode(audit_binary, :Audit)
  audit_fingerprint = Data.audit(audit, :publicKeyFingerprint)
  # Map.fetch! crashes if the server returned an audit signed by a key we
  # never asked for / don't trust.
  relevant_key = Map.fetch!(keys, audit_fingerprint)
  true = Crypto.verify_signature(audit_binary, signature_binary, relevant_key)
  package = Data.audit(audit, :package)
  package_name = Data.package(package, :name)
  package_hash = Data.package(package, :hash)
  audit_dir = Tools.audit_dir(env_path, package_name, package_hash)
  File.mkdir_p!(audit_dir)
  # TODO compare audit timestamps with potentially existing audit files
  # to make sure we're not overwriting newer audits with older ones
  audit_path = Tools.audit_path(env_path, package_name, package_hash, audit_fingerprint)
  sig_path = Tools.sig_path(env_path, package_name, package_hash, audit_fingerprint)
  File.write!(audit_path, audit_binary)
  File.write!(sig_path, signature_binary)
  Prompt.puts("saved #{audit_path}", opts)
  {:ok, :done}
end
end
| 30.117647 | 111 | 0.696533 |
031393c22bb2c00902e8eb830307eed1b7bcf80e | 2,323 | exs | Elixir | config/prod.exs | snyk-omar/changelog.com | 66a8cff17ed8a237e439976aa7fb96b58ef276a3 | [
"MIT"
] | 2,599 | 2016-10-25T15:02:53.000Z | 2022-03-26T02:34:42.000Z | config/prod.exs | snyk-omar/changelog.com | 66a8cff17ed8a237e439976aa7fb96b58ef276a3 | [
"MIT"
] | 253 | 2016-10-25T20:29:24.000Z | 2022-03-29T21:52:36.000Z | config/prod.exs | snyk-omar/changelog.com | 66a8cff17ed8a237e439976aa7fb96b58ef276a3 | [
"MIT"
] | 298 | 2016-10-25T15:18:31.000Z | 2022-01-18T21:25:52.000Z | use Mix.Config
# --- Phoenix endpoint -------------------------------------------------------
# NOTE(review): ports are passed through as strings straight from the env
# (e.g. "4000"); confirm the endpoint accepts string ports in this Phoenix
# version, or wrap in String.to_integer/1.
config :changelog, ChangelogWeb.Endpoint,
  http: [port: System.get_env("HTTP_PORT", "4000")],
  url: [
    scheme: System.get_env("URL_SCHEME", "https"),
    host: System.get_env("URL_HOST", "changelog.com"),
    port: System.get_env("URL_PORT", "443")
  ],
  static_url: [
    scheme: System.get_env("STATIC_URL_SCHEME", "https"),
    host: System.get_env("STATIC_URL_HOST", "cdn.changelog.com"),
    port: System.get_env("STATIC_URL_PORT", "443")
  ],
  cache_static_manifest: "priv/static/cache_manifest.json"

# Optional TLS termination in-app: only enabled when the HTTPS env var is set.
if System.get_env("HTTPS") do
  config :changelog, ChangelogWeb.Endpoint,
    https: [
      port: System.get_env("HTTPS_PORT", "443"),
      cipher_suite: :strong,
      otp_app: :changelog,
      certfile: System.get_env("HTTPS_CERTFILE"),
      keyfile: System.get_env("HTTPS_KEYFILE")
    ]
end

# --- Logging: info level, mirrored to Sentry --------------------------------
config :logger,
  level: :info,
  backends: [:console, Sentry.LoggerBackend]

# File uploads (arc) are written to a mounted volume by default.
config :arc,
  storage_dir: System.get_env("UPLOADS_PATH", "/uploads")

# --- Database ---------------------------------------------------------------
# `SecretOrEnv` is a project helper (secret file or env var lookup).
config :changelog, Changelog.Repo,
  adapter: Ecto.Adapters.Postgres,
  database: System.get_env("DB_NAME", "changelog"),
  hostname: System.get_env("DB_HOST", "db"),
  password: SecretOrEnv.get("DB_PASS"),
  pool_size: 40,
  timeout: 60000,
  username: System.get_env("DB_USER", "postgres")

# --- Outbound email via Campaign Monitor SMTP -------------------------------
# The same API token serves as both SMTP username and password.
config :changelog, Changelog.Mailer,
  adapter: Bamboo.SMTPAdapter,
  server: "smtp.api.createsend.com",
  port: 587,
  username: SecretOrEnv.get("CM_SMTP_TOKEN"),
  password: SecretOrEnv.get("CM_SMTP_TOKEN")

config :elixir, :time_zone_database, Tzdata.TimeZoneDatabase

# --- Background jobs (Oban) -------------------------------------------------
config :changelog, Oban,
  plugins: [
    Oban.Plugins.Pruner,
    Oban.Plugins.Stager,
    {Oban.Plugins.Cron,
     timezone: "US/Central",
     crontab: [
       {"0 4 * * *", Changelog.ObanWorkers.StatsProcessor},
       {"0 3 * * *", Changelog.ObanWorkers.SlackImporter},
       {"* * * * *", Changelog.ObanWorkers.NewsPublisher}
     ]}
  ]

# --- Metrics (PromEx / Grafana) ---------------------------------------------
config :changelog, Changelog.PromEx,
  manual_metrics_start_delay: :no_delay,
  drop_metrics_groups: [],
  grafana: [
    host: System.get_env("GRAFANA_URL"),
    auth_token: SecretOrEnv.get("GRAFANA_API_KEY"),
    datasource_id: System.get_env("GRAFANA_DATASOURCE_ID", "Prometheus"),
    annotate_app_lifecycle: true
  ],
  metrics_server: :disabled,
  prometheus_bearer_token: SecretOrEnv.get("PROMETHEUS_BEARER_TOKEN_PROM_EX")
03139d1419e90d3990e0d35218343851a85e0825 | 1,207 | exs | Elixir | test/auth/user_remote/fetcher_test.exs | samuelnygaard/accent | db753badab1d885397b48a42ac3fb43024345467 | [
"BSD-3-Clause"
] | null | null | null | test/auth/user_remote/fetcher_test.exs | samuelnygaard/accent | db753badab1d885397b48a42ac3fb43024345467 | [
"BSD-3-Clause"
] | 2 | 2021-09-28T05:37:00.000Z | 2022-02-26T10:10:15.000Z | test/auth/user_remote/fetcher_test.exs | samuelnygaard/accent | db753badab1d885397b48a42ac3fb43024345467 | [
"BSD-3-Clause"
] | null | null | null | defmodule AccentTest.UserRemote.Fetcher do
use Accent.RepoCase, async: false
import Mock
alias Accent.UserRemote.Adapter.User
alias Accent.UserRemote.Fetcher
# Builds a minimal HTTPoison.Response for the mocked provider call.
defp mock_response(status, body) do
  %HTTPoison.Response{status_code: status, body: body}
end

# Stubs HTTPoison.Base.request/9 so the "google" provider lookup returns a
# canned profile; asserts the response is mapped onto the internal User struct
# (uid taken from the email — see expected_user below).
test "google" do
  response = [request: fn _, _, _, _, _, _, _, _, _ -> {:ok, mock_response(200, %{"email" => "[email protected]", "name" => "Test"})} end]

  with_mock HTTPoison.Base, response do
    expected_user = %User{email: "[email protected]", fullname: "Test", picture_url: nil, provider: "google", uid: "[email protected]"}
    assert Fetcher.fetch("google", "test") == {:ok, expected_user}
  end
end

# The "dummy" provider treats the token itself as the email/uid — no HTTP call.
test "dummy" do
  expected_user = %User{email: "[email protected]", provider: "dummy", uid: "[email protected]"}
  assert Fetcher.fetch("dummy", "[email protected]") == {:ok, expected_user}
end

# nil and "" tokens are both normalized to the {:error, value: "empty"} shape.
test "nil token" do
  assert Fetcher.fetch("dummy", nil) == {:error, %{value: "empty"}}
end

test "empty token" do
  assert Fetcher.fetch("dummy", "") == {:error, %{value: "empty"}}
end

# Unregistered provider names are rejected before any token handling.
test "unknown provider" do
  assert Fetcher.fetch("foo", "test") == {:error, %{provider: "unknown"}}
end
end
| 30.948718 | 139 | 0.652858 |
03139eefb2c517bb4be0055cebe9e27575a7a185 | 571 | exs | Elixir | config/test.exs | edwinthinks/semaphore-demo-elixir-phoenix | 16c60f1a37f204156a17628947a7dda552a76ee0 | [
"MIT"
] | null | null | null | config/test.exs | edwinthinks/semaphore-demo-elixir-phoenix | 16c60f1a37f204156a17628947a7dda552a76ee0 | [
"MIT"
] | null | null | null | config/test.exs | edwinthinks/semaphore-demo-elixir-phoenix | 16c60f1a37f204156a17628947a7dda552a76ee0 | [
"MIT"
] | null | null | null | use Mix.Config
# NOTE(review): the original comment here claimed no server runs during test,
# but `server: true` below *does* start the endpoint — required for
# browser-driven Wallaby tests. Comment updated to match the code.
config :sema, SemaWeb.Endpoint,
  http: [port: 4002],
  server: true

# Allow Wallaby sessions to share the Ecto SQL sandbox with the test process.
config :sema, :sql_sandbox, true

config :wallaby,
  driver: Wallaby.Experimental.Chrome

# Print only warnings and errors during test
config :logger, level: :warn

# Configure your database
config :sema, Sema.Repo, pool: Ecto.Adapters.SQL.Sandbox

# CI environments supply a full DATABASE_URL; locally we fall back to a
# conventionally-named test database.
if url = System.get_env("DATABASE_URL") do
  config :sema, Sema.Repo, url: url
else
  config :sema, Sema.Repo, database: "sema_test"
end
| 22.84 | 56 | 0.737303 |
0313a1d5150cc453b29ad054cfcfc21d8aa30077 | 4,509 | ex | Elixir | apps/ewallet_db/lib/ewallet_db/pre_auth_token.ex | Macavirus/ewallet | ce62177b8bd3f7e72156930d384a1c4c047a3b5b | [
"Apache-2.0"
] | null | null | null | apps/ewallet_db/lib/ewallet_db/pre_auth_token.ex | Macavirus/ewallet | ce62177b8bd3f7e72156930d384a1c4c047a3b5b | [
"Apache-2.0"
] | null | null | null | apps/ewallet_db/lib/ewallet_db/pre_auth_token.ex | Macavirus/ewallet | ce62177b8bd3f7e72156930d384a1c4c047a3b5b | [
"Apache-2.0"
] | null | null | null | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWalletDB.PreAuthToken do
  @moduledoc """
  Ecto Schema representing a pre authentication token.

  A pre-auth token is issued to a user after primary authentication and is
  exchanged during two-factor verification. Tokens are random base64 keys,
  scoped to an `owner_app`, and can be marked `expired`.
  """
  use Ecto.Schema
  use Utils.Types.ExternalID
  use ActivityLogger.ActivityLogging
  import Ecto.Changeset
  import Ecto.Query, only: [from: 2]
  alias Ecto.UUID
  alias Utils.Helpers.Crypto
  alias EWalletDB.{Account, PreAuthToken, Repo, User}

  @primary_key {:uuid, UUID, autogenerate: true}
  @timestamps_opts [type: :naive_datetime_usec]
  # Length (in bytes) of the random key fed to the base64 token generator.
  @key_length 32

  schema "pre_auth_token" do
    external_id(prefix: "ptk_")
    field(:token, :string)
    field(:owner_app, :string)

    belongs_to(
      :user,
      User,
      foreign_key: :user_uuid,
      references: :uuid,
      type: UUID
    )

    belongs_to(
      :account,
      Account,
      foreign_key: :account_uuid,
      references: :uuid,
      type: UUID
    )

    field(:expired, :boolean)
    timestamps()
    activity_logging()
  end

  # Validates the token attributes; enforces token uniqueness and that the
  # referenced user exists. Private — tokens are only created via generate/3.
  defp changeset(%PreAuthToken{} = token, attrs) do
    token
    |> cast_and_validate_required_for_activity_log(
      attrs,
      cast: [:token, :owner_app, :user_uuid, :account_uuid, :expired],
      required: [:token, :owner_app, :user_uuid]
    )
    |> unique_constraint(:token)
    |> assoc_constraint(:user)
  end

  @doc """
  Generate a pre auth token for the specified user to be used for verify two-factor auth,
  then returns the pre auth token string.
  """
  def generate(%User{} = user, owner_app, originator) when is_atom(owner_app) do
    %{
      owner_app: Atom.to_string(owner_app),
      user_uuid: user.uuid,
      # Pre-auth tokens are never tied to an account at generation time.
      account_uuid: nil,
      token: Crypto.generate_base64_key(@key_length),
      originator: originator
    }
    |> insert()
  end

  # Any other argument shape is an invalid request, not a crash.
  def generate(_, _, _), do: {:error, :invalid_parameter}

  @doc """
  Retrieves an auth token using the specified token.
  Returns the associated user if authenticated, :token_expired if token exists but expired,
  or false otherwise.
  """
  def authenticate(token, owner_app) when is_atom(owner_app) do
    token
    |> get_by_token(owner_app)
    |> return_user()
  end

  # Three-arity variant: look up all of the user's tokens for the app and
  # compare each against the presented token.
  def authenticate(user_id, token, owner_app) when token != nil and is_atom(owner_app) do
    user_id
    |> get_by_user(owner_app)
    |> compare_multiple(token)
    |> return_user()
  end

  # Fallback performs a dummy verification — NOTE(review): presumably to keep
  # failure timing uniform with the success path; confirm in Crypto.fake_verify/0.
  def authenticate(_, _, _), do: Crypto.fake_verify()

  # Returns the first record whose stored token matches the presented one.
  # Comparison goes through Crypto.secure_compare/2 rather than `==`.
  defp compare_multiple(token_records, token) when is_list(token_records) do
    Enum.find(token_records, fn record ->
      Crypto.secure_compare(record.token, token)
    end)
  end

  # Normalizes a looked-up token into the authenticate/2,3 return contract:
  # false (no token), :token_expired, or the token preloaded with its user.
  defp return_user(token) do
    case token do
      nil ->
        false

      %{expired: true} ->
        :token_expired

      token ->
        Repo.preload(token, :user)
    end
  end

  @spec get_by_token(String.t(), atom()) :: %__MODULE__{} | nil
  def get_by_token(token, owner_app) when is_binary(token) and is_atom(owner_app) do
    PreAuthToken
    |> Repo.get_by(%{
      token: token,
      owner_app: Atom.to_string(owner_app)
    })
    |> Repo.preload(:user)
  end

  def get_by_token(_, _), do: nil

  # `get_by_user/2` is private to prohibit direct auth token access,
  # please use `authenticate/3` instead.
  defp get_by_user(user_id, owner_app) when is_binary(user_id) and is_atom(owner_app) do
    Repo.all(
      from(
        a in PreAuthToken,
        join: u in User,
        on: u.uuid == a.user_uuid,
        where: u.id == ^user_id and a.owner_app == ^Atom.to_string(owner_app)
      )
    )
  end

  defp get_by_user(_, _), do: nil

  # `insert/1` is private to prohibit direct auth token insertion,
  # please use `generate/2` instead.
  defp insert(attrs) do
    %PreAuthToken{}
    |> changeset(attrs)
    |> Repo.insert_record_with_activity_log()
  end

  @doc """
  Delete all PreAuthTokens associated with the user.
  """
  def delete_for_user(user) do
    Repo.delete_all(
      from(
        a in PreAuthToken,
        where: a.user_uuid == ^user.uuid
      )
    )

    :ok
  end
end
| 25.765714 | 91 | 0.669106 |
03141908623533a83c86c91e5cc73d92e2af39ba | 5,776 | ex | Elixir | lib/sag_appointments/router.ex | pik694/sag-appointments-mas | 220ac4d0bd1bb1dfdffec823114ecb40fb5beb93 | [
"MIT"
] | null | null | null | lib/sag_appointments/router.ex | pik694/sag-appointments-mas | 220ac4d0bd1bb1dfdffec823114ecb40fb5beb93 | [
"MIT"
] | null | null | null | lib/sag_appointments/router.ex | pik694/sag-appointments-mas | 220ac4d0bd1bb1dfdffec823114ecb40fb5beb93 | [
"MIT"
] | null | null | null | defmodule SagAppointments.Router do
require Logger
use GenServer
@cleanup_period 100
@wait_threshold 1000
defstruct query_id: 0, queries: %{}
def start_link() do
GenServer.start_link(__MODULE__, nil, name: __MODULE__)
end
def get_available_slots(opts, all \\ false) do
if all do
GenServer.multi_call(__MODULE__, {:query_available, opts})
else
GenServer.call(__MODULE__, {:query_available, opts})
end
end
def get_visits_for_user(id, all \\ false) do
if all do
GenServer.multi_call(__MODULE__, {:query_by_patient, id})
else
GenServer.call(__MODULE__, {:query_by_patient, id})
end
end
def delete_vist(visit_id, all \\ false) do
if all do
GenServer.call(__MODULE__, {:delete_appointment, visit_id})
else
GenServer.call(__MODULE__, {:delete_appointment, visit_id})
end
end
def add_visit(patient_id, doctor_id, slot, all \\ false) do
if all do
GenServer.call(__MODULE__, {:add_appointment, doctor_id, patient_id, slot})
else
GenServer.call(__MODULE__, {:add_appointment, doctor_id, patient_id, slot})
end
end
def init(nil) do
Process.send_after(self(), :clean_stale_queries, @cleanup_period)
{:ok, %__MODULE__{}}
end
def handle_call(request, from, state) do
if filter_request(state, request) do
children = regions()
{state_incremented, query_id} = get_unique_query_id(state)
updated_state =
build_query(query_id, children, from, request)
|> exec()
|> update_state(state_incremented)
{:noreply, updated_state}
else
GenServer.reply(from, {:ok, :irrelevant})
{:noreply, state}
end
end
defp regions() do
Supervisor.which_children(SagAppointments.Supervisor)
|> Enum.filter(fn {id, _, _, _} -> is_integer(id) end)
|> Enum.map(fn {_, pid, _, _} -> Supervisor.which_children(pid) end)
|> List.flatten()
|> Enum.filter(fn {id, pid, _, _} -> id == :region && is_pid(pid) end)
|> Enum.map(&elem(&1, 1))
end
def handle_cast({:reply, query_id, from, response}, state) do
case handle_response(state, query_id, from, response) do
{:ok, updated_state, {to, response}} ->
Logger.info("Received response from #{inspect(from)}")
Logger.info("Sending response to #{inspect(to)}")
GenServer.reply(to, response)
{:noreply, updated_state}
{:ok, updated_state} ->
Logger.info("Received response from #{inspect(from)}")
{:noreply, updated_state}
_ ->
Logger.info("Received irrelevant response from #{inspect(from)}")
{:noreply, state}
end
end
def handle_info(:clean_stale_queries, state) do
past_threshold = Timex.shift(Timex.now(), milliseconds: -@wait_threshold)
Logger.debug("Cleaning stale queries")
{stale_queries, valid_queries} =
Enum.split_with(state.queries, fn {_, %{query_time: query_time}} ->
Timex.compare(query_time, past_threshold) < 1
end)
Logger.debug("Found #{length(stale_queries)} stale queries")
Enum.each(stale_queries, &send_response(state, &1))
Process.send_after(self(), :clean_stale_queries, @cleanup_period)
{:noreply, Map.put(state, :queries, Map.new(valid_queries))}
end
def send_response(state, {_query_id, query}) do
{to, response} = build_response(state, query)
GenServer.reply(to, response)
end
defp filter_request(%{name: region_name}, {:query_available, opts}) do
case Keyword.fetch(opts, :region) do
:error -> true
{:ok, region} -> region == region_name
end
end
defp filter_request(_, _), do: true
defp handle_response(state, query_id, from, response) do
with {:ok, query} <- Map.fetch(state.queries, query_id),
true <- Enum.member?(query.waiting_for_response, from) do
updated_query =
query
|> Map.update!(:waiting_for_response, &List.delete(&1, from))
|> Map.update!(:responses, &:erlang.++(&1, [response]))
if should_response?(updated_query) do
updated_state = Map.update!(state, :queries, &Map.delete(&1, query_id))
{:ok, updated_state, build_response(state, updated_query)}
else
updated_state = Map.update!(state, :queries, &Map.put(&1, query_id, updated_query))
{:ok, updated_state}
end
end
end
defp should_response?(query) do
request = elem(query.request, 0)
Enum.empty?(query.waiting_for_response) || should_response?(request, query)
end
defp should_response?(:add_appointment, query) do
length(query.responses) > 0
end
defp should_response?(_, _), do: false
defp build_response(state, %{responses: responses, from: from}) do
filtered_responses =
responses
|> Enum.filter(fn
:irrelevant -> false
_ -> true
end)
|> Enum.filter(fn
%{responses: []} -> false
_ -> true
end)
{from, {:ok, do_build_response(filtered_responses, state)}}
end
defp do_build_response([], _), do: :irrelevant
defp do_build_response(responses, _) do
responses
end
defp build_query(query_id, children, from, request) do
{query_id,
%{
waiting_for_response: children,
query_time: Timex.now(),
responses: [],
request: request,
from: from
}}
end
defp exec({query_id, query}) do
query.waiting_for_response
|> Enum.each(&GenServer.cast(&1, {{query_id, self()}, query.request}))
{query_id, query}
end
defp update_state({query_id, query}, state) do
Map.update!(state, :queries, &Map.put_new(&1, query_id, query))
end
defp get_unique_query_id(state) do
{Map.update!(state, :query_id, &:erlang.+(&1, 1)), Map.fetch!(state, :query_id)}
end
end
| 28.594059 | 91 | 0.651835 |
031419227869a55bf3d195abf6bc019a0479681f | 1,396 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/machine_type_scratch_disks.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/compute/lib/google_api/compute/v1/model/machine_type_scratch_disks.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/compute/lib/google_api/compute/v1/model/machine_type_scratch_disks.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.MachineTypeScratchDisks do
  @moduledoc """
  Size specification for a scratch disk attached to a machine type.

  ## Attributes

  * `diskGb` (*type:* `integer()`, *default:* `nil`) - Size of the scratch disk, defined in GB.
  """

  # Generated model: ModelBase supplies struct/decode plumbing for `field`s.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :diskGb => integer() | nil
        }

  field(:diskGb)
end
# Delegates Poison decoding to the generated ModelBase decode/2 for this model.
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.MachineTypeScratchDisks do
  def decode(value, options) do
    GoogleApi.Compute.V1.Model.MachineTypeScratchDisks.decode(value, options)
  end
end
# Delegates Poison encoding to the shared Gax ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.MachineTypeScratchDisks do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 29.702128 | 97 | 0.735673 |
031447ea6de61c3a32f1fc4690dd5e5c1079746d | 3,628 | exs | Elixir | test/pile/extras/enum_x_test.exs | brownt23/crit19 | c45c7b3ae580c193168d83144da0eeb9bc91c8a9 | [
"MIT"
] | 6 | 2019-07-16T19:31:23.000Z | 2021-06-05T19:01:05.000Z | test/pile/extras/enum_x_test.exs | brownt23/crit19 | c45c7b3ae580c193168d83144da0eeb9bc91c8a9 | [
"MIT"
] | null | null | null | test/pile/extras/enum_x_test.exs | brownt23/crit19 | c45c7b3ae580c193168d83144da0eeb9bc91c8a9 | [
"MIT"
] | 3 | 2020-02-24T23:38:27.000Z | 2020-08-01T23:50:17.000Z | defmodule EnumXTest do
use ExUnit.Case, async: true
def equals(n) do
fn x -> x == n end
end
describe "extract" do
test "splits into an element and the remainder of the list" do
assert {1, [2, 3]} = EnumX.extract([1, 2, 3], equals(1))
assert {2, [1, 3]} = EnumX.extract([1, 2, 3], equals(2))
assert {3, [1, 2]} = EnumX.extract([1, 2, 3], equals(3))
end
test "works with singleton list" do
assert {1, []} = EnumX.extract([1], equals(1))
end
test "caller responsibility that element be in the list exactly once" do
catch_error EnumX.extract([1, 2, 3], equals(999))
catch_error EnumX.extract([1, 2, 3, 2], equals(2))
end
end
test "sort_by_id" do
input = [%{id: 3}, %{id: 1}, %{id: 2}]
expected = [%{id: 1}, %{id: 2}, %{id: 3}]
assert EnumX.sort_by_id(input) == expected
end
test "extract ids (and sort them)" do
input = [%{id: 3}, %{id: 1}, %{id: 2}]
expected = [1, 2, 3]
assert EnumX.ids(input) == expected
end
test "pairs" do
input = [ %{name: "bossie", id: 1, extra: "stuff"},
%{name: "jake", id: 2, extra: "stuff"}
]
expected = [{"bossie", 1}, {"jake", 2}]
assert expected == EnumX.pairs(input, :name, :id)
end
test "find_id" do
bossie = %{name: "bossie", id: 1, extra: "stuff"}
input = [ bossie,
%{name: "jake", id: 2, extra: "stuff"}
]
assert bossie == EnumX.find_by_id(input, 1)
end
test "to_id_map" do
input = [ %{name: "bossie", id: 1, extra: "stuff"},
%{name: "jake", id: 2, extra: "stuff"}
]
expected = %{1 => "bossie", 2 => "jake"}
assert expected == EnumX.to_id_map(input, :name)
end
describe "cross_product" do
test "without optional args" do
actual = EnumX.cross_product([1, 2, 3], ["a", "b", "c"])
expected = [
{1, "a"}, {1, "b"}, {1, "c"},
{2, "a"}, {2, "b"}, {2, "c"},
{3, "a"}, {3, "b"}, {3, "c"}
]
assert actual == expected
end
test "with functional optional args" do
actual =
EnumX.cross_product([1, 2, 3], ["a", "b", "c"],
&(&1+1), &String.upcase/1)
expected = [
{2, "A"}, {2, "B"}, {2, "C"},
{3, "A"}, {3, "B"}, {3, "C"},
{4, "A"}, {4, "B"}, {4, "C"}
]
assert actual == expected
end
test "with atoms for structure access" do
actual =
EnumX.cross_product(
[%{id: 1}, %{id: 2}, %{id: 3}],
[%{name: "A"}, %{name: "B"}, %{name: "C"}],
:id, :name)
expected = [
{1, "A"}, {1, "B"}, {1, "C"},
{2, "A"}, {2, "B"}, {2, "C"},
{3, "A"}, {3, "B"}, {3, "C"}
]
assert actual == expected
end
test "filter_by_ids" do
actual =
EnumX.filter_by_ids([%{id: 1}, %{id: 2}, %{id: 3}], [1,3])
assert actual == [%{id: 1}, %{id: 3}]
end
end
describe "pour_struct" do
defmodule Smaller do
defstruct common: nil
end
defmodule Larger do
defstruct common: nil, unique: "unique default"
end
test "from larger to smaller" do
actual = EnumX.pour_into(%Larger{common: "copied"}, Smaller)
assert actual == %Smaller{common: "copied"}
end
test "from smaller to larger" do
actual = EnumX.pour_into(%Smaller{common: "copied"}, Larger)
assert actual == %Larger{common: "copied", unique: "unique default"}
end
end
test "has duplicates" do
refute EnumX.has_duplicates?([1, 2, 3])
assert EnumX.has_duplicates?([1, 2, 1])
end
end
| 26.676471 | 76 | 0.504686 |
031454874b084370fd2277146148afd9cb0ad6a2 | 1,577 | exs | Elixir | projects/api/test/margaret/stories/story_test.exs | strattadb/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | [
"MIT"
] | 82 | 2017-11-06T01:00:55.000Z | 2020-12-09T10:35:29.000Z | projects/api/test/margaret/stories/story_test.exs | dbstratta/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | [
"MIT"
] | 98 | 2017-11-06T22:57:32.000Z | 2020-07-03T04:46:39.000Z | projects/api/test/margaret/stories/story_test.exs | strattadb/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | [
"MIT"
] | 10 | 2017-11-16T05:31:58.000Z | 2020-10-29T18:02:35.000Z | defmodule Margaret.StoryTest do
use Margaret.DataCase
@valid_attrs %{
content: %{"blocks" => [%{"text" => "test"}]},
audience: :all,
published_at: nil,
license: :all_rights_reserved
}
describe "changeset/1" do
test "is valid when the attributes are valid" do
author = Factory.insert(:user)
attrs =
@valid_attrs
|> Map.put(:author_id, author.id)
%Changeset{valid?: valid_changeset?} = Story.changeset(attrs)
assert valid_changeset?
end
test "is invalid when the attributes are invalid" do
%Changeset{valid?: valid_changeset?} = Story.changeset(@valid_attrs)
refute valid_changeset?
end
test "puts the unique_hash in data" do
author = Factory.insert(:user)
attrs =
@valid_attrs
|> Map.put(:author_id, author.id)
changeset = Story.changeset(attrs)
assert {:ok, unique_hash} = fetch_change(changeset, :unique_hash)
assert is_binary(unique_hash)
end
end
describe "update_changeset/2" do
test "is valid when the attributes are valid" do
story = Factory.insert(:story)
%Changeset{valid?: valid_changeset?} = Story.update_changeset(story, @valid_attrs)
assert valid_changeset?
end
test "is invalid when the attributes are invalid" do
story = Factory.insert(:story)
attrs =
@valid_attrs
|> Map.put(:license, :nonexistent_license)
%Changeset{valid?: valid_changeset?} = Story.update_changeset(story, attrs)
refute valid_changeset?
end
end
end
| 23.893939 | 88 | 0.651237 |
0314938b91af6e01e42b683d0286f26448ca5b94 | 8,070 | exs | Elixir | test/xgit/tree_test.exs | scouten/xgit | 0e2f849c83cdf39a9249b319d63ff3682c482c2f | [
"Apache-2.0"
] | 94 | 2019-05-28T05:29:54.000Z | 2022-02-18T20:03:20.000Z | test/xgit/tree_test.exs | scouten/xgit | 0e2f849c83cdf39a9249b319d63ff3682c482c2f | [
"Apache-2.0"
] | 156 | 2019-05-26T03:27:24.000Z | 2020-10-08T05:44:26.000Z | test/xgit/tree_test.exs | scouten/redo | 0e2f849c83cdf39a9249b319d63ff3682c482c2f | [
"Apache-2.0"
] | 5 | 2019-05-28T16:35:55.000Z | 2021-06-16T14:25:17.000Z | defmodule Xgit.TreeTest do
use ExUnit.Case, async: true
alias Xgit.Object
alias Xgit.Repository.Storage
alias Xgit.Test.OnDiskRepoTestCase
alias Xgit.Tree
alias Xgit.Tree.Entry
import FolderDiff
@valid_entry %Entry{
name: 'hello.txt',
object_id: "7919e8900c3af541535472aebd56d44222b7b3a3",
mode: 0o100644
}
@valid %Tree{
entries: [@valid_entry]
}
describe "valid?/1" do
test "happy path: valid entry" do
assert Tree.valid?(@valid)
end
test "not a Tree struct" do
refute Tree.valid?(%{})
refute Tree.valid?("tree")
end
@invalid_mods [
entries: [Map.put(@valid_entry, :name, "binary not allowed here")],
entries: [Map.put(@valid_entry, :name, 'no/slashes')],
entries: [42]
]
test "invalid entries" do
Enum.each(@invalid_mods, fn {key, value} ->
invalid = Map.put(@valid, key, value)
refute(
Tree.valid?(invalid),
"incorrectly accepted entry with :#{key} set to #{inspect(value)}"
)
end)
end
test "sorted (name)" do
assert Tree.valid?(%Tree{
entries: [
Map.put(@valid_entry, :name, 'abc'),
Map.put(@valid_entry, :name, 'abd'),
Map.put(@valid_entry, :name, 'abe')
]
})
end
test "not sorted (name)" do
refute Tree.valid?(%Tree{
entries: [
Map.put(@valid_entry, :name, 'abc'),
Map.put(@valid_entry, :name, 'abf'),
Map.put(@valid_entry, :name, 'abe')
]
})
end
end
describe "from_object/1" do
setup do
{:ok, OnDiskRepoTestCase.repo!()}
end
defp write_git_tree_and_read_xgit_tree_entries(xgit_repo, xgit_path) do
{output, 0} = System.cmd("git", ["write-tree", "--missing-ok"], cd: xgit_path)
tree_id = String.trim(output)
assert {:ok, %Object{} = object} = Storage.get_object(xgit_repo, tree_id)
assert {:ok, %Tree{entries: entries} = _tree} = Tree.from_object(object)
entries
end
test "empty tree", %{xgit_repo: xgit_repo, xgit_path: xgit_path} do
assert write_git_tree_and_read_xgit_tree_entries(xgit_repo, xgit_path) == []
end
test "tree with one entry", %{xgit_repo: xgit_repo, xgit_path: xgit_path} do
{_output, 0} =
System.cmd(
"git",
[
"update-index",
"--add",
"--cacheinfo",
"100644",
"18832d35117ef2f013c4009f5b2128dfaeff354f",
"hello.txt"
],
cd: xgit_path
)
assert write_git_tree_and_read_xgit_tree_entries(xgit_repo, xgit_path) == [
%Entry{
name: 'hello.txt',
object_id: "18832d35117ef2f013c4009f5b2128dfaeff354f",
mode: 0o100644
}
]
end
test "tree with multiple entries", %{xgit_repo: xgit_repo, xgit_path: xgit_path} do
{_output, 0} =
System.cmd(
"git",
[
"update-index",
"--add",
"--cacheinfo",
"100644",
"18832d35117ef2f013c4009f5b2128dfaeff354f",
"hello.txt"
],
cd: xgit_path
)
{_output, 0} =
System.cmd(
"git",
[
"update-index",
"--add",
"--cacheinfo",
"100755",
"d670460b4b4aece5915caf5c68d12f560a9fe3e4",
"test_content.txt"
],
cd: xgit_path
)
assert write_git_tree_and_read_xgit_tree_entries(xgit_repo, xgit_path) == [
%Entry{
name: 'hello.txt',
object_id: "18832d35117ef2f013c4009f5b2128dfaeff354f",
mode: 0o100644
},
%Entry{
name: 'test_content.txt',
object_id: "d670460b4b4aece5915caf5c68d12f560a9fe3e4",
mode: 0o100755
}
]
end
test "object is not a tree" do
object = %Object{
type: :blob,
content: 'test content\n',
size: 13,
id: "d670460b4b4aece5915caf5c68d12f560a9fe3e4"
}
assert {:error, :not_a_tree} = Tree.from_object(object)
end
test "object is an invalid tree (ends after file mode)" do
object = %Object{
type: :tree,
size: 42,
id: "d670460b4b4aece5915caf5c68d12f560a9fe3e4",
content: '100644'
}
assert {:error, :invalid_format} = Tree.from_object(object)
end
test "object is an invalid tree (invalid file mode)" do
object = %Object{
type: :tree,
size: 42,
id: "d670460b4b4aece5915caf5c68d12f560a9fe3e4",
content: '100648 A 12345678901234567890'
}
assert {:error, :invalid_format} = Tree.from_object(object)
end
test "object is an invalid tree (invalid file mode, leading 0)" do
object = %Object{
type: :tree,
size: 42,
id: "d670460b4b4aece5915caf5c68d12f560a9fe3e4",
content: '0100644 A 12345678901234567890'
}
assert {:error, :invalid_format} = Tree.from_object(object)
end
test "object is an invalid tree (not properly sorted)" do
object = %Object{
type: :tree,
size: 42,
id: "d670460b4b4aece5915caf5c68d12f560a9fe3e4",
content:
'100644 B' ++
Enum.map(0..20, fn x -> x end) ++ '100644 A' ++ Enum.map(0..20, fn x -> x end)
}
assert {:error, :invalid_tree} = Tree.from_object(object)
end
test "object is a badly-formatted tree" do
object = %Object{
type: :tree,
size: 42,
id: "d670460b4b4aece5915caf5c68d12f560a9fe3e4",
content: '100644 A' ++ Enum.map(0..20, fn _ -> 0 end)
}
assert {:error, :invalid_format} = Tree.from_object(object)
end
end
describe "to_object/1" do
test "empty tree" do
assert_same_output(
fn _git_dir -> nil end,
%Tree{entries: []}
)
end
test "tree with two entries" do
assert_same_output(
fn git_dir ->
{_output, 0} =
System.cmd(
"git",
[
"update-index",
"--add",
"--cacheinfo",
"100644",
"7919e8900c3af541535472aebd56d44222b7b3a3",
"hello.txt"
],
cd: git_dir
)
{_output, 0} =
System.cmd(
"git",
[
"update-index",
"--add",
"--cacheinfo",
"100755",
"4a43a489f107e7ece679950f53567c648038449a",
"xyzzy.sh"
],
cd: git_dir
)
end,
%Tree{
entries: [
%Entry{
name: 'hello.txt',
object_id: "7919e8900c3af541535472aebd56d44222b7b3a3",
mode: 0o100644
},
%Entry{
name: 'xyzzy.sh',
object_id: "4a43a489f107e7ece679950f53567c648038449a",
mode: 0o100755
}
]
}
)
end
defp assert_same_output(git_ref_fn, xgit_tree) do
%{xgit_path: ref} = OnDiskRepoTestCase.repo!()
%{xgit_path: xgit, xgit_repo: repo} = OnDiskRepoTestCase.repo!()
git_ref_fn.(ref)
{output, 0} = System.cmd("git", ["write-tree", "--missing-ok"], cd: ref)
content_id = String.trim(output)
tree_object = Tree.to_object(xgit_tree)
assert Object.valid?(tree_object)
assert :ok = Object.check(tree_object)
assert content_id == tree_object.id
:ok = Storage.put_loose_object(repo, tree_object)
assert_folders_are_equal(
Path.join([ref, ".git", "objects"]),
Path.join([xgit, ".git", "objects"])
)
end
end
end
| 26.459016 | 90 | 0.524907 |
031495cb8fd400d012a75ae3ac0a997f03396258 | 7,466 | ex | Elixir | clients/private_ca/lib/google_api/private_ca/v1beta1/model/google_api_servicecontrol_v1_operation.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/private_ca/lib/google_api/private_ca/v1beta1/model/google_api_servicecontrol_v1_operation.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/private_ca/lib/google_api/private_ca/v1beta1/model/google_api_servicecontrol_v1_operation.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1Operation do
@moduledoc """
Represents information regarding an operation.
## Attributes
* `consumerId` (*type:* `String.t`, *default:* `nil`) - Identity of the consumer who is using the service. This field should be filled in for the operations initiated by a consumer, but not for service-initiated operations that are not related to a specific consumer. - This can be in one of the following formats: - project:PROJECT_ID, - project`_`number:PROJECT_NUMBER, - projects/PROJECT_ID or PROJECT_NUMBER, - folders/FOLDER_NUMBER, - organizations/ORGANIZATION_NUMBER, - api`_`key:API_KEY.
* `endTime` (*type:* `DateTime.t`, *default:* `nil`) - End time of the operation. Required when the operation is used in ServiceController.Report, but optional when the operation is used in ServiceController.Check.
* `extensions` (*type:* `list(map())`, *default:* `nil`) - Unimplemented.
* `importance` (*type:* `String.t`, *default:* `nil`) - DO NOT USE. This is an experimental field.
* `labels` (*type:* `map()`, *default:* `nil`) - Labels describing the operation. Only the following labels are allowed: - Labels describing monitored resources as defined in the service configuration. - Default labels of metric values. When specified, labels defined in the metric value override these default. - The following labels defined by Google Cloud Platform: - `cloud.googleapis.com/location` describing the location where the operation happened, - `servicecontrol.googleapis.com/user_agent` describing the user agent of the API request, - `servicecontrol.googleapis.com/service_agent` describing the service used to handle the API request (e.g. ESP), - `servicecontrol.googleapis.com/platform` describing the platform where the API is served, such as App Engine, Compute Engine, or Kubernetes Engine.
* `logEntries` (*type:* `list(GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1LogEntry.t)`, *default:* `nil`) - Represents information to be logged.
* `metricValueSets` (*type:* `list(GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1MetricValueSet.t)`, *default:* `nil`) - Represents information about this operation. Each MetricValueSet corresponds to a metric defined in the service configuration. The data type used in the MetricValueSet must agree with the data type specified in the metric definition. Within a single operation, it is not allowed to have more than one MetricValue instances that have the same metric names and identical label value combinations. If a request has such duplicated MetricValue instances, the entire request is rejected with an invalid argument error.
* `operationId` (*type:* `String.t`, *default:* `nil`) - Identity of the operation. This must be unique within the scope of the service that generated the operation. If the service calls Check() and Report() on the same operation, the two calls should carry the same id. UUID version 4 is recommended, though not required. In scenarios where an operation is computed from existing information and an idempotent id is desirable for deduplication purpose, UUID version 5 is recommended. See RFC 4122 for details.
* `operationName` (*type:* `String.t`, *default:* `nil`) - Fully qualified name of the operation. Reserved for future use.
* `quotaProperties` (*type:* `GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1QuotaProperties.t`, *default:* `nil`) - Represents the properties needed for quota check. Applicable only if this operation is for a quota check request. If this is not specified, no quota check will be performed.
* `resources` (*type:* `list(GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1ResourceInfo.t)`, *default:* `nil`) - The resources that are involved in the operation. The maximum supported number of entries in this field is 100.
* `startTime` (*type:* `DateTime.t`, *default:* `nil`) - Required. Start time of the operation.
* `traceSpans` (*type:* `list(GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1TraceSpan.t)`, *default:* `nil`) - Unimplemented. A list of Cloud Trace spans. The span names shall contain the id of the destination project which can be either the produce or the consumer project.
* `userLabels` (*type:* `map()`, *default:* `nil`) - Private Preview. This feature is only available for approved services. User defined labels for the resource that this operation is associated with.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:consumerId => String.t(),
:endTime => DateTime.t(),
:extensions => list(map()),
:importance => String.t(),
:labels => map(),
:logEntries =>
list(GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1LogEntry.t()),
:metricValueSets =>
list(GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1MetricValueSet.t()),
:operationId => String.t(),
:operationName => String.t(),
:quotaProperties =>
GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1QuotaProperties.t(),
:resources =>
list(GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1ResourceInfo.t()),
:startTime => DateTime.t(),
:traceSpans =>
list(GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1TraceSpan.t()),
:userLabels => map()
}
field(:consumerId)
field(:endTime, as: DateTime)
field(:extensions, type: :list)
field(:importance)
field(:labels, type: :map)
field(:logEntries,
as: GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1LogEntry,
type: :list
)
field(:metricValueSets,
as: GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1MetricValueSet,
type: :list
)
field(:operationId)
field(:operationName)
field(:quotaProperties,
as: GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1QuotaProperties
)
field(:resources,
as: GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1ResourceInfo,
type: :list
)
field(:startTime, as: DateTime)
field(:traceSpans,
as: GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1TraceSpan,
type: :list
)
field(:userLabels, type: :map)
end
defimpl Poison.Decoder, for: GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1Operation do
def decode(value, options) do
GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1Operation.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.PrivateCA.V1beta1.Model.GoogleApiServicecontrolV1Operation do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 66.070796 | 815 | 0.743236 |
031523508049c0430ca310e4a76073e3d7929685 | 1,750 | ex | Elixir | lib/small_url_web/controllers/short_link_controller.ex | AlexJuca/small-url | 25f4828ac3566a435b15e6b9581e1e1e508d6650 | [
"Apache-2.0"
] | 7 | 2021-06-28T14:43:10.000Z | 2021-09-21T21:00:30.000Z | lib/small_url_web/controllers/short_link_controller.ex | AlexJuca/small-url | 25f4828ac3566a435b15e6b9581e1e1e508d6650 | [
"Apache-2.0"
] | 2 | 2021-06-30T21:43:06.000Z | 2021-07-01T16:40:10.000Z | lib/small_url_web/controllers/short_link_controller.ex | AlexJuca/small-url | 25f4828ac3566a435b15e6b9581e1e1e508d6650 | [
"Apache-2.0"
defmodule SmallUrlWeb.ShortLinkController do
  @moduledoc """
  Resolves short-link keys: redirects visitors to the original URL
  (recording the click) and serves per-link click analytics.
  """

  use SmallUrlWeb, :controller

  alias SmallUrl.Links
  alias SmallUrl.Links.ShortLinks
  alias SmallUrl.Repo

  @doc """
  Looks up `key` and redirects to the stored URL, recording the click.

  Renders the 404 page (with a 404 status) when the key is unknown.
  """
  def redirect_to_original_url(conn, %{"key" => key}) do
    case Links.get_short_link_by_key(key) do
      nil ->
        conn
        # Previously rendered 404.html with a 200 status; set the real
        # status code so clients and crawlers see a 404.
        |> put_status(:not_found)
        |> put_view(SmallUrlWeb.ErrorView)
        |> render("404.html")

      %ShortLinks{} = shortlink ->
        # Record the click before redirecting (raises on DB failure).
        # A leftover IO.inspect/1 debug call was removed here.
        register_click(shortlink)

        # Struct field access instead of Map.get/2: fails loudly if the
        # schema ever loses the :url field.
        redirect(conn, external: shortlink.url)
    end
  end

  @doc """
  Renders click statistics for the short link identified by `key`;
  responds 404 when the key is unknown.
  """
  def show_link_analytics(conn, %{"key" => key}) do
    case Links.get_short_link_by_key(key) do
      %ShortLinks{} ->
        gather_stats(conn, key)

      nil ->
        # BUG FIX: resp/3 only *sets* the response; returning an unsent
        # conn from a controller action raises Plug.Conn.NotSentError.
        # send_resp/3 actually sends it.
        send_resp(conn, 404, "Not found")
    end
  end

  @doc """
  Renders the analytics JSON for `key` onto `conn`.
  """
  def gather_stats(conn, key) do
    stats = SmallUrl.Stats.Click.gather_stats(key)

    conn
    |> put_view(SmallUrlWeb.LinkAnalyticsView)
    |> render("analytics.json", link_info: stats)
  end

  # Persists a click for `link` and broadcasts it to subscribers.
  # Uses Repo.insert!/1: a failed insert is a bug, so let it crash.
  defp register_click(%ShortLinks{} = link) do
    attrs = %{
      key: link.key,
      click_date: DateTime.utc_now() |> DateTime.truncate(:second),
      shortlinks_id: link.id
    }

    link
    |> Ecto.build_assoc(:clicks, attrs)
    |> Repo.insert!()
    |> broadcast(:short_link_clicked)
  end

  @doc """
  Subscribes the caller to click events for the given short-link `key`.
  """
  def subscribe(key) do
    Phoenix.PubSub.subscribe(SmallUrl.PubSub, "short_link:#{key}")
  end

  @doc """
  Broadcasts a persisted click to the "short_link:<key>" topic.

  Error tuples pass through untouched so this can terminate a pipeline.
  """
  def broadcast({:error, _reason} = error, _event), do: error

  def broadcast(click, _event) do
    key = Map.get(click, :key)
    Phoenix.PubSub.broadcast(SmallUrl.PubSub, "short_link:#{key}", %{event: click})
    {:ok, click}
  end
end
| 23.026316 | 83 | 0.634286 |
03152f04ab2b159f79ad1b2237d398a1a75d8634 | 192 | ex | Elixir | test/support/mocks.ex | alesshh/acme-bank | 0f885dc12614cba4c8f3a46c04b714f3ada8bb9b | [
"MIT"
] | null | null | null | test/support/mocks.ex | alesshh/acme-bank | 0f885dc12614cba4c8f3a46c04b714f3ada8bb9b | [
"MIT"
] | null | null | null | test/support/mocks.ex | alesshh/acme-bank | 0f885dc12614cba4c8f3a46c04b714f3ada8bb9b | [
"MIT"
# Mox mock modules backing the behaviours stubbed throughout the test suite.
Mox.defmock(AcmeBank.AccountsMock, for: AcmeBank.AccountsBehaviour)
Mox.defmock(AcmeBank.WalletMock, for: AcmeBank.WalletBehaviour)
Mox.defmock(AcmeBank.AuthMock, for: AcmeBank.AuthBehaviour)
| 48 | 67 | 0.84375 |
03154596d8ed542901169f2194a07571a3b92087 | 1,170 | ex | Elixir | apps/web/lib/web/channels/user_socket.ex | joshnuss/ornia | 6a4c69a761b41ba0bcfd1c30f54dd2ccc92e5ead | [
"MIT"
] | 1 | 2020-01-14T23:19:25.000Z | 2020-01-14T23:19:25.000Z | apps/web/lib/web/channels/user_socket.ex | joshnuss/ornia | 6a4c69a761b41ba0bcfd1c30f54dd2ccc92e5ead | [
"MIT"
] | null | null | null | apps/web/lib/web/channels/user_socket.ex | joshnuss/ornia | 6a4c69a761b41ba0bcfd1c30f54dd2ccc92e5ead | [
"MIT"
defmodule Ornia.Web.UserSocket do
  @moduledoc """
  Entry point for client socket connections.

  No channels are currently enabled; every connection is accepted
  anonymously.
  """

  use Phoenix.Socket

  ## Channels
  # channel "room:*", Ornia.Web.RoomChannel

  ## Transports
  transport :websocket, Phoenix.Transports.WebSocket
  # transport :longpoll, Phoenix.Transports.LongPoll

  # Socket params arrive from the client and could be verified here
  # (e.g. with Phoenix.Token) before putting default assigns onto the
  # socket; returning :error would deny the connection. Currently all
  # connections are accepted as-is.
  def connect(_params, socket), do: {:ok, socket}

  # A socket id names a topic covering all sockets for one user, e.g.
  # "user_socket:#{socket.assigns.user_id}", enabling a broadcast
  # "disconnect" for that user. Returning nil keeps sockets anonymous.
  def id(_socket), do: nil
end
| 30.789474 | 83 | 0.7 |
0315543aee9a31607e0ce65a2590086c2194e687 | 666 | ex | Elixir | elixir/lib/junks/content/note_tag.ex | crappygraphix/junks | a56b5b86e1a5cbcf0a71fc44d6292d6bcd525b76 | [
"Apache-2.0"
] | 1 | 2019-04-29T17:46:44.000Z | 2019-04-29T17:46:44.000Z | elixir/lib/junks/content/note_tag.ex | crappygraphix/junks | a56b5b86e1a5cbcf0a71fc44d6292d6bcd525b76 | [
"Apache-2.0"
] | null | null | null | elixir/lib/junks/content/note_tag.ex | crappygraphix/junks | a56b5b86e1a5cbcf0a71fc44d6292d6bcd525b76 | [
"Apache-2.0"
defmodule Junks.Content.NoteTag do
  @moduledoc """
  Join schema linking a `Junks.Content.Note` to a `Junks.Content.Tag`.
  """

  use Ecto.Schema
  import Ecto.Changeset

  alias Junks.Content.Note
  alias Junks.Content.Tag
  alias Junks.Errors

  schema "note_tags" do
    belongs_to :note, Note
    belongs_to :tag, Tag

    timestamps()
  end

  @doc false
  # Both ids must be present in `attrs`; the head pattern enforces that.
  def cs_create(note_tag, %{tag_id: _, note_id: _} = attrs) do
    changeset = cast(note_tag, attrs, [:tag_id, :note_id])

    changeset
    |> validate_required([:tag_id, :note_id], message: Errors.missing())
    |> foreign_key_constraint(:note_id, message: Errors.no_fk())
    |> foreign_key_constraint(:tag_id, message: Errors.no_fk())
    |> unique_constraint(:relation, name: :note_tag, message: Errors.exists())
  end
end
| 25.615385 | 76 | 0.702703 |
031567c36c3e43ee42952e6eb083b6b984b097e0 | 1,155 | exs | Elixir | test/support/conn_case.exs | henb/coherence | 725247353bad46df464caffa12b9ea2788fe774f | [
"MIT"
] | 2 | 2018-01-19T06:12:16.000Z | 2018-03-12T07:17:17.000Z | test/support/conn_case.exs | henb/coherence | 725247353bad46df464caffa12b9ea2788fe774f | [
"MIT"
] | null | null | null | test/support/conn_case.exs | henb/coherence | 725247353bad46df464caffa12b9ea2788fe774f | [
"MIT"
defmodule TestCoherence.ConnCase do
  @moduledoc """
  This module defines the test case to be used by
  tests that require setting up a connection.
  Such tests rely on `Phoenix.ConnTest` and also
  imports other functionality to make it easier
  to build and query models.
  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  # Injected into every test module that `use`s TestCoherence.ConnCase.
  using do
    quote do
      # Import conveniences for testing with connections
      use Phoenix.ConnTest
      alias TestCoherence.Repo
      import Ecto
      import Ecto.Changeset
      import Ecto.Query, only: [from: 1, from: 2]
      import TestCoherenceWeb.Router.Helpers
      import TestCoherence.TestHelpers
      alias Coherence.Config

      # The default endpoint for testing
      @endpoint TestCoherenceWeb.Endpoint
    end
  end

  # Non-async tests check out a sandboxed DB connection so each test
  # runs in its own transaction (see the moduledoc above).
  setup tags do
    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.checkout(TestCoherence.Repo)
    end

    # Every test receives a fresh Plug conn in its context.
    {:ok, conn: Phoenix.ConnTest.build_conn()}
  end
end
| 24.0625 | 60 | 0.71342 |
03158f19b9bc058fc7d19da61b3cdeffdc5995c5 | 2,332 | ex | Elixir | lib/poison/poison.ex | AnilRedshift/wand-core | c3f591b25f89221cd67ad1ea683c80f375af2360 | [
"BSD-3-Clause"
] | 1 | 2018-07-01T05:31:04.000Z | 2018-07-01T05:31:04.000Z | lib/poison/poison.ex | AnilRedshift/wand-core | c3f591b25f89221cd67ad1ea683c80f375af2360 | [
"BSD-3-Clause"
] | 2 | 2018-06-29T21:38:57.000Z | 2018-07-13T09:54:16.000Z | lib/poison/poison.ex | AnilRedshift/wand-core | c3f591b25f89221cd67ad1ea683c80f375af2360 | [
"BSD-3-Clause"
defmodule WandCore.Poison do
  @moduledoc false

  alias WandCore.Poison.Decode
  alias WandCore.Poison.Encoder
  alias WandCore.Poison.Parser

  @doc """
  Encode a value to JSON.

      iex> WandCore.Poison.encode([1, 2, 3])
      {:ok, "[1,2,3]"}
  """
  @spec encode(Encoder.t(), Keyword.t()) ::
          {:ok, iodata} | {:ok, String.t()} | {:error, {:invalid, any}}
  def encode(value, options \\ []) do
    {:ok, encode!(value, options)}
  rescue
    # Only encoder errors are converted into a tagged tuple; anything
    # else is a bug and should crash.
    exception in [WandCore.Poison.EncodeError] ->
      {:error, {:invalid, exception.value}}
  end

  @doc """
  Encode a value to JSON as iodata.

      iex> WandCore.Poison.encode_to_iodata([1, 2, 3])
      {:ok, [91, ["1", 44, "2", 44, "3"], 93]}
  """
  @spec encode_to_iodata(Encoder.t(), Keyword.t()) ::
          {:ok, iodata} | {:error, {:invalid, any}}
  def encode_to_iodata(value, options \\ []) do
    encode(value, [iodata: true] ++ options)
  end

  @doc """
  Encode a value to JSON, raises an exception on error.

      iex> WandCore.Poison.encode!([1, 2, 3])
      "[1,2,3]"
  """
  @spec encode!(Encoder.t(), Keyword.t()) :: iodata | no_return
  def encode!(value, options \\ []) do
    iodata = Encoder.encode(value, options)

    # `unless ... else` is an anti-pattern; state the positive case
    # first: callers opt into iodata via `iodata: true`, everyone else
    # gets a flattened binary.
    if options[:iodata] do
      iodata
    else
      IO.iodata_to_binary(iodata)
    end
  end

  @doc """
  Encode a value to JSON as iodata, raises an exception on error.

      iex> WandCore.Poison.encode_to_iodata!([1, 2, 3])
      [91, ["1", 44, "2", 44, "3"], 93]
  """
  @spec encode_to_iodata!(Encoder.t(), Keyword.t()) :: iodata | no_return
  def encode_to_iodata!(value, options \\ []) do
    encode!(value, [iodata: true] ++ options)
  end

  @doc """
  Decode JSON to a value.

      iex> WandCore.Poison.decode("[1,2,3]")
      {:ok, [1, 2, 3]}
  """
  @spec decode(iodata, Keyword.t()) ::
          {:ok, Parser.t()} | {:error, :invalid} | {:error, {:invalid, String.t()}}
  def decode(iodata, options \\ []) do
    case Parser.parse(iodata, options) do
      {:ok, value} -> {:ok, Decode.decode(value, options)}
      # Parser errors ({:error, ...}) are passed through unchanged.
      error -> error
    end
  end

  @doc """
  Decode JSON to a value, raises an exception on error.

      iex> WandCore.Poison.decode!("[1,2,3]")
      [1, 2, 3]
  """
  @spec decode!(iodata, Keyword.t()) :: Parser.t() | no_return
  def decode!(iodata, options \\ []) do
    Decode.decode(Parser.parse!(iodata, options), options)
  end
end
| 26.804598 | 73 | 0.596484 |
031590f87bd0831a0ee427feea71ddd68d173b58 | 6,571 | ex | Elixir | apps/api_web/lib/api_web/api_controller_helpers.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 62 | 2019-01-17T12:34:39.000Z | 2022-03-20T21:49:47.000Z | apps/api_web/lib/api_web/api_controller_helpers.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 375 | 2019-02-13T15:30:50.000Z | 2022-03-30T18:50:41.000Z | apps/api_web/lib/api_web/api_controller_helpers.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
defmodule ApiWeb.ApiControllerHelpers do
  @moduledoc """
  Helpers for Api Controllers. Requires an index_data/2 and show_data/2
  callback to return data.

  `use`-ing this module wires up the standard plugs and provides
  overridable `index/2` and `show/2` actions that render JSON:API (or
  stream results for the "event-stream" format).
  """

  @callback index_data(Plug.Conn.t(), map) :: any
  @callback show_data(Plug.Conn.t(), map) :: any

  import Plug.Conn, only: [assign: 3, put_status: 2]
  import Phoenix.Controller, only: [render: 3, put_view: 2, get_format: 1]

  alias ApiWeb.ApiControllerHelpers
  alias ApiWeb.Plugs.Deadline
  alias State.Pagination.Offsets

  # # of milliseconds after which to terminate the request
  @deadline_ms 10_000

  defmacro __using__(_) do
    quote location: :keep do
      @behaviour ApiControllerHelpers

      defdelegate split_include(conn, opts), to: ApiControllerHelpers

      plug(:split_include)
      plug(ApiWeb.Plugs.ModifiedSinceHandler, caller: __MODULE__)
      plug(ApiWeb.Plugs.RateLimiter)

      def index(conn, params), do: ApiControllerHelpers.index(__MODULE__, conn, params)

      def show(conn, params), do: ApiControllerHelpers.show(__MODULE__, conn, params)

      def state_module, do: nil

      defoverridable index: 2, show: 2, state_module: 0
    end
  end

  # Dispatch an index request to the handler module for the negotiated
  # format: this module for JSON:API, ApiWeb.EventStream for streaming.
  def index(module, conn, params) do
    conn
    |> get_format()
    |> index_for_format()
    |> apply(:call, [conn, module, params])
  end

  # JSON:API index handler (the target selected by index_for_format/1).
  def call(conn, module, params) do
    conn = Deadline.set(conn, @deadline_ms)
    data = module.index_data(conn, params)
    render_json_api(conn, params, data)
  end

  def show(module, conn, params) do
    conn
    |> get_format()
    |> show_for_format(module, conn, params)
  end

  # Streaming an individual resource is not supported.
  def show_for_format("event-stream", _module, conn, params) do
    render_json_api(
      conn,
      params,
      {:error, :not_acceptable,
       "Streaming not supported for an individual resource. Instead list resources and filter by ID."}
    )
  end

  def show_for_format(_format, module, conn, params) do
    data =
      case ApiWeb.Params.validate_show_params(params, conn) do
        :ok ->
          module.show_data(conn, params)

        error ->
          error
      end

    render_json_api(conn, params, data)
  end

  def index_for_format("event-stream"), do: ApiWeb.EventStream
  def index_for_format(_), do: __MODULE__

  # Paginated result: attach first/last/next/prev links to the opts.
  def render_json_api(conn, params, {data, %Offsets{} = offsets}) do
    Deadline.check!(conn)
    pagination_links = pagination_links(conn, offsets)

    opts =
      conn
      |> ApiControllerHelpers.opts_for_params(params)
      |> Map.put(:page, pagination_links)

    render(conn, "index.json-api", data: data, opts: opts)
  end

  def render_json_api(conn, params, data) when is_list(data) do
    Deadline.check!(conn)

    render(
      conn,
      "index.json-api",
      data: data,
      opts: ApiControllerHelpers.opts_for_params(conn, params)
    )
  end

  def render_json_api(conn, params, %{} = data) do
    Deadline.check!(conn)

    render(conn, "show.json-api",
      data: data,
      opts: ApiControllerHelpers.opts_for_params(conn, params)
    )
  end

  def render_json_api(conn, _params, nil) do
    conn
    |> put_status(:not_found)
    |> put_view(ApiWeb.ErrorView)
    |> render("404.json-api", [])
  end

  def render_json_api(conn, _params, {:error, :not_acceptable, details}) do
    conn
    |> put_status(:not_acceptable)
    |> put_view(ApiWeb.ErrorView)
    |> render("406.json-api", details: details)
  end

  def render_json_api(conn, _params, {:error, error, details}) do
    conn
    |> put_status(:bad_request)
    |> put_view(ApiWeb.ErrorView)
    |> render("400.json-api", error: error, details: details)
  end

  def render_json_api(conn, _params, {:error, error}) do
    conn
    |> put_status(:bad_request)
    |> put_view(ApiWeb.ErrorView)
    |> render("400.json-api", error: error)
  end

  def opts_for_params(conn, params) when is_map(params) do
    fields = filter_valid_field_params(conn, Map.get(params, "fields"))

    %{
      include: Map.get(params, "include"),
      fields: fields
    }
  end

  @doc """
  Filters for valid types with valid field attributes.

  Invalid attributes, invalid types, and types without any valid
  attributes are removed.
  """
  @spec filter_valid_field_params(Plug.Conn.t(), term) :: map
  def filter_valid_field_params(conn, %{} = fields) do
    for {type, _} = field <- fields, valid_type?(type), into: %{} do
      attributes = do_filter_valid_field_attributes(conn, field)
      {type, attributes}
    end
  end

  def filter_valid_field_params(_conn, _params), do: nil

  # Filter types for types with a view like ShapeView or RouteView
  defp valid_type?(type) do
    view_module = view_module_for_type(type)

    case Code.ensure_compiled(view_module) do
      {:module, ^view_module} ->
        true

      # BUG FIX: Code.ensure_compiled/1 can also return reasons such as
      # :unavailable, :badfile or :embedded; matching only
      # {:error, :nofile} raised a CaseClauseError (which the
      # ArgumentError rescue below does not catch). Treat every error
      # reason as "not a valid type".
      {:error, _reason} ->
        false
    end
  rescue
    # Module.safe_concat/1 raises ArgumentError for atoms that were
    # never defined; those are invalid types as well.
    ArgumentError -> false
  end

  # Filter requested fields for valid field attributes supported in the view
  defp do_filter_valid_field_attributes(conn, {type, nil}),
    do: do_filter_valid_field_attributes(conn, {type, ""})

  defp do_filter_valid_field_attributes(_conn, {_type, ""}), do: []

  defp do_filter_valid_field_attributes(conn, {type, fields}) do
    view_module = view_module_for_type(type)
    attr_filter = fn attr -> conn |> view_module.attribute_set |> MapSet.member?(attr) end

    fields
    |> String.split(",")
    |> Enum.filter(attr_filter)
    |> Enum.map(&String.to_existing_atom/1)
  end

  defp view_module_for_type(type) do
    view_name = String.capitalize(type) <> "View"
    Module.safe_concat([ApiWeb, view_name])
  end

  # Plug: parse the "include" param once and stash the flattened set of
  # relationship names on the conn for downstream plugs and views.
  def split_include(%{params: params} = conn, []) do
    split_include =
      case params do
        %{"include" => include} when is_binary(include) ->
          include
          |> String.split([",", "."])
          |> MapSet.new()

        _ ->
          []
      end

    assign(conn, :split_include, split_include)
  end

  @doc false
  def pagination_links(conn, %Offsets{} = offsets) do
    offsets
    |> Map.from_struct()
    |> Enum.map(&build_pagination_link(&1, conn))
  end

  # Absent offsets (nil) are passed through unchanged.
  defp build_pagination_link({_key, nil} = key_pair, _conn), do: key_pair

  defp build_pagination_link({key, offset}, conn) do
    pagination_url = generate_pagination_url(conn, offset)
    {key, pagination_url}
  end

  # Rebuild the request URL with page[offset] swapped for `offset`.
  defp generate_pagination_url(conn, offset) do
    new_params = update_in(conn.query_params, ["page"], &Map.put(&1, "offset", offset))
    new_path = conn.request_path <> "?" <> Plug.Conn.Query.encode(new_params)
    endpoint_module = Phoenix.Controller.endpoint_module(conn)
    Path.join(endpoint_module.url(), new_path)
  end
end
| 27.26556 | 102 | 0.673261 |
0315978b2cf70439bb85b25afc654d81c27421b8 | 1,129 | exs | Elixir | config/config.exs | alice-bot/alice_doge_me | 927bc47f7aa5bdf83eeb8db2ca7c44a91b021e1d | [
"MIT"
] | null | null | null | config/config.exs | alice-bot/alice_doge_me | 927bc47f7aa5bdf83eeb8db2ca7c44a91b021e1d | [
"MIT"
] | null | null | null | config/config.exs | alice-bot/alice_doge_me | 927bc47f7aa5bdf83eeb8db2ca7c44a91b021e1d | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :alice_doge_me, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:alice_doge_me, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.419355 | 73 | 0.753764 |
03159c883ca9174d4e6cc73b51618d5c7b20ebb7 | 96 | exs | Elixir | test/support/test_struct.exs | mindreframer/exsm | e151c96060b6cec5938c2c3953751aed42fad5bc | [
"Apache-2.0"
] | null | null | null | test/support/test_struct.exs | mindreframer/exsm | e151c96060b6cec5938c2c3953751aed42fad5bc | [
"Apache-2.0"
] | null | null | null | test/support/test_struct.exs | mindreframer/exsm | e151c96060b6cec5938c2c3953751aed42fad5bc | [
"Apache-2.0"
defmodule ExsmTest.TestStruct do
  @moduledoc false
  # Bare struct exercised by the Exsm state-machine tests; all fields
  # default to nil.
  defstruct my_state: nil, missing_fields: nil, force_exception: nil
end
| 24 | 58 | 0.8125 |
0315af9d80fd50030b92640269438fda17697ad9 | 1,368 | ex | Elixir | lib/elixlsx/compiler/db_util.ex | Joeman29/elixlsx | c878142b1b366ae20f99423b189b1415a355bcc2 | [
"MIT"
] | null | null | null | lib/elixlsx/compiler/db_util.ex | Joeman29/elixlsx | c878142b1b366ae20f99423b189b1415a355bcc2 | [
"MIT"
] | null | null | null | lib/elixlsx/compiler/db_util.ex | Joeman29/elixlsx | c878142b1b366ae20f99423b189b1415a355bcc2 | [
"MIT"
defmodule Elixlsx.Compiler.DBUtil do
  @moduledoc ~S"""
  Generic functions for the Compiler.*DB modules.

  A "database" here is a tuple `{dict, nextid}` where `dict` maps
  arbitrary objects to monotonically increasing integer ids.
  """

  @type object_type :: any
  @type gen_db_datatype :: %{object_type => non_neg_integer}
  @type gen_db_type :: {gen_db_datatype, non_neg_integer}

  @doc ~S"""
  If the value does not exist in the database, return
  the tuple {dict, nextid} unmodified. Otherwise,
  returns a tuple {dict', nextid+1}, where dict'
  is the dictionary with the new element inserted
  (with id `nextid`)
  """
  @spec register(gen_db_type, object_type) :: gen_db_type
  def register({dict, nextid}, value) do
    # Note that the parameter "value" in the API refers to the *key*
    # in the dictionary; the stored map value is its id.
    case Map.fetch(dict, value) do
      :error -> {Map.put(dict, value, nextid), nextid + 1}
      {:ok, _} -> {dict, nextid}
    end
  end

  @doc ~S"""
  Return the ID for an object in the database.

  Raises `ArgumentError` if the object was never registered.
  """
  @spec get_id(gen_db_datatype, object_type) :: non_neg_integer
  def get_id(dict, value) do
    case Map.fetch(dict, value) do
      {:ok, id} ->
        id

      :error ->
        # Idiomatic raise form; equivalent to building the struct by hand.
        raise ArgumentError, "Unable to find element: " <> inspect(value)
    end
  end

  @doc ~S"""
  Return all registered objects ordered by their assigned ids.
  """
  @spec id_sorted_values(gen_db_datatype) :: list(object_type)
  def id_sorted_values(dict) do
    # Ids are unique by construction (see register/2), so sorting by id
    # alone yields a total order.
    dict
    |> Enum.sort_by(fn {_value, id} -> id end)
    |> Enum.map(fn {value, _id} -> value end)
  end
end
| 30.4 | 91 | 0.654971 |
0315b611b30926511ed8b51ecb6e7708a6434cc6 | 1,619 | ex | Elixir | lib/mix/phx.bricks.gen.filter.ex | leanpanda-com/phoenix_bricks | 170a1e05842a69d5c594636464ed001b4c811cee | [
"MIT"
] | 1 | 2021-04-19T19:10:45.000Z | 2021-04-19T19:10:45.000Z | lib/mix/phx.bricks.gen.filter.ex | leanpanda-com/phoenix_bricks | 170a1e05842a69d5c594636464ed001b4c811cee | [
"MIT"
] | null | null | null | lib/mix/phx.bricks.gen.filter.ex | leanpanda-com/phoenix_bricks | 170a1e05842a69d5c594636464ed001b4c811cee | [
"MIT"
defmodule Mix.Tasks.Phx.Bricks.Gen.Filter do
  use Mix.Task

  alias Mix.PhoenixBricks.Schema

  @shortdoc "Generates params filter logic for a resource"

  @moduledoc """
  Generates a Filter schema around an Ecto schema
  """

  @doc false
  def run(args) do
    if Mix.Project.umbrella?() do
      Mix.raise(
        "mix phx.bricks.gen.filter must be invoked from within your *_web application root directory"
      )
    end

    args
    |> build()
    |> create_filter_file()
  end

  # Turn the raw CLI arguments into a validated Schema struct.
  defp build(args) do
    {_opts, parsed} = OptionParser.parse!(args, strict: [])
    [schema_name | filters] = validate_args!(parsed)
    Schema.new(schema_name, filters)
  end

  # Render the filter template next to the schema's own file.
  defp create_filter_file(%Schema{base_file_path: base_file_path} = schema) do
    Mix.Phoenix.copy_from(
      [".", :phoenix_bricks],
      "priv/templates/phx.bricks.gen",
      [schema: schema],
      [{:eex, "filter.ex", "#{base_file_path}_filter.ex"}]
    )
  end

  defp validate_args!([]), do: raise_with_help("Schema name not provided")

  defp validate_args!([schema_name | filters] = args) do
    # raise_with_help/1 raises, so only valid args fall through.
    cond do
      not Schema.valid_schema_name?(schema_name) -> raise_with_help("Schema name not valid")
      not Schema.valid_fields?(filters) -> raise_with_help("Fields not valid")
      true -> args
    end
  end

  @spec raise_with_help(String.t()) :: no_return()
  defp raise_with_help(message) do
    Mix.raise("""
    #{message}
    mix phx.bricks.gen.filter expects a schema module name.
    For example:
    mix phx.bricks.gen.filter Product
    The filter serves as schema for filter form and provides a keyword list of
    filters parsed from params.
    """)
  end
end
| 26.112903 | 101 | 0.675726 |
0315e7d86f560755a8126f14b67f4960efc15bb3 | 1,412 | ex | Elixir | lib/scenic/primitive/line.ex | zacck/scenic | 5f2170b7fba63b0af597ddeb3107fb1ffb79c2fe | [
"Apache-2.0"
] | null | null | null | lib/scenic/primitive/line.ex | zacck/scenic | 5f2170b7fba63b0af597ddeb3107fb1ffb79c2fe | [
"Apache-2.0"
] | null | null | null | lib/scenic/primitive/line.ex | zacck/scenic | 5f2170b7fba63b0af597ddeb3107fb1ffb79c2fe | [
"Apache-2.0"
] | null | null | null | #
# Created by Boyd Multerer on 5/6/17.
# Copyright © 2017 Kry10 Industries. All rights reserved.
#
defmodule Scenic.Primitive.Line do
  @moduledoc false
  use Scenic.Primitive
  # import IEx

  # Styles a Line primitive accepts.
  @styles [:hidden, :stroke, :cap]

  # ============================================================================
  # data verification and serialization

  # --------------------------------------------------------
  # Error text shown to the developer when invalid line data is supplied.
  def info(data),
    do: """
    #{IO.ANSI.red()}#{__MODULE__} data must be two points: {{x0,y0}, {x1,y1}}
    #{IO.ANSI.yellow()}Received: #{inspect(data)}
    #{IO.ANSI.default_color()}
    """

  # --------------------------------------------------------
  # Line data is valid when both endpoints are numeric coordinate pairs.
  def verify({{x0, y0}, {x1, y1}} = data)
      when is_number(x0) and is_number(y0) and is_number(x1) and is_number(y1),
      do: {:ok, data}

  def verify(_), do: :invalid_data

  # ============================================================================
  @spec valid_styles() :: [:cap | :hidden | :stroke, ...]
  def valid_styles, do: @styles

  # ============================================================================
  # --------------------------------------------------------
  # A line pins (rotates/scales) around its midpoint by default.
  def default_pin(data), do: centroid(data)

  # --------------------------------------------------------
  # Midpoint of the two endpoints.
  def centroid({{x0, y0}, {x1, y1}}) do
    {(x0 + x1) / 2, (y0 + y1) / 2}
  end
end
| 27.153846 | 80 | 0.391643 |
031617b1871cd4a61f45a78b7921b64cafc7509a | 160 | ex | Elixir | spec/support/fixture/aggregate/counter/command/initialize.ex | Apemb/Casus | 2402bee198c4ccbdd3c51d35e6e1d2bdad987636 | [
"MIT"
] | 3 | 2020-06-22T13:47:04.000Z | 2020-06-23T11:57:41.000Z | spec/support/fixture/aggregate/counter/command/initialize.ex | Apemb/Casus | 2402bee198c4ccbdd3c51d35e6e1d2bdad987636 | [
"MIT"
] | null | null | null | spec/support/fixture/aggregate/counter/command/initialize.ex | Apemb/Casus | 2402bee198c4ccbdd3c51d35e6e1d2bdad987636 | [
"MIT"
defmodule Fixture.Counter.Command.Initialize do
  @moduledoc false
  # Test-fixture command; both fields must be supplied at build time.
  @enforce_keys [:id, :initial_counter_value]
  defstruct id: nil, initial_counter_value: nil
end
| 22.857143 | 47 | 0.7875 |
03163f79ee4a61d304c30975bfd925bc3b2b8cb6 | 556 | ex | Elixir | lib/cog/util/factory_sup.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 1,003 | 2016-02-23T17:21:12.000Z | 2022-02-20T14:39:35.000Z | lib/cog/util/factory_sup.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 906 | 2016-02-22T22:54:19.000Z | 2022-03-11T15:19:43.000Z | lib/cog/util/factory_sup.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
defmodule Cog.Util.FactorySup do
  @moduledoc """
  Boilerplate for "factory" supervisors: `use`-ing this module defines a
  `:simple_one_for_one` supervisor that spawns temporary workers of a
  fixed module on demand.

  Options:

    * `:worker` - (required) the worker module to start
    * `:args`   - extra arguments passed to the worker (default `[]`)
  """

  defmacro __using__(opts) do
    # Resolved at compile time of the using module.
    worker_mod = Keyword.fetch!(opts, :worker)
    worker_args = Keyword.get(opts, :args, [])

    quote do
      use Supervisor

      def start_link() do
        Supervisor.start_link(__MODULE__, [], name: __MODULE__)
      end

      def init(_) do
        # Temporary + brutal_kill: workers are never restarted and are
        # killed outright on shutdown; max_restarts: 0 keeps the
        # supervisor itself from cycling on worker crashes.
        children = [worker(unquote(worker_mod), unquote(worker_args), restart: :temporary, shutdown: :brutal_kill)]
        supervise(children, strategy: :simple_one_for_one, max_restarts: 0, max_seconds: 1)
      end
    end
  end
end
| 22.24 | 115 | 0.661871 |
03164eea541d04ebc832ebf84493335048306ddc | 262 | exs | Elixir | config/test.exs | flat235/upman | c480bc102ea20ceb597d261a290e07a1f6fa3e8a | [
"Apache-2.0"
] | 4 | 2018-08-10T20:41:49.000Z | 2018-11-29T15:56:05.000Z | config/test.exs | flat235/upman | c480bc102ea20ceb597d261a290e07a1f6fa3e8a | [
"Apache-2.0"
] | 6 | 2018-06-11T16:03:12.000Z | 2020-04-06T00:08:15.000Z | config/test.exs | flat235/upman | c480bc102ea20ceb597d261a290e07a1f6fa3e8a | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :upman, UpmanWeb.Endpoint,
http: [port: 4001],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 23.818182 | 56 | 0.732824 |
03165523b9890486a147c16ef85e779309857d62 | 320 | ex | Elixir | lib/podcatcher/repo.ex | danjac/podcatcher | 748cf7419aebfff9216e7ff9353a5bdb46d3d7b1 | [
"MIT"
] | null | null | null | lib/podcatcher/repo.ex | danjac/podcatcher | 748cf7419aebfff9216e7ff9353a5bdb46d3d7b1 | [
"MIT"
] | null | null | null | lib/podcatcher/repo.ex | danjac/podcatcher | 748cf7419aebfff9216e7ff9353a5bdb46d3d7b1 | [
"MIT"
] | null | null | null | defmodule Podcatcher.Repo do
use Ecto.Repo, otp_app: :podcatcher
use Scrivener, page_size: 15, max_page_size: 15
@doc """
Dynamically loads the repository url from the
DATABASE_URL environment variable.
"""
def init(_, opts) do
{:ok, Keyword.put(opts, :url, System.get_env("DATABASE_URL"))}
end
end
| 24.615385 | 66 | 0.7125 |
0316615a5873d07ac69134dd92839bf64cdb361a | 594 | exs | Elixir | lesson_07/demo/auth_umbrella/mix.exs | martijnmeeldijk/ip_major | 867f09975aa8db0b308081216ace639c5677446b | [
"BSD-3-Clause"
] | 1 | 2021-09-22T09:56:35.000Z | 2021-09-22T09:56:35.000Z | lesson_07/demo/auth_umbrella/mix.exs | martijnmeeldijk/ip_major | 867f09975aa8db0b308081216ace639c5677446b | [
"BSD-3-Clause"
] | 7 | 2020-03-14T19:30:29.000Z | 2022-02-27T01:20:40.000Z | lesson_07/demo/auth_umbrella/mix.exs | martijnmeeldijk/ip_major | 867f09975aa8db0b308081216ace639c5677446b | [
"BSD-3-Clause"
] | 11 | 2020-02-13T14:52:45.000Z | 2020-08-03T12:18:56.000Z | defmodule Auth.Umbrella.MixProject do
use Mix.Project
def project do
[
apps_path: "apps",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options.
#
# Dependencies listed here are available only for this project
# and cannot be accessed from applications inside the apps folder
defp deps do
[]
end
end
| 21.214286 | 77 | 0.626263 |
0316688a0b2055b62f7264b83a6f292cee08a230 | 723 | ex | Elixir | lib/systemstats_web/gettext.ex | Dhall777/systemstats | 380426af8fc898521201311b11881cc8d2db3388 | [
"BSD-3-Clause"
] | null | null | null | lib/systemstats_web/gettext.ex | Dhall777/systemstats | 380426af8fc898521201311b11881cc8d2db3388 | [
"BSD-3-Clause"
] | null | null | null | lib/systemstats_web/gettext.ex | Dhall777/systemstats | 380426af8fc898521201311b11881cc8d2db3388 | [
"BSD-3-Clause"
] | null | null | null | defmodule SystemstatsWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import SystemstatsWeb.Gettext
# Simple translation
gettext("Here is the string to translate")
# Plural translation
ngettext("Here is the string to translate",
"Here are the strings to translate",
3)
# Domain-based translation
dgettext("errors", "Here is the error message to translate")
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :systemstats
end
| 28.92 | 72 | 0.683264 |
031675c05f84fd33bee8a96717af68b1fac386f2 | 872 | ex | Elixir | clients/tpu/lib/google_api/tpu/v1/metadata.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/tpu/lib/google_api/tpu/v1/metadata.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/tpu/lib/google_api/tpu/v1/metadata.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.TPU.V1 do
@moduledoc """
API client metadata for GoogleApi.TPU.V1.
"""
@discovery_revision "20220301"
def discovery_revision(), do: @discovery_revision
end
| 32.296296 | 74 | 0.755734 |
031676901f075e983449bdd3a464ee7aef3e5f46 | 11,910 | ex | Elixir | lib/fusion_auth/jwt.ex | Cogility/fusion_auth | bb765849d6e1d6bc105c2316ea5fe5dd792bd49e | [
"MIT"
] | 10 | 2020-11-13T08:33:22.000Z | 2021-08-09T17:37:09.000Z | lib/fusion_auth/jwt.ex | Cogility/fusion_auth | bb765849d6e1d6bc105c2316ea5fe5dd792bd49e | [
"MIT"
] | 11 | 2021-01-06T21:30:14.000Z | 2021-09-08T09:04:51.000Z | lib/fusion_auth/jwt.ex | Cogility/fusion_auth | bb765849d6e1d6bc105c2316ea5fe5dd792bd49e | [
"MIT"
] | null | null | null | defmodule FusionAuth.JWT do
@moduledoc """
The `FusionAuth.JWT` module provides access functions to the [FusionAuth JWT API](https://fusionauth.io/docs/v1/tech/apis/jwt).
Most functions require a Tesla Client struct created with `FusionAuth.client(base_url, api_key, tenant_id)`.
Those that use JWT Authentication may require a different `api_key` structure.
See [JWT Authentication](https://fusionauth.io/docs/v1/tech/apis/authentication#jwt-authentication) for examples of how you can send the JWT to FusionAuth.
"""
alias FusionAuth.Utils
@type client :: FusionAuth.client()
@type result :: FusionAuth.result()
@jwt_issue_url "/api/jwt/issue"
@jwt_reconcile_url "/api/jwt/reconcile"
@jwt_public_key_url "/api/jwt/public-key"
@jwt_refresh_url "/api/jwt/refresh"
@jwt_validate_url "/api/jwt/validate"
@doc """
Issue an Access Token by Application ID
This API is used to issue a new access token (JWT) using an existing access token (JWT).
This API provides the single signon mechanism for access tokens. For example you have an access token for application A and you need an access token for application B.
You may use this API to request an access token to application B with the authorized token to application A. The returned access token will have the same expiration of the one provided.
This API will use a JWT as authentication. See [JWT Authentication](https://fusionauth.io/docs/v1/tech/apis/authentication#jwt-authentication) for examples of how you can send the JWT to FusionAuth.
## Examples
iex> FusionAuth.JWT.issue_jwt_by_application_id(client, token, application_id, refresh_token)
{
:ok,
%{
"token" => "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6IjY1NTYzYjY5OSJ9.eyJhdWQiOiIzYzIxOWU1OC1lZDBlLTRiMTgtYWQ0OC1mNGY5Mjc5M2FlMzIiLCJleHAiOjE1OTE4MTk2ODksImlhdCI6MTU5MTgxNjcxMSwiaXNzIjoiYWNtZS5jb20iLCJzdWIiOiJmZmZjODY0OC1iYWIyLTRiZGQtYjJlYi1hNDhlODUzZDkyMTciLCJhdXRoZW50aWNhdGlvblR5cGUiOiJKV1RfU1NPIiwiZW1haWwiOiJhZGVsYWNydXpAY29naWxpdHkuY29tIiwiZW1haWxfdmVyaWZpZWQiOnRydWUsImFwcGxpY2F0aW9uSWQiOiIzYzIxOWU1OC1lZDBlLTRiMTgtYWQ0OC1mNGY5Mjc5M2FlMzIiLCJyb2xlcyI6WyJhZG1pbiJdfQ.c9Nyx9UucmALsIueJPWlOOXAC_FkcHeMCInrgdv3zQU"
},
%Tesla.Env{...}
}
iex>
For more information, visit the FusionAuth API Documentation for [Issue a JWT](https://fusionauth.io/docs/v1/tech/apis/jwt#issue-a-jwt).
"""
@spec issue_jwt_by_application_id(client(), String.t(), String.t(), String.t()) :: result()
def issue_jwt_by_application_id(client, token, application_id, refresh_token) do
client = jwt_client(client, "Bearer #{token}")
parameters = [
applicationId: application_id,
refreshToken: refresh_token
]
Tesla.get(
client,
@jwt_issue_url <> Utils.build_query_parameters(parameters)
)
|> FusionAuth.result()
end
@doc """
Reconcile a JWT
The Reconcile API is used to take a JWT issued by a third party identity provider as described by an [Identity Provider](https://fusionauth.io/docs/v1/tech/apis/identity-providers/) configuration and reconcile the User represented by the JWT to FusionAuth.
For more information, visit the FusionAuth API Documentation for [Reconcile a JWT](https://fusionauth.io/docs/v1/tech/apis/jwt#reconcile-a-jwt).
"""
@spec reconcile_jwt(client(), String.t(), map(), String.t()) :: result()
def reconcile_jwt(client, application_id, data, identity_provider_id) do
post_data = %{
applicationId: application_id,
data: data,
identityProviderId: identity_provider_id
}
Tesla.post(client, @jwt_reconcile_url, post_data)
|> FusionAuth.result()
end
@doc """
Retrieve all Public Keys
This API is used to retrieve Public Keys generated by FusionAuth, used used to cryptographically verify JWT signatures signed using the corresponding RSA or ECDSA private key.
For more information, visit the FusionAuth API Documentation for [Retrieve Public Keys](https://fusionauth.io/docs/v1/tech/apis/jwt#retrieve-public-keys).
"""
@spec get_public_keys(client()) :: result()
def get_public_keys(client) do
Tesla.get(client, @jwt_public_key_url)
|> FusionAuth.result()
end
@doc """
Retrieve a single Public Key for a specific Application by Application Id
For more information, visit the FusionAuth API Documentation for [Retrieve Public Keys](https://fusionauth.io/docs/v1/tech/apis/jwt#retrieve-public-keys).
"""
@spec get_public_key_by_application_id(client(), String.t()) :: result()
def get_public_key_by_application_id(client, application_id) do
parameters = [applicationId: application_id]
Tesla.get(client, @jwt_public_key_url <> Utils.build_query_parameters(parameters))
|> FusionAuth.result()
end
@doc """
Retrieve a single Public Key by Key Identifier
For more information, visit the FusionAuth API Documentation for [Retrieve Public Keys](https://fusionauth.io/docs/v1/tech/apis/jwt#retrieve-public-keys).
"""
@spec get_public_key_by_key_id(client(), String.t()) :: result()
def get_public_key_by_key_id(client, public_key_id) do
parameters = [kid: public_key_id]
Tesla.get(client, @jwt_public_key_url <> Utils.build_query_parameters(parameters))
|> FusionAuth.result()
end
@doc """
Request a new Access Token by presenting a valid Refresh Token
The refresh token may be provided either in the HTTP request body or as a cookie. If both are provided, the cookie will take precedence.
## Examples
iex> FusionAuth.JWT.refresh_jwt(client, refresh_token, token)
{
:ok,
%{
"token" => "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6IjY1NTYzYjY5OSJ9.eyJhdWQiOiJmN2E3MmFkMS1kZTZhLTQxMmYtYTM3Mi1lNjg5YTNiN2FkY2IiLCJleHAiOjE1OTE4MTk2ODksImlhdCI6MTU5MTgxNjA4OSwiaXNzIjoiYWNtZS5jb20iLCJzdWIiOiJmZmZjODY0OC1iYWIyLTRiZGQtYjJlYi1hNDhlODUzZDkyMTciLCJhdXRoZW50aWNhdGlvblR5cGUiOiJSRUZSRVNIX1RPS0VOIiwiZW1haWwiOiJhZGVsYWNydXpAY29naWxpdHkuY29tIiwiZW1haWxfdmVyaWZpZWQiOnRydWUsImFwcGxpY2F0aW9uSWQiOiJmN2E3MmFkMS1kZTZhLTQxMmYtYTM3Mi1lNjg5YTNiN2FkY2IiLCJyb2xlcyI6W119.5orARQLfaMYmoOLfxrcWMqRW9_eog5g5l4OivPovGEE"
},
%Tesla.Env{...}
}
For more information, visit the FusionAuth API Documentation for [Refresh a JWT](https://fusionauth.io/docs/v1/tech/apis/jwt#refresh-a-jwt).
"""
@spec refresh_jwt(client(), String.t(), String.t()) :: result()
def refresh_jwt(client, refresh_token, token) do
post_data = %{
refreshToken: refresh_token,
token: token
}
Tesla.post(client, @jwt_refresh_url, post_data)
|> FusionAuth.result()
end
[]
@doc """
Retrieve Refresh Tokens issued to a User by User ID
## Examples
iex> FusionAuth.JWT.get_user_refresh_tokens_by_user_id(client, user_id)
{
:ok,
%{
"refreshTokens" => [...]
},
%Tesla.Env{...}
}
For more information, visit the FusionAuth API Documentation for [Retrieve Refresh Tokens](https://fusionauth.io/docs/v1/tech/apis/jwt#retrieve-refresh-tokens).
"""
@spec get_user_refresh_tokens_by_user_id(client(), String.t()) :: result()
def get_user_refresh_tokens_by_user_id(client, user_id) do
parameters = [userId: user_id]
Tesla.get(client, @jwt_refresh_url <> Utils.build_query_parameters(parameters))
|> FusionAuth.result()
end
@doc """
Retrieve Refresh Tokens issued to a User
This API will use a JWT as authentication. See [JWT Authentication](https://fusionauth.io/docs/v1/tech/apis/authentication#jwt-authentication) for examples of how you can send the JWT to FusionAuth.
## Examples
iex> FusionAuth.JWT.get_user_refresh_tokens(client, token)
{
:ok,
%{
"refreshTokens" => [...]
},
%Tesla.Env{...}
}
For more information, visit the FusionAuth API Documentation for [Retrieve Refresh Tokens](https://fusionauth.io/docs/v1/tech/apis/jwt#retrieve-refresh-tokens).
"""
@spec get_user_refresh_tokens(client(), String.t()) :: result()
def get_user_refresh_tokens(client, token) do
client = jwt_client(client, "Bearer #{token}")
Tesla.get(
client,
@jwt_refresh_url
)
|> FusionAuth.result()
end
@doc """
Revoke all Refresh Tokens for an entire Application
## Examples
iex> JWT.revoke_refresh_tokens_by_application_id(client, application_id)
{
:ok,
"",
%Tesla.Env{...}
}
For more information, visit the FusionAuth API Documentation for [Revoke Refresh Tokens](https://fusionauth.io/docs/v1/tech/apis/jwt#revoke-refresh-tokens).
"""
@spec revoke_refresh_tokens_by_application_id(client(), String.t()) :: result()
def revoke_refresh_tokens_by_application_id(client, application_id) do
parameters = [applicationId: application_id]
Tesla.delete(client, @jwt_refresh_url <> Utils.build_query_parameters(parameters))
|> FusionAuth.result()
end
@doc """
Revoke all Refresh Tokens issued to a User
## Examples
iex> FusionAuth.JWT.revoke_refresh_token(client, user_id)
{
:ok,
"",
%Tesla.Env{...}
}
For more information, visit the FusionAuth API Documentation for [Revoke Refresh Tokens](https://fusionauth.io/docs/v1/tech/apis/jwt#revoke-refresh-tokens).
"""
@spec revoke_refresh_tokens_by_user_id(client(), String.t()) :: result()
def revoke_refresh_tokens_by_user_id(client, user_id) do
parameters = [userId: user_id]
Tesla.delete(client, @jwt_refresh_url <> Utils.build_query_parameters(parameters))
|> FusionAuth.result()
end
@doc """
Revoke a single Refresh Token
This API may be authenticated using an Access Token. See Authentication for examples of authenticating using an Access Token. The token owner must match the identity in the access token if provided to be successful.
## Examples
iex> FusionAuth.JWT.revoke_refresh_token(client, token)
{
:ok,
"",
%Tesla.Env{...}
}
For more information, visit the FusionAuth API Documentation for [Revoke Refresh Tokens](https://fusionauth.io/docs/v1/tech/apis/jwt#revoke-refresh-tokens).
"""
@spec revoke_refresh_token(client(), String.t()) :: result()
def revoke_refresh_token(client, token) do
parameters = [token: token]
Tesla.delete(client, @jwt_refresh_url <> Utils.build_query_parameters(parameters))
|> FusionAuth.result()
end
@doc """
Validate Access Token
The access token can be provided to the API using an HTTP request header, or a cookie. The response body will contain the decoded JWT payload.
## Examples
iex> FusionAuth.JWT.validate_jwt(client, token)
{
:ok,
%{
"jwt" => %{
"authenticationType" => "PASSWORD",
"email" => "[email protected]",
"email_verified" => true,
"exp" => 1591815558,
"iat" => 1591811958,
"iss" => "acme.com",
"sub" => "fffc8648-bab2-4bdd-b2eb-a48e853d9217"
}
},
%Tesla.Env{...}
}
For more information, visit the FusionAuth API Documentation for [Validate a JWT](https://fusionauth.io/docs/v1/tech/apis/jwt#validate-a-jwt).
"""
@spec validate_jwt(client(), String.t()) :: result()
def validate_jwt(client, token) do
client = jwt_client(client, "JWT #{token}")
Tesla.get(
client,
@jwt_validate_url
)
|> FusionAuth.result()
end
defp jwt_client(client, authorization) do
tenant_id = Application.get_env(:fusion_auth, :tenant_id)
config = Map.get(client, :pre)
headers =
{Tesla.Middleware.Headers, :call,
[
[
{"X-FusionAuth-TenantId", tenant_id},
{"Authorization", authorization}
]
]}
{_, config} = List.pop_at(config, -1)
Map.put(client, :pre, [headers | config])
end
end
| 37.570978 | 530 | 0.712175 |
03167c180d059c111bc6f05d04e1453cd834c38d | 6,906 | ex | Elixir | lib/transmission.ex | begedin/elixir-sparkpost | e8b94b9f8d33f165a1fe030ef52e0642bdd59c9d | [
"Apache-2.0"
] | 1 | 2021-01-08T18:16:23.000Z | 2021-01-08T18:16:23.000Z | lib/transmission.ex | begedin/elixir-sparkpost | e8b94b9f8d33f165a1fe030ef52e0642bdd59c9d | [
"Apache-2.0"
] | null | null | null | lib/transmission.ex | begedin/elixir-sparkpost | e8b94b9f8d33f165a1fe030ef52e0642bdd59c9d | [
"Apache-2.0"
] | null | null | null | defmodule SparkPost.Transmission do
@moduledoc """
The SparkPost Transmission API endpoint for sending email.
Use `SparkPost.Transmission.send/1` to send messages,
`SparkPost.Transmission.list/1` to list previous sends and
`SparkPost.Transmission.get/1` to retrieve details on a given transmission.
Check out the documentation for each function
or use the [SparkPost API reference](https://www.sparkPost.com/api#/reference/transmissions)
for details.
## Request Fields
Used in calls to `SparkPost.Transmission.send/1`.
- campaign_id
- return_path
- metadata
- substitution_data
- recipients
- content
Returned by `SparkPost.Transmission.list/1`.
- id
- campaign_id
- description
- content
Returned by `SparkPost.Transmission.get/1`.
- id
- description
- state
- campaign_id
- content
- return_path
- rcpt_list_chunk_size
- rcpt_list_total_chunks
- num_rcpts
- num_generated
- num_failed_gen
- generation_start_time
- generation_end_time
- substitution_data
- metadata
- options
"""
defstruct options: %SparkPost.Transmission.Options{},
campaign_id: nil,
return_path: nil,
metadata: nil,
substitution_data: nil,
recipients: :required,
content: :required,
# System generated fields from this point on
id: nil,
description: nil,
state: nil,
rcpt_list_chunk_size: nil,
rcp_list_total_chunks: nil,
num_rcpts: nil,
num_generated: nil,
num_failed_gen: nil,
generation_start_time: nil,
generation_end_time: nil
alias SparkPost.{
Content,
Endpoint,
Recipient,
Transmission
}
@doc """
Create a new transmission and send some email.
## Parameters
- %SparkPost.Transmission{} consisting of:
- recipients: ["email@address", %SparkPost.Recipient{}, ...] or %SparkPost.Recipient.ListRef{}
- content: %SparkPost.Content.Inline{}, %SparkPost.Content.Raw{} or %SparkPost.Content.TemplateRef{}
- options: %SparkPost.Transmission.Options{}
- campaign_id: campaign identifier (string)
- return_path: envelope FROM address, available in Enterprise only (email address string)
- metadata: transmission-level metadata k/v pairs (keyword)
- substitution_data: transmission-level substitution_data k/v pairs (keyword)
## Examples
### Send a message to a single recipient with inline text and HTML content
```
SparkPost.Transmission.send(%SparkPost.Transmission{
recipients: ["[email protected]"],
content: %SparkPost.Content.Inline{
from: "[email protected]",
subject: "A subject",
text: "Text body",
html: "<b>HTML</b> body"
}
})
```
#=>
```
%SparkPost.Transmission.Response{
id: "102258889940193104",
total_accepted_recipients: 1,
total_rejected_recipients: 0
}
```
### Send a message to 2 recipients using a stored message template
SparkPost.Transmission.send(%SparkPost.Transmission{
recipients: ["[email protected]", "[email protected]"],
content: %SparkPost.Content.TemplateRef{template_id: "test-template-1"}
})
#=>
%SparkPost.Transmission.Response{
id: "102258889940193105",
total_accepted_recipients: 2,
total_rejected_recipients: 0
}
### Send a message with an attachment
SparkPost.Transmission.send(%SparkPost.Transmission{
recipients: ["[email protected]"],
content: %SparkPost.Content.Inline{
subject: "Now with attachments!",
text: "There is an attachment with this message",
attachments: [
SparkPost.Content.to_attachment("cat.jpg", "image/jpeg", File.read!("cat.jpg"))
]
}
})
#=>
%SparkPost.Transmission.Response{
id: "102258889940193106",
total_accepted_recipients: 1,
total_rejected_recipients: 0
}
"""
def send(%__MODULE__{} = body) do
body = %{
body
| recipients: Recipient.to_recipient_list(body.recipients),
content: Content.to_content(body.content)
}
response = Endpoint.request(:post, "transmissions", body)
Endpoint.marshal_response(response, Transmission.Response)
end
@doc """
Retrieve the details of an existing transmission.
## Parameters
- transmission ID: identifier of the transmission to retrieve
## Example: Fetch a transmission
SparkPost.Transmission.get("102258889940193105")
#=>
%SparkPost.Transmission{
campaign_id: "",
content: %{template_id: "inline", template_version: 0, use_draft_template: false},
description: "",
generation_end_time: "2016-01-14T12:52:05+00:00",
generation_start_time: "2016-01-14T12:52:05+00:00",
id: "48215348926834924",
metadata: "",
num_failed_gen: 0,
num_generated: 2,
num_rcpts: 2,
options: %{click_tracking: true, conversion_tracking: "", open_tracking: true},
rcp_list_total_chunks: nil,
rcpt_list_chunk_size: 100,
recipients: :required,
return_path: nil,
state: "Success",
substitution_data: ""
}
"""
def get(transid) do
response = Endpoint.request(:get, "transmissions/" <> transid)
Endpoint.marshal_response(response, __MODULE__, :transmission)
end
@doc """
List all multi-recipient transmissions, possibly filtered by campaign_id and/or content.
## Parameters
- query filters to narrow the list (keyword)
- campaign_id
- template_id
## Example: List all multi-recipient transmissions:
SparkPost.Transmission.list()
#=>
[
%SparkPost.Transmission{
campaign_id: "",
content: %{template_id: "inline"},
description: "",
generation_end_time: nil,
generation_start_time: nil,
id: "102258558346809186",
metadata: nil,
num_failed_gen: nil,
num_generated: nil,
num_rcpts: nil,
options: :required,
rcp_list_total_chunks: nil,
rcpt_list_chunk_size: nil,
recipients: :required,
return_path: nil,
state: "Success",
substitution_data: nil
},
%SparkPost.Transmission{
campaign_id: "",
content: %{template_id: "inline"},
description: "",
generation_end_time: nil,
generation_start_time: nil,
id: "48215348926834924",
metadata: nil,
num_failed_gen: nil,
num_generated: nil,
num_rcpts: nil,
options: :required,
rcp_list_total_chunks: nil,
rcpt_list_chunk_size: nil,
recipients: :required,
return_path: nil,
state: "Success",
substitution_data: nil
}
]
"""
def list(filters \\ []) do
response = Endpoint.request(:get, "transmissions", %{}, %{}, params: filters)
case response do
%Endpoint.Response{} ->
Enum.map(response.results, fn trans -> struct(__MODULE__, trans) end)
_ ->
response
end
end
end
| 26.060377 | 104 | 0.657399 |
03167e1b69cd0724864c070ce0e000ef64cf2e71 | 645 | exs | Elixir | deps/elixir_make/mix.exs | JoakimEskils/elixir-absinthe | d81e24ec7c7b1164e6d152101dd50422f192d7e9 | [
"MIT"
] | null | null | null | deps/elixir_make/mix.exs | JoakimEskils/elixir-absinthe | d81e24ec7c7b1164e6d152101dd50422f192d7e9 | [
"MIT"
] | null | null | null | deps/elixir_make/mix.exs | JoakimEskils/elixir-absinthe | d81e24ec7c7b1164e6d152101dd50422f192d7e9 | [
"MIT"
] | null | null | null | defmodule ElixirMake.Mixfile do
use Mix.Project
def project do
[app: :elixir_make,
version: "0.4.0",
elixir: "~> 1.1",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: "A Make compiler for Mix",
package: package(),
deps: []]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[applications: []]
end
defp package do
%{licenses: ["Apache 2"],
links: %{"GitHub" => "https://github.com/elixir-lang/elixir_make"},
maintainers: ["Andrea Leopardi", "José Valim"]}
end
end
| 23.035714 | 73 | 0.623256 |
0316aca6a81a5dbaa6cbfb6743f21296ec573b1f | 2,822 | exs | Elixir | test/oban/queue/executor_test.exs | moogle19/oban | b63036db6fae389832dae3eb9209d33ce4d5f471 | [
"Apache-2.0"
] | null | null | null | test/oban/queue/executor_test.exs | moogle19/oban | b63036db6fae389832dae3eb9209d33ce4d5f471 | [
"Apache-2.0"
] | null | null | null | test/oban/queue/executor_test.exs | moogle19/oban | b63036db6fae389832dae3eb9209d33ce4d5f471 | [
"Apache-2.0"
] | null | null | null | defmodule Oban.Queue.ExecutorTest do
use Oban.Case, async: true
import ExUnit.CaptureLog
alias Oban.{CrashError, PerformError}
alias Oban.Queue.Executor
defmodule Worker do
use Oban.Worker
@impl Worker
def perform(%{args: %{"mode" => "ok"}}), do: :ok
def perform(%{args: %{"mode" => "result"}}), do: {:ok, :result}
def perform(%{args: %{"mode" => "warn"}}), do: {:bad, :this_will_warn}
def perform(%{args: %{"mode" => "raise"}}), do: raise(ArgumentError)
def perform(%{args: %{"mode" => "catch"}}), do: throw(:no_reason)
def perform(%{args: %{"mode" => "error"}}), do: {:error, "no reason"}
def perform(%{args: %{"mode" => "sleep"}}), do: Process.sleep(10)
end
@conf Config.new(repo: Repo)
describe "perform/1" do
test "accepting :ok as a success" do
assert %{state: :success, result: :ok} = call_with_mode("ok")
assert %{state: :success, result: {:ok, :result}} = call_with_mode("result")
end
test "raising, catching and error tuples are failures" do
assert %{state: :failure} = call_with_mode("raise")
assert %{state: :failure, error: %CrashError{}} = call_with_mode("catch")
assert %{state: :failure, error: %PerformError{}} = call_with_mode("error")
end
test "inability to resolve a worker is a failure" do
job = %Job{args: %{}, worker: "Not.A.Real.Worker"}
assert %{state: :failure, error: %RuntimeError{message: "unknown worker" <> _}} =
@conf
|> Executor.new(job)
|> Executor.resolve_worker()
end
test "warning on unexpected return values" do
message = capture_log(fn -> %{state: :success} = call_with_mode("warn") end)
assert message =~ "Expected #{__MODULE__}.Worker.perform/1"
assert message =~ "{:bad, :this_will_warn}"
end
test "reporting duration and queue_time measurements" do
now = DateTime.utc_now()
job = %Job{
args: %{"mode" => "sleep"},
worker: to_string(Worker),
attempted_at: DateTime.add(now, 30, :millisecond),
scheduled_at: now
}
assert %{duration: duration, queue_time: queue_time} =
@conf
|> Executor.new(job)
|> Executor.resolve_worker()
|> Executor.perform()
|> Executor.record_finished()
duration_ms = System.convert_time_unit(duration, :native, :millisecond)
queue_time_ms = System.convert_time_unit(queue_time, :native, :millisecond)
assert_in_delta duration_ms, 10, 20
assert_in_delta queue_time_ms, 30, 20
end
end
defp call_with_mode(mode) do
job = %Job{args: %{"mode" => mode}, worker: to_string(Worker)}
@conf
|> Executor.new(job)
|> Executor.resolve_worker()
|> Executor.perform()
end
end
| 32.813953 | 87 | 0.607725 |
0316b962cbae4a6933bca3b3f487fd2b1cef7113 | 5,325 | ex | Elixir | test/support/off_broadway_tortoise_case.ex | tymoor/off_broadway_mqtt | 2066a62a400ea40a86fc8fdd1d588d8406bd95be | [
"Apache-2.0"
] | 11 | 2019-07-03T00:54:04.000Z | 2021-12-13T22:24:09.000Z | test/support/off_broadway_tortoise_case.ex | tymoor/off_broadway_mqtt | 2066a62a400ea40a86fc8fdd1d588d8406bd95be | [
"Apache-2.0"
] | 1 | 2020-10-17T02:25:56.000Z | 2020-11-14T01:51:15.000Z | test/support/off_broadway_tortoise_case.ex | tymoor/off_broadway_mqtt | 2066a62a400ea40a86fc8fdd1d588d8406bd95be | [
"Apache-2.0"
] | 5 | 2019-08-09T03:14:29.000Z | 2022-03-22T21:39:02.000Z | defmodule OffBroadway.MQTTCase do
@moduledoc """
Test templace for testing aspects of this library.
Provides utility to start all the necessary dependencies.
"""
use ExUnit.CaseTemplate
alias OffBroadway.MQTT.Config
alias OffBroadway.MQTT.Queue
using _opts do
quote do
import OffBroadway.MQTT.Assertions
import OffBroadway.MQTT.Factory
import OffBroadway.MQTTCase
alias OffBroadway.MQTT
alias OffBroadway.MQTT.Data
alias OffBroadway.MQTT.Queue
end
end
setup tags do
tags
|> subscribe_telemetry_event_tag
|> start_registry_tag
|> start_supervisor_tag
|> start_mqtt_client_tag
|> build_config_tag
|> start_queue_tag
end
defp subscribe_telemetry_event_tag(tags) do
if tags[:subscribe_telemetry_event],
do: subscribe_telemetry_event(tags),
else: tags
end
defp build_config_tag(tags) do
if tags[:build_config],
do: build_config(tags),
else: tags
end
defp start_registry_tag(tags) do
if tags[:start_registry],
do: start_registry(tags),
else: tags
end
defp start_supervisor_tag(tags) do
if tags[:start_supervisor],
do: start_supervisor(tags),
else: tags
end
defp start_mqtt_client_tag(tags) do
if tags[:start_mqtt_client],
do: start_mqtt_client(tags),
else: tags
end
defp start_queue_tag(tags) do
if tags[:start_queue],
do: start_queue(tags),
else: tags
end
@doc """
Starts a `Registry` and puts it's registered name under `registry`
to the context.
"""
def start_registry(%{test: name_prefix} = context) do
name = :"#{name_prefix} registry"
{:ok, _} = start_supervised({Registry, [name: name, keys: :unique]})
context
|> Map.put(:registry, name)
end
@doc """
Starts a `Registry` and puts it's registered name under `supervisor`
to the context.
"""
def start_supervisor(%{test: name_prefix} = context) do
name = :"#{name_prefix} supervisor"
{:ok, _} =
start_supervised(
{DynamicSupervisor, [name: name, strategy: :one_for_one]}
)
context
|> Map.put(:supervisor, name)
end
@doc """
Starts a `Tortoise.Connection` and puts it's client_id under `test_client_id`
to the context.
"""
def start_mqtt_client(context) do
{client_id, subscriptions} =
context
|> Map.get(:start_mqtt_client, [])
|> case do
[] -> {build_test_client_id(), []}
{client_id, subscriptions} -> {client_id, subscriptions}
client_id -> {client_id, []}
end
server_opts =
:off_broadway_mqtt
|> Application.get_all_env()
|> Keyword.get(:server_opts, [])
|> Keyword.drop([:protocol])
tortoise_opts = [
client_id: client_id,
handler: {OffBroadway.MQTT.TestHandler, [pid: self()]},
server: {Tortoise.Transport.Tcp, server_opts},
subscriptions: subscriptions
]
{:ok, _} = Tortoise.Connection.start_link(tortoise_opts)
receive do
{:test_mqtt_client, :up} -> :ok
after
5000 ->
raise "test mqtt client connection timed out: #{inspect(server_opts)}"
end
context
|> Map.put(:test_client_id, client_id)
end
@doc """
Starts a `#{inspect(Queue)}` and puts it's registered name under `queue` to the
context.
"""
def start_queue(%{test: test_name} = context) do
config = context[:config] || config_from_context(context)
queue_name =
context
|> Map.get(:start_queue, test_name)
|> case do
{:via, _, _} = reg_name ->
reg_name
true ->
OffBroadway.MQTT.queue_name(config, to_string(test_name))
name ->
OffBroadway.MQTT.queue_name(config, name)
end
{:via, _, {_, topic}} = queue_name
{:ok, pid} = start_supervised({Queue, [config, queue_name]})
context
|> Map.put(:queue, queue_name)
|> Map.put(:queue_topic, topic)
|> Map.put(:pid, pid)
end
@doc """
Builds a random but unique client id.
"""
def build_test_client_id do
"test_client_#{System.unique_integer([:positive])}"
end
@doc """
Builds a configuration from the context.
It adds the supervisor and registry if avalilable.
"""
def config_from_context(%{registry: reg, supervisor: sup}) do
Config.new_from_app_config(
queue_registry: reg,
queue_supervisor: sup
)
end
def config_from_context(_) do
Config.new_from_app_config()
end
@doc """
Subscribes to the event given with `:subscribe_telemetry_event` and forwards
them to the test process.
"""
def subscribe_telemetry_event(context) do
event =
case context[:subscribe_telemetry_event] do
[_ | _] = event -> event
_ -> raise "no telemetry event given to subscribe!"
end
test_pid = self()
handle_event = fn event, topic, data, extra ->
send(test_pid, {:telemetry, event, topic, data, extra})
end
context.test
|> to_string
|> :telemetry.attach(event, handle_event, nil)
context
|> Map.put(:event, event)
end
@doc """
Builds a config from the values in the context and adds it under the `:config`
key to the context.
"""
def build_config(context) do
Map.put(context, :config, config_from_context(context))
end
end
| 23.45815 | 81 | 0.646197 |
0316e6d8d90cda9c2965a90c84738dd0e8982e53 | 242 | ex | Elixir | src/elixir/lib/mix/tasks/copy_json.ex | S-Dey/ifsc | 2aa24afbcaacdab0ee1d4327496990ccc94b7885 | [
"MIT"
] | null | null | null | src/elixir/lib/mix/tasks/copy_json.ex | S-Dey/ifsc | 2aa24afbcaacdab0ee1d4327496990ccc94b7885 | [
"MIT"
] | 4 | 2020-12-31T09:10:55.000Z | 2022-02-26T10:09:48.000Z | src/elixir/lib/mix/tasks/copy_json.ex | S-Dey/ifsc | 2aa24afbcaacdab0ee1d4327496990ccc94b7885 | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.Ifsc.CopyJson do
use Mix.Task
def run(_) do
File.mkdir_p!("priv/ifsc-data")
Enum.map(
~w(banknames.json IFSC.json sublet.json),
&(File.copy("src/" <> &1, "priv/ifsc-data/" <> &1))
)
end
end
| 18.615385 | 57 | 0.590909 |
0316ecdc1f32f35ff8caaeabe6b22144af0c2718 | 6,143 | exs | Elixir | test/lib/bamboo/postmark_adapter_test.exs | epogue/bamboo_postmark | 8f281f2040c03890b026c982432dbf695046f45a | [
"MIT"
] | null | null | null | test/lib/bamboo/postmark_adapter_test.exs | epogue/bamboo_postmark | 8f281f2040c03890b026c982432dbf695046f45a | [
"MIT"
] | null | null | null | test/lib/bamboo/postmark_adapter_test.exs | epogue/bamboo_postmark | 8f281f2040c03890b026c982432dbf695046f45a | [
"MIT"
] | null | null | null | defmodule Bamboo.PostmarkAdapterTest do
use ExUnit.Case
alias Bamboo.Email
alias Bamboo.PostmarkAdapter
alias Bamboo.PostmarkHelper
@config %{adapter: PostmarkAdapter, api_key: "123_abc"}
@config_with_bad_key %{adapter: PostmarkAdapter, api_key: nil}
defmodule FakePostmark do
use Plug.Router
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Poison
plug :match
plug :dispatch
def start_server(parent) do
Agent.start_link(fn -> Map.new end, name: __MODULE__)
Agent.update(__MODULE__, &Map.put(&1, :parent, parent))
port = get_free_port()
Application.put_env(:bamboo, :postmark_base_uri, "http://localhost:#{port}")
Plug.Adapters.Cowboy.http __MODULE__, [], port: port, ref: __MODULE__
end
defp get_free_port do
{:ok, socket} = :ranch_tcp.listen(port: 0)
{:ok, port} = :inet.port(socket)
:erlang.port_close(socket)
port
end
def shutdown do
Plug.Adapters.Cowboy.shutdown __MODULE__
end
post "email" do
case get_in(conn.params, ["From"]) do
"INVALID_EMAIL" ->
conn |> send_resp(500, "Error!!") |> send_to_parent
_ ->
conn |> send_resp(200, "SENT") |> send_to_parent
end
end
post "email/withTemplate" do
case get_in(conn.params, ["From"]) do
"INVALID_EMAIL" ->
conn |> send_resp(500, "Error!!") |> send_to_parent
_ ->
conn |> send_resp(200, "SENT") |> send_to_parent
end
end
defp send_to_parent(conn) do
parent = Agent.get(__MODULE__, fn(set) -> Map.get(set, :parent) end)
send parent, {:fake_postmark, conn}
conn
end
end
setup do
FakePostmark.start_server(self())
on_exit fn ->
FakePostmark.shutdown
end
:ok
end
test "raises if the api key is nil" do
assert_raise ArgumentError, ~r/no API key set/, fn ->
PostmarkAdapter.deliver(new_email(from: "[email protected]"), @config_with_bad_key)
end
assert_raise ArgumentError, ~r/no API key set/, fn ->
PostmarkAdapter.handle_config(%{})
end
end
test "deliver/2 passes the request_options to hackney" do
request_options = [recv_timeout: 0]
config = Map.put(@config, :request_options, request_options)
assert_raise Bamboo.PostmarkAdapter.ApiError, fn ->
PostmarkAdapter.deliver(new_email(), config)
end
end
test "deliver/2 returns the textual body of the request" do
response = PostmarkAdapter.deliver(new_email(), @config)
assert response.body == "SENT"
end
test "deliver/2 makes the request to the right url" do
PostmarkAdapter.deliver(new_email(), @config)
assert_receive {:fake_postmark, %{request_path: request_path}}
assert request_path == "/email"
end
test "deliver/2 sends the to the right url for templates" do
new_email() |> PostmarkHelper.template("hello") |> PostmarkAdapter.deliver(@config)
assert_receive {:fake_postmark, %{request_path: request_path}}
assert request_path == "/email/withTemplate"
end
test "deliver/2 sends from, html and text body, subject, and headers" do
email =
[
from: {"From", "[email protected]"},
subject: "My Subject",
text_body: "TEXT BODY",
html_body: "HTML BODY",
]
|> new_email()
|> Email.put_header("Reply-To", "[email protected]")
PostmarkAdapter.deliver(email, @config)
assert_receive {:fake_postmark, %{params: params}}
assert params["From"] == "#{elem(email.from, 0)} <#{elem(email.from, 1)}>"
assert params["Subject"] == email.subject
assert params["TextBody"] == email.text_body
assert params["HtmlBody"] == email.html_body
assert params["Headers"] ==
[%{"Name" => "Reply-To", "Value" => "[email protected]"}]
end
test "deliver/2 correctly formats recipients" do
email = new_email(
to: [{"To", "[email protected]"}],
cc: [{"CC", "[email protected]"}],
bcc: [{"BCC", "[email protected]"}],
)
PostmarkAdapter.deliver(email, @config)
assert_receive {:fake_postmark, %{params: params}}
assert params["To"] == "To <[email protected]>"
assert params["Bcc"] == "BCC <[email protected]>"
assert params["Cc"] == "CC <[email protected]>"
end
test "deliver/2 puts template name and empty content" do
email = PostmarkHelper.template(new_email(), "hello")
PostmarkAdapter.deliver(email, @config)
assert_receive {:fake_postmark, %{params: %{"TemplateId" => template_id,
"TemplateModel" => template_model}}}
assert template_id == "hello"
assert template_model == %{}
end
test "deliver/2 puts template name and content" do
email = PostmarkHelper.template(new_email(), "hello", [
%{name: 'example name', content: 'example content'}
])
PostmarkAdapter.deliver(email, @config)
assert_receive {:fake_postmark, %{params: %{"TemplateId" => template_id,
"TemplateModel" => template_model}}}
assert template_id == "hello"
assert template_model == [%{"content" => 'example content',
"name" => 'example name'}]
end
test "deliver/2 puts tag param" do
email = PostmarkHelper.tag(new_email(), "some_tag")
PostmarkAdapter.deliver(email, @config)
assert_receive {:fake_postmark, %{params: %{"Tag" => "some_tag"}}}
end
test "deliver/2 puts tracking params" do
email =
new_email()
|> PostmarkHelper.template("hello")
|> PostmarkHelper.put_param("TrackOpens", true)
|> PostmarkHelper.put_param("TrackLinks", "HtmlOnly")
PostmarkAdapter.deliver(email, @config)
assert_receive {:fake_postmark, %{params: %{
"TrackLinks" => "HtmlOnly", "TrackOpens" => true, "TemplateId" => "hello"}
}}
end
test "raises if the response is not a success" do
email = new_email(from: "INVALID_EMAIL")
assert_raise Bamboo.PostmarkAdapter.ApiError, fn ->
PostmarkAdapter.deliver(email, @config)
end
end
defp new_email(attrs \\ []) do
[from: "[email protected]", to: []]
|> Keyword.merge(attrs)
|> Email.new_email()
|> Bamboo.Mailer.normalize_addresses()
end
end
| 28.705607 | 87 | 0.64252 |
03170b448b4e47923886cced0de998579a06b4b3 | 7,215 | ex | Elixir | lib/mix/lib/mix/compilers/elixir.ex | Tica2/elixir | 6cf1dcbfe4572fc75619f05e40c10fd0844083ef | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/compilers/elixir.ex | Tica2/elixir | 6cf1dcbfe4572fc75619f05e40c10fd0844083ef | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/compilers/elixir.ex | Tica2/elixir | 6cf1dcbfe4572fc75619f05e40c10fd0844083ef | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Compilers.Elixir do
@moduledoc false
@manifest_vsn 1
@doc """
Compiles stale Elixir files.
It expects a manifest file, the source directories, the extensions
to read in sources, the destination directory, a flag to know if
compilation is being forced or not and a callback to be invoked
once (and only if) compilation starts.
The manifest is written down with information including dependencies
between modules, which helps it recompile only the modules that
have changed at runtime.
"""
def compile(manifest, srcs, skip, exts, dest, force, on_start) do
keep = srcs -- skip
all = Mix.Utils.extract_files(keep, exts)
{all_entries, skip_entries} = parse_manifest(manifest, keep)
removed =
for {_b, _m, source, _cd, _rd, _f, _bin} <- all_entries, not(source in all), do: source
changed =
if force do
# A config, path dependency or manifest has
# changed, let's just compile everything
all
else
modified = Mix.Utils.last_modified(manifest)
all_mtimes = mtimes(all_entries)
# Otherwise let's start with the new ones
# plus the ones that have changed
for(source <- all,
not Enum.any?(all_entries, fn {_b, _m, s, _cd, _rd, _f, _bin} -> s == source end),
do: source)
++
for({_b, _m, source, _cd, _rd, files, _bin} <- all_entries,
times = Enum.map([source|files], &HashDict.fetch!(all_mtimes, &1)),
Mix.Utils.stale?(times, [modified]),
do: source)
end
{entries, changed} = remove_stale_entries(all_entries, removed ++ changed)
stale = changed -- removed
cond do
stale != [] ->
compile_manifest(manifest, entries ++ skip_entries, stale, dest, on_start)
:ok
removed != [] ->
write_manifest(manifest, entries ++ skip_entries)
:ok
true ->
:noop
end
end
defp mtimes(entries) do
Enum.reduce(entries, HashDict.new, fn {_b, _m, source, _cd, _rd, files, _bin}, dict ->
Enum.reduce([source|files], dict, fn file, dict ->
if HashDict.has_key?(dict, file) do
dict
else
HashDict.put(dict, file, Mix.Utils.last_modified(file))
end
end)
end)
end
@doc """
Removes compiled files.
"""
def clean(manifest) do
Enum.map read_manifest(manifest), fn {beam, _, _, _, _, _, _} ->
File.rm(beam)
end
:ok
end
defp compile_manifest(manifest, entries, stale, dest, on_start) do
Mix.Project.ensure_structure()
true = Code.prepend_path(dest)
on_start.()
cwd = File.cwd!
# Starts a server responsible for keeping track which files
# were compiled and the dependencies between them.
{:ok, pid} = Agent.start_link(fn -> entries end)
try do
_ = Kernel.ParallelCompiler.files :lists.usort(stale),
each_module: &each_module(pid, dest, cwd, &1, &2, &3),
each_file: &each_file(&1),
dest: dest
Agent.cast pid, fn entries ->
write_manifest(manifest, entries)
entries
end
after
Agent.stop(pid, :infinity)
end
:ok
end
defp each_module(pid, dest, cwd, source, module, binary) do
beam =
dest
|> Path.join(Atom.to_string(module) <> ".beam")
|> Path.relative_to(cwd)
{compile, runtime} = Kernel.LexicalTracker.remotes(module)
compile =
compile
|> List.delete(module)
|> Enum.reject(&match?("elixir_" <> _, Atom.to_string(&1)))
runtime =
runtime
|> List.delete(module)
|> Enum.reject(&match?("elixir_" <> _, Atom.to_string(&1)))
files = for file <- get_external_resources(module),
File.regular?(file),
relative = Path.relative_to(file, cwd),
Path.type(relative) == :relative,
do: relative
source = Path.relative_to(source, cwd)
tuple = {beam, module, source, compile, runtime, files, binary}
Agent.cast pid, &:lists.keystore(beam, 1, &1, tuple)
end
defp get_external_resources(module) do
for {:external_resource, values} <- module.__info__(:attributes),
value <- values,
do: value
end
defp each_file(file) do
Mix.shell.info "Compiled #{file}"
end
## Resolution
# This function receives the manifest entries and some source
# files that have changed. It then, recursively, figures out
# all the files that changed (via the module dependencies) and
# return the non-changed entries and the removed sources.
defp remove_stale_entries(all, []) do
{all, []}
end
defp remove_stale_entries(all, changed) do
remove_stale_entries(all, HashSet.new, Enum.into(changed, HashSet.new))
end
defp remove_stale_entries(entries, old_stale, old_removed) do
{rest, new_stale, new_removed} =
Enum.reduce entries, {[], old_stale, old_removed}, &remove_stale_entry/2
if HashSet.size(new_stale) > HashSet.size(old_stale) or
HashSet.size(new_removed) > HashSet.size(old_removed) do
remove_stale_entries(rest, new_stale, new_removed)
else
{rest, Enum.to_list(new_removed)}
end
end
defp remove_stale_entry({beam, module, source, compile, runtime, _f, _bin} = entry,
{rest, stale, removed}) do
cond do
# If I changed in disk or have a compile time dependency
# on something stale, I need to be recompiled.
source in removed or Enum.any?(compile, &(&1 in stale)) ->
_ = File.rm(beam)
_ = :code.purge(module)
_ = :code.delete(module)
{rest, HashSet.put(stale, module), HashSet.put(removed, source)}
# If I have a runtime time dependency on something stale,
# I am stale too.
Enum.any?(runtime, &(&1 in stale)) ->
{[entry|rest], HashSet.put(stale, module), removed}
# Otherwise, we don't store it anywhere
true ->
{[entry|rest], stale, removed}
end
end
## Manifest handling
defp read_manifest(manifest) do
case :file.consult(manifest) do
{:ok, [{:version, @manifest_vsn}|t]} -> t
{:error, _} -> []
end
end
defp parse_manifest(manifest, keep_paths) do
Enum.reduce read_manifest(manifest), {[], []}, fn
{_, _, source, _, _, _, _} = entry, {keep, skip} ->
if String.starts_with?(source, keep_paths) do
{[entry|keep], skip}
else
{keep, [entry|skip]}
end
end
end
defp write_manifest(_manifest, []) do
:ok
end
defp write_manifest(manifest, entries) do
File.mkdir_p!(Path.dirname(manifest))
File.open!(manifest, [:write], fn device ->
:io.format(device, '~p.~n', [{:version, @manifest_vsn}])
Enum.map entries, fn {beam, _, _, _, _, _, binary} = entry ->
if binary, do: File.write!(beam, binary)
:io.format(device, '~p.~n', [put_elem(entry, 6, nil)])
end
:ok
end)
# The Mix.Dep.Lock keeps all the project dependencies. Since Elixir
# is a dependency itself, we need to touch the lock so the current
# Elixir version, used to compile the files above, is properly stored.
Mix.Dep.Lock.touch
end
end
| 29.81405 | 94 | 0.621483 |
031715767f53ca854e014ed04702b50b213dc70b | 424 | exs | Elixir | farmbot_ext/test/farmbot_ext/api_fetcher_test.exs | va2ron1/farmbot_os | c80a38058713adc2ad91a4802664bcfe8da9d96c | [
"MIT"
] | null | null | null | farmbot_ext/test/farmbot_ext/api_fetcher_test.exs | va2ron1/farmbot_os | c80a38058713adc2ad91a4802664bcfe8da9d96c | [
"MIT"
] | null | null | null | farmbot_ext/test/farmbot_ext/api_fetcher_test.exs | va2ron1/farmbot_os | c80a38058713adc2ad91a4802664bcfe8da9d96c | [
"MIT"
] | null | null | null | defmodule FarmbotExt.APIFetcherTest do
require Helpers
use ExUnit.Case, async: false
use Mimic
setup :verify_on_exit!
alias FarmbotExt.APIFetcher
test "client" do
Helpers.use_fake_jwt()
%module{} = APIFetcher.client()
assert module == Tesla.Client
end
test "get_body" do
Helpers.use_fake_jwt()
{status, _message} = APIFetcher.get_body!("/nope")
assert status == :error
end
end
| 18.434783 | 54 | 0.695755 |
03171b2e3f5778a6d18475884225c4a14ad450be | 55 | ex | Elixir | apps/rtc/lib/rtc_web/views/layout_view.ex | michaeljguarino/forge | 50ee583ecb4aad5dee4ef08fce29a8eaed1a0824 | [
"Apache-2.0"
] | 59 | 2021-09-16T19:29:39.000Z | 2022-03-31T20:44:24.000Z | apps/rtc/lib/rtc_web/views/layout_view.ex | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
] | 111 | 2021-08-15T09:56:37.000Z | 2022-03-31T23:59:32.000Z | apps/rtc/lib/rtc_web/views/layout_view.ex | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
] | 4 | 2021-12-13T09:43:01.000Z | 2022-03-29T18:08:44.000Z | defmodule RtcWeb.LayoutView do
use RtcWeb, :view
end
| 13.75 | 30 | 0.781818 |
03171b57f4afcd5493cff11e7b4dfe8f0ba9e646 | 5,740 | exs | Elixir | test/dialyxir/project_test.exs | staring-frog/dialyxir | b78735f75b325238b7db20d9eed22f018cca5f26 | [
"Apache-2.0"
] | 1,455 | 2015-01-03T02:53:19.000Z | 2022-03-12T00:31:25.000Z | test/dialyxir/project_test.exs | staring-frog/dialyxir | b78735f75b325238b7db20d9eed22f018cca5f26 | [
"Apache-2.0"
] | 330 | 2015-05-14T13:53:13.000Z | 2022-03-29T17:12:23.000Z | test/dialyxir/project_test.exs | staring-frog/dialyxir | b78735f75b325238b7db20d9eed22f018cca5f26 | [
"Apache-2.0"
] | 146 | 2015-02-03T18:19:43.000Z | 2022-03-07T10:05:20.000Z | defmodule Dialyxir.ProjectTest do
alias Dialyxir.Project
use ExUnit.Case
import ExUnit.CaptureIO, only: [capture_io: 1, capture_io: 2]
defp in_project(app, f) when is_atom(app) do
Mix.Project.in_project(app, "test/fixtures/#{Atom.to_string(app)}", fn _ -> f.() end)
end
defp in_project(apps, f) when is_list(apps) do
path = Enum.map_join(apps, "/", &Atom.to_string/1)
app = List.last(apps)
Mix.Project.in_project(app, "test/fixtures/#{path}", fn _ -> f.() end)
end
test "Default Project PLT File in _build dir" do
in_project(:default_apps, fn ->
assert Regex.match?(~r/_build\/.*plt/, Project.plt_file())
end)
end
test "Can specify a different local PLT path" do
in_project(:alt_local_path, fn ->
assert Regex.match?(~r/dialyzer\/.*plt/, Project.plt_file())
end)
end
test "Can specify a different PLT file name" do
in_project(:local_plt, fn ->
assert Regex.match?(~r/local\.plt/, Project.plt_file())
end)
end
test "Deprecation warning on use of bare :plt_file" do
in_project(:local_plt, fn ->
out = capture_io(&Project.check_config/0)
assert Regex.match?(~r/.*plt_file.*deprecated.*/, out)
end)
end
test "Can specify a different PLT file name along with :no_warn" do
in_project(:local_plt_no_warn, fn ->
assert Regex.match?(~r/local\.plt/, Project.plt_file())
end)
end
test "No deprecation warning on use of plt_file: {:no_warn, myfile}" do
in_project(:local_plt_no_warn, fn ->
out = capture_io(&Project.check_config/0)
refute Regex.match?(~r/.*plt_path.*deprecated.*/, out)
end)
end
test "App list for default contains direct and
indirect :application dependencies" do
in_project(:default_apps, fn ->
apps = Project.cons_apps()
# direct
assert Enum.member?(apps, :logger)
# direct
assert Enum.member?(apps, :public_key)
# indirect
assert Enum.member?(apps, :asn1)
end)
end
test "App list for umbrella contains child dependencies
indirect :application dependencies" do
in_project(:umbrella, fn ->
apps = Project.cons_apps()
# direct
assert Enum.member?(apps, :logger)
# direct, child1
assert Enum.member?(apps, :public_key)
# indirect
assert Enum.member?(apps, :asn1)
# direct, child2
assert Enum.member?(apps, :mix)
end)
end
@tag :skip
test "App list for umbrella contains all child dependencies
when run from child directory" do
in_project([:umbrella, :apps, :second_one], fn ->
apps = Project.cons_apps()
# direct
assert Enum.member?(apps, :logger)
# direct, child1
assert Enum.member?(apps, :public_key)
# indirect
assert Enum.member?(apps, :asn1)
# direct, child2
assert Enum.member?(apps, :mix)
end)
end
test "App list for :apps_direct contains only direct dependencies" do
in_project(:direct_apps, fn ->
apps = Project.cons_apps()
# direct
assert Enum.member?(apps, :logger)
# direct
assert Enum.member?(apps, :public_key)
# indirect
refute Enum.member?(apps, :asn1)
end)
end
test "App list for :plt_ignore_apps does not contain the ignored dependency" do
in_project(:ignore_apps, fn ->
apps = Project.cons_apps()
refute Enum.member?(apps, :logger)
end)
end
test "Core PLT files located in mix home by default" do
in_project(:default_apps, fn ->
assert String.contains?(Project.erlang_plt(), Mix.Utils.mix_home())
end)
end
test "Core PLT file paths can be specified with :plt_core_path" do
in_project(:alt_core_path, fn ->
assert String.contains?(Project.erlang_plt(), "_build")
end)
end
test "By default core elixir and erlang plts are in mix.home" do
in_project(:default_apps, fn ->
assert String.contains?(Project.erlang_plt(), Mix.Utils.mix_home())
end)
end
test "By default a dialyzer ignore file is nil" do
in_project(:default_apps, fn ->
assert Project.dialyzer_ignore_warnings() == nil
end)
end
test "Filtered dialyzer warnings" do
in_project(:default_apps, fn ->
output_list =
~S"""
project.ex:9 This should still be here
project.ex:9: Guard test is_atom(_@5::#{'__exception__':='true', '__struct__':=_, _=>_}) can never succeed
project.ex:9: Guard test is_binary(_@4::#{'__exception__':='true', '__struct__':=_, _=>_}) can never succeed
"""
|> String.trim_trailing("\n")
|> String.split("\n")
pattern = ~S"""
Guard test is_atom(_@5::#{'__exception__':='true', '__struct__':=_, _=>_}) can never succeed
Guard test is_binary(_@4::#{'__exception__':='true', '__struct__':=_, _=>_}) can never succeed
"""
lines = Project.filter_legacy_warnings(output_list, pattern)
assert lines == ["project.ex:9 This should still be here"]
end)
end
test "Project with non-existent dependency" do
in_project(:nonexistent_deps, fn ->
out = capture_io(:stderr, &Project.cons_apps/0)
assert Regex.match?(~r/Error loading nonexistent, dependency list may be incomplete/, out)
end)
end
test "igonored apps are removed in umbrella projects" do
in_project(:umbrella_ignore_apps, fn ->
refute Enum.member?(Project.cons_apps(), :logger)
end)
end
test "list_unused_filters? works as intended" do
assert Project.list_unused_filters?(list_unused_filters: true)
refute Project.list_unused_filters?(list_unused_filters: nil)
# Override in mix.exs
in_project(:ignore, fn ->
assert Project.list_unused_filters?(list_unused_filters: nil)
end)
end
end
| 30.531915 | 116 | 0.65662 |
03173c56b633495b28d81a9576be7c23bfdee0aa | 23,034 | ex | Elixir | lib/wechat.ex | feng19/elixir_wechat | 5ae052476f27abf85da484b902347042c097b150 | [
"MIT"
] | null | null | null | lib/wechat.ex | feng19/elixir_wechat | 5ae052476f27abf85da484b902347042c097b150 | [
"MIT"
] | null | null | null | lib/wechat.ex | feng19/elixir_wechat | 5ae052476f27abf85da484b902347042c097b150 | [
"MIT"
] | null | null | null | defmodule WeChat do
@moduledoc """
The link to WeChat Official Account Platform API document in [Chinese](https://developers.weixin.qq.com/doc/offiaccount/Getting_Started/Overview.html){:target="_blank"} | [English](https://developers.weixin.qq.com/doc/offiaccount/en/Getting_Started/Overview.html){:target="_blank"}.
Currently, there are two ways to use the WeChat's APIs:
* As `common` application, directly integrates WeChat's APIs after turn on your WeChat Official Account into the developer mode ([see details](https://developers.weixin.qq.com/doc/offiaccount/en/Basic_Information/Access_Overview.html){:target="_blank"});
* As `component` application, authorizes your WeChat Official Account to the WeChat Official Account third-party platform application, leverages a set of common solutions from the third-party platform ([see details](https://developers.weixin.qq.com/doc/oplatform/en/Third-party_Platforms/Third_party_platform_appid.html){:target="_blank"}).
Refer the official document's recommend to manage access token ([see details](https://developers.weixin.qq.com/doc/offiaccount/en/Basic_Information/Get_access_token.html){:target="_blank"}), we need to
temporarily storage access token in a centralization way, we prepare four behaviours to manage the minimum responsibilities for each use case.
Use this library in the 3rd-party web app which can read the temporary storage data (e.g. access token/jsapi-ticket/card-ticket) from the centralization nodes(hereinafter "hub"):
* The `WeChat.Storage.Client` storage adapter behaviour is required for the `common` application;
* The `WeChat.Storage.ComponentClient` storage adapter behaviour is required for the `component` application.
Use this library in the hub web app:
* The `WeChat.Storage.Hub` storage adapter behaviour is required for the `common` application;
* The `WeChat.Storage.ComponentHub` storage adapter behaviour is required for the `component` application.
As usual, the hub web app is one-off setup to use this library, most of time we use `elixir_wechat` is in the 3rd-party web app as a client, so here provide a default storage adapter to conveniently
initialize it as a client use case:
* The `WeChat.Storage.Adapter.DefaultClient` implements `WeChat.Storage.Client` behaviour, and is used for the `common` application by default:
```elixir
defmodule MyClient do
use WeChat,
adapter_storage: {:default, "http://localhost:4000"}
end
#
# the above equals the following
#
defmodule MyClient do
use WeChat,
adapter_storage: {WeChat.Storage.Adapter.DefaultClient, "http://localhost:4000"}
end
```
* The `WeChat.Storage.Adapter.DefaultComponentClient` implements `WeChat.Storage.ComponentClient` behaviour, and is used for the `component` application by default:
```elixir
defmodule MyComponentClient do
use WeChat.Component,
adapter_storage: {:default, "http://localhost:4000"}
end
#
# the above equals the following
#
defmodule MyComponentClient do
use WeChat.Component,
adapter_storage: {WeChat.Storage.Adapter.DefaultComponentClient, "http://localhost:4000"}
end
```
## Usage
### As `common` application
```elixir
defmodule MyClient do
use WeChat,
adapter_storage: {:default, "http://localhost:4000"},
appid: "MyAppID"
end
MyClient.request(:post, url: "WeChatURL1", body: %{}, query: [])
MyClient.request(:get, url: "WeChatURL2", query: [])
```
Or use `WeChat.request/2` directly
```elixir
WeChat.request(:post, url: "WeChatURL1",
appid: "MyAppID", adapter_storage: {:default, "http://localhost:4000"},
body: %{}, query: [])
WeChat.request(:get, url: "WeChatURL2",
appid: "MyAppID", adapter_storage: {:default, "http://localhost:4000"},
query: [])
```
### As `component` application
```elixir
defmodule MyComponentClient do
use WeChat.Component,
adapter_storage: {:default, "http://localhost:4000"},
appid: "MyAppID",
authorizer_appid: "MyAuthorizerAppID"
end
MyComponentClient.request(:post, url: "WeChatURL1", body: %{}, query: [])
MyComponentClient.request(:post, url: "WeChatURL2", query: [])
```
Or use `WeChat.request/2` directly
```elixir
WeChat.request(:post, url: "WeChatURL1",
appid: "MyAppID", authorizer_appid: "MyAuthorizerAppID",
adapter_storage: {:default, "http://localhost:4000"}, body: %{}, query: [])
WeChat.request(:get, url: "WeChatURL2",
appid: "MyAppID", authorizer_appid: "MyAuthorizerAppID",
adapter_storage: {:default, "http://localhost:4000"}, query: [])
```
"""
alias WeChat.{Http, Utils}
@type method :: :head | :get | :delete | :trace | :options | :post | :put | :patch
@type error :: atom() | WeChat.Error.t()
  defmacro __using__(opts \\ []) do
    # Expand aliases/compile-time expressions in the caller's options, then
    # keep only the keys a client module may preconfigure; these become the
    # defaults merged into every `request/2` call on the using module.
    default_opts =
      opts
      |> Macro.prewalk(&Macro.expand(&1, __CALLER__))
      |> Keyword.take([:adapter_storage, :appid, :authorizer_appid, :scenario])

    quote do
      def default_opts, do: unquote(default_opts)

      @doc """
      See WeChat.request/2 for more information.
      """
      @spec request(method :: WeChat.method(), options :: Keyword.t()) ::
              {:ok, term()} | {:error, WeChat.error()}
      def request(method, options) do
        # Merge the module-level defaults into the per-call options
        # (precedence is decided by WeChat.Utils.merge_keyword/2).
        options = WeChat.Utils.merge_keyword(options, unquote(default_opts))
        WeChat.common_request(method, options)
      end

      @doc """
      The expire time (in seconds) to `access_token` and `ticket` temporary storage,
      by default it is 7200 seconds
      """
      defdelegate expires_in(), to: WeChat

      # Both functions may be overridden by the using module (e.g. in the
      # hub scenario).
      defoverridable request: 2, expires_in: 0
    end
  end
defmodule Error do
@moduledoc """
A WeChat error expression.
"""
@type t :: %__MODULE__{
errcode: String.t(),
reason: String.t(),
message: String.t(),
http_status: integer()
}
@derive {Jason.Encoder, only: [:errcode, :message, :reason, :http_status]}
defexception errcode: nil, message: nil, reason: nil, http_status: nil
def message(%__MODULE__{
errcode: errcode,
message: message,
reason: reason,
http_status: http_status
}) do
"errcode: #{inspect(errcode)}, message: #{inspect(message)}, reason: #{inspect(reason)}, http_status: #{
inspect(http_status)
}"
end
end
  defmodule Request do
    @moduledoc false

    # Internal struct describing one WeChat API request: the HTTP verb and
    # URI, the involved appids, the adapter storage used to resolve the
    # access token, and the body/query/options passed to the HTTP client.

    @type body :: {:form, map()} | map()

    @type t :: %__MODULE__{
            method: atom(),
            uri: URI.t(),
            appid: String.t(),
            authorizer_appid: String.t(),
            adapter_storage: module(),
            body: body(),
            query: keyword(),
            opts: keyword(),
            access_token: String.t(),
            scenario: :hub | nil
          }

    defstruct [
      :method,
      :uri,
      :appid,
      :authorizer_appid,
      :adapter_storage,
      :body,
      :query,
      :opts,
      :access_token,
      :scenario
    ]
  end
  defmodule Token do
    @moduledoc false

    # Access/refresh token pair together with the `timestamp` it was obtained
    # at and its validity window `expires_in` (in seconds).

    @type t :: %__MODULE__{
            access_token: String.t(),
            refresh_token: String.t(),
            timestamp: integer(),
            expires_in: integer()
          }

    @derive Jason.Encoder
    defstruct [:access_token, :refresh_token, :timestamp, :expires_in]
  end

  defmodule Ticket do
    @moduledoc false

    # A WeChat ticket value tagged with its `type` (presumably "jsapi" or
    # "wx_card" — confirm against the storage adapters), the `timestamp` it
    # was obtained at, and its validity window `expires_in` (in seconds).

    @type t :: %__MODULE__{
            value: String.t(),
            type: String.t(),
            timestamp: integer(),
            expires_in: integer()
          }

    @derive Jason.Encoder
    defstruct [:value, :type, :timestamp, :expires_in]
  end
  defmodule UploadMedia do
    @moduledoc """
    Use for upload media file related.
    """

    # `file_path` locates the local file to upload; `type` is a tagged tuple
    # of the media kind (image/voice/video/thumb) and an accompanying string.
    @type t :: %__MODULE__{
            file_path: String.t(),
            type:
              {:image, String.t()}
              | {:voice, String.t()}
              | {:video, String.t()}
              | {:thumb, String.t()}
          }

    @enforce_keys [:file_path, :type]
    defstruct [:file_path, :type]
  end

  defmodule UploadMediaContent do
    @moduledoc """
    Use for upload media file content related.
    """

    # Like `UploadMedia`, but carries the raw `file_content` binary and a
    # `file_name` instead of a path on disk.
    @type t :: %__MODULE__{
            file_content: binary(),
            file_name: String.t(),
            type:
              {:image, String.t()}
              | {:voice, String.t()}
              | {:video, String.t()}
              | {:thumb, String.t()}
          }

    @enforce_keys [:file_content, :file_name, :type]
    defstruct [:file_content, :file_name, :type]
  end
  defmodule JSSDKSignature do
    @moduledoc """
    A WeChat JSSDK signature expression.
    """

    # `value` is the computed signature; `timestamp` and `noncestr` are the
    # inputs that must accompany it when configuring the JSSDK.
    @type t :: %__MODULE__{
            value: String.t(),
            timestamp: integer(),
            noncestr: String.t()
          }

    defstruct [:value, :timestamp, :noncestr]
  end

  defmodule CardSignature do
    @moduledoc """
    A WeChat Card signature expression.
    """

    # `value` is the computed signature; `timestamp` and `noncestr` are the
    # inputs that must accompany it when initializing WeChat Card in JSSDK.
    @type t :: %__MODULE__{
            value: String.t(),
            timestamp: integer(),
            noncestr: String.t()
          }

    defstruct [:value, :timestamp, :noncestr]
  end
@doc """
To configure and load WeChat JSSDK in the target page's url properly, use `jsapi_ticket` and `url` to generate a signature for this scenario.
"""
@spec sign_jssdk(jsapi_ticket :: String.t(), url :: String.t()) :: JSSDKSignature.t()
defdelegate sign_jssdk(jsapi_ticket, url), to: Utils
@doc """
See `WeChat.sign_card/1`.
"""
@spec sign_card(wxcard_ticket :: String.t(), card_id :: String.t()) :: CardSignature.t()
defdelegate sign_card(wxcard_ticket, card_id), to: Utils
@doc """
See `WeChat.sign_card/1`.
"""
@spec sign_card(wxcard_ticket :: String.t(), card_id :: String.t(), openid :: String.t()) ::
CardSignature.t()
defdelegate sign_card(wxcard_ticket, card_id, openid), to: Utils
@doc """
To initialize WeChat Card functions in JSSDK, use `wxcard_ticket` and `card_id` to generate a signature for this scenario,
[see official document](https://developers.weixin.qq.com/doc/offiaccount/OA_Web_Apps/JS-SDK.html#65){:target="_blank"}.
"""
@spec sign_card(list :: [String.t()]) :: CardSignature.t()
defdelegate sign_card(list), to: Utils
@doc """
Call WeChat's HTTP API functions in a explicit way.
We can defined a global module to assemble `appid`, `authorizer_appid`(only used for "component" application), and `adapter_storage`.
For example:
```
defmodule MyClient do
use WeChat,
appid: "...",
adapter_storage: "..."
end
```
```
defmodule MyComponentClient do
use WeChat.Component,
appid: "...",
authorizer_appid: "...",
adapter_storage: "..."
end
```
And then we can use `MyClient` or `MyComponentClient` to call `request/2`, as usual, there dose NOT need to pass the above parameters when invoke, but if needed you
input them to override.
We can directly use `WeChat.request/2` as well, in this way, the `appid`, `authorizer_appid`(only used for "component" application), and `adapter_storage` are required
for each invoke.
The `method` parameter can be used as one of `t:method/0`.
## Options
- `:appid`, required, if you use a global module to assemble it, this value is optional. If you are using a `common` application, `appid` means the application id of your
WeChat Official Account; if you are a `component` application, `appid` means the application id of your WeChat Official Account third-party platform application.
- `:authorizer_appid`, optional, if you are using a `component` application, this value is required, the application id of your WeChat Official Account third-party platform
application.
- `:adapter_storage`, required, the predefined storage way to used for `access_token`, `jsapi_ticket`, and `card_ticket`, here provide a `{:default, "MyHubURL"}` as option.
- `:url`, required, the URL to call WeChat's HTTP API function, for example, "/cgi-bin/material/get_materialcount", also you can input a completed URL like
this "https://api.weixin.qq.com/cgi-bin/material/get_materialcount".
- `:host`, optional, the host of URI to call WeChat's HTTP API function, if you input a completed URL with host, this value is optional, by default it is "api.weixin.qq.com".
- `:scheme`, optional, the scheme of URI to call WeChat's HTTP API function, if you input a completed URL with scheme, this value is optional, by default it is "https".
- `:port`, optional, the port of URI to call WeChat's HTTP API function, if you input a completed URL with port, this value is optional, by default it is "443"(as integer).
- `:body`, optional, a map, decided by your used WeChat's HTTP API functions, following WeChat official document to setup the body of the request.
- `:query`, optional, a keyword, decided by your used WeChat's HTTP API functions, following WeChat official document to setup the query string of the request, this library will
automatically appended a proper `access_token` into the query of each request, so we do NOT need to input `access_token` parameter.
- `:opts`, optional, custom, per-request middleware or adapter options (exported from `Tesla`)
"""
# Entry point documented by the `@doc` above: accepts any supported adapter
# storage behaviour (scope `:all`) and executes the prepared request.
@spec request(method :: method(), options :: Keyword.t()) ::
        {:ok, term()} | {:error, WeChat.Error.t()}
def request(method, options) do
  prepared = prepare_request(method, options)
  checked = check_adapter_storage(prepared, :all)

  checked
  |> setup_httpclient()
  |> send_request()
end
@doc false
def common_request(method, options) do
  # Same pipeline as `request/2`, but the adapter storage is restricted to
  # the `common` application behaviours.
  prepared = prepare_request(method, options)

  prepared
  |> check_adapter_storage(:common)
  |> setup_httpclient()
  |> send_request()
end
@doc false
def component_request(method, options) do
  # Same pipeline as `request/2`, but the adapter storage is restricted to
  # the `component` (third-party platform) application behaviours.
  prepared = prepare_request(method, options)

  prepared
  |> check_adapter_storage(:component)
  |> setup_httpclient()
  |> send_request()
end
@doc """
The expire time (in seconds) to `access_token` and `ticket` temporary storage,
by default it is 7200 seconds.
For hub scenario, both `common` and `component` application can override this function in the defined
basic module if needed, and then can use this function as a global setting to use in `access_token` and `ticket`
life cycle management, for example:
```
defmodule MyHubComponentClient do
use WeChat.Component,
scenario: :hub,
adapter_storage: MyComponentLocalStorage
def expires_in(), do: 7000
end
```
```
defmodule MyHubCommonClient do
use WeChat,
scenario: :hub,
adapter_storage: MyCommonLocalStorage
def expires_in(), do: 7000
end
```
For client scenario, no need to use this function, the local registry for `access_token` and `ticket` will
use hub's response(contain time related) when fetch/refresh `access_token` and `ticket` to manage them as a
client side temporary cache.
"""
@spec expires_in() :: integer()
def expires_in(), do: 7200
@doc """
A function helper to fetch `common` application's access token.
When apply it to hub, if no available access token from hub's storage, there will use
the set account's `secret_key` to refresh a new one.
"""
def fetch_access_token(appid, adapter_storage) when is_atom(adapter_storage) do
fetch_access_token(appid, {adapter_storage, nil})
end
def fetch_access_token(appid, {adapter_storage, args}) do
token = adapter_storage.fetch_access_token(appid, args)
case token do
{:ok, %WeChat.Token{access_token: access_token}} when access_token != nil ->
token
_ ->
refetch_access_token(appid, adapter_storage, args)
end
end
# Builds the internal `%Request{}` struct from the caller-supplied options.
defp prepare_request(method, options) do
  query = options[:query] || []
  uri = Utils.parse_uri(options[:url], Keyword.take(options, [:host, :scheme, :port]))
  # `appid` may be given either as a top-level option or inside the query.
  appid = options[:appid] || Keyword.get(query, :appid)

  %Request{
    method: check_method_opt(method),
    uri: uri,
    appid: appid,
    authorizer_appid: options[:authorizer_appid],
    body: options[:body],
    query: query,
    opts: options[:opts],
    adapter_storage: options[:adapter_storage],
    scenario: options[:scenario]
  }
end
# URL path prefixes that must be routed through the `component` HTTP client.
# Compared after stripping at most one leading "/", so both "/cgi-bin/..."
# and "cgi-bin/..." spellings match - exactly as the original clause pairs did.
@component_path_prefixes ["cgi-bin/component", "sns/oauth2/component/", "sns/component/"]

# Selects the Tesla client for the request; raises when no URL path was given.
defp setup_httpclient(%Request{uri: %URI{path: path}}) when path == "" or path == nil do
  raise %WeChat.Error{reason: "invalid_request", message: "url is required"}
end

defp setup_httpclient(%Request{uri: %URI{path: path}} = request) do
  if component_path?(path) do
    {Http.component_client(request), request}
  else
    {Http.client(request), request}
  end
end

# Returns true when `path` belongs to a WeChat third-party platform
# (component) API endpoint.
defp component_path?("/" <> rest), do: component_prefix?(rest)
defp component_path?(path), do: component_prefix?(path)

defp component_prefix?(path) do
  Enum.any?(@component_path_prefixes, &String.starts_with?(path, &1))
end
# Executes the prepared request through the chosen Tesla client.
defp send_request({client, request}) do
  %Request{method: method, uri: uri, query: query, body: body, opts: opts} = request

  Http.request(client,
    method: method,
    url: URI.to_string(uri),
    query: query,
    body: body,
    opts: opts
  )
end
# Raises a `WeChat.Error` unless `module` declares exactly one of the given
# adapter storage behaviours in its `@behaviour` attributes.
defp ensure_implements(module, available_adapter_storage_behaviours)
     when is_list(available_adapter_storage_behaviours) do
  declared =
    :attributes
    |> module.__info__()
    |> Keyword.get(:behaviour, [])

  matched = Enum.count(declared, &(&1 in available_adapter_storage_behaviours))

  if matched != 1 do
    raise %WeChat.Error{
      reason: "invalid_config",
      message:
        "please ensure module: #{inspect(module)} implemented one of #{inspect(available_adapter_storage_behaviours)} adapter storage behaviour"
    }
  end
end
# Validates and normalizes `request.adapter_storage` for the given scope.
# The three original per-atom clauses were identical apart from the scope
# atom; the `in` guard keeps the exact same accepted values (any other scope
# still raises FunctionClauseError, as before).
defp check_adapter_storage(request, scope) when scope in [:all, :common, :component] do
  adapter_storage = do_check_adapter_storage(request.adapter_storage, scope)
  Map.put(request, :adapter_storage, adapter_storage)
end
# Behaviours accepted when any storage kind is allowed (scope `:all`).
# Extracted to remove the duplicated list in the two valid clauses.
@all_adapter_storage_behaviours [
  WeChat.Storage.Client,
  WeChat.Storage.Hub,
  WeChat.Storage.ComponentClient,
  WeChat.Storage.ComponentHub
]

defp do_check_adapter_storage({adapter_storage, args}, :all) when is_atom(adapter_storage) do
  ensure_implements(adapter_storage, @all_adapter_storage_behaviours)
  {adapter_storage, args}
end

# A bare module means "no extra args" - normalize to the tuple form.
defp do_check_adapter_storage(adapter_storage, :all) when is_atom(adapter_storage) do
  do_check_adapter_storage({adapter_storage, []}, :all)
end

defp do_check_adapter_storage(invalid, :all) do
  raise %WeChat.Error{
    reason: "invalid_config",
    message:
      "using unexpected #{inspect(invalid)} adapter storage, please use it as one of [`WeChat.Storage.Client`, `WeChat.Storage.Hub`, `WeChat.Storage.ComponentClient`, `WeChat.Storage.ComponentHub`]"
  }
end
# Behaviours accepted for a `common` application (scope `:common`).
@common_adapter_storage_behaviours [
  WeChat.Storage.Client,
  WeChat.Storage.Hub
]

# `{:default, hub_url}` selects the built-in hub-backed client storage.
defp do_check_adapter_storage({:default, hub_base_url}, :common)
     when is_bitstring(hub_base_url) do
  {WeChat.Storage.Adapter.DefaultClient, hub_base_url}
end

# A bare module means "no extra args" - normalize to the tuple form.
defp do_check_adapter_storage(adapter_storage, :common) when is_atom(adapter_storage) do
  do_check_adapter_storage({adapter_storage, []}, :common)
end

defp do_check_adapter_storage({adapter_storage, args}, :common)
     when is_atom(adapter_storage) do
  ensure_implements(adapter_storage, @common_adapter_storage_behaviours)
  {adapter_storage, args}
end

defp do_check_adapter_storage(invalid, :common) do
  raise %WeChat.Error{
    reason: "invalid_config",
    message:
      "using unexpected #{inspect(invalid)} adapter storage, please use it as `WeChat.Storage.Client` or `WeChat.Storage.Hub`"
  }
end
# Behaviours accepted for a `component` application (scope `:component`).
@component_adapter_storage_behaviours [
  WeChat.Storage.ComponentClient,
  WeChat.Storage.ComponentHub
]

# `{:default, hub_url}` selects the built-in hub-backed component client storage.
defp do_check_adapter_storage({:default, hub_base_url}, :component)
     when is_bitstring(hub_base_url) do
  {WeChat.Storage.Adapter.DefaultComponentClient, hub_base_url}
end

# A bare module means "no extra args" - normalize to the tuple form.
defp do_check_adapter_storage(adapter_storage, :component) when is_atom(adapter_storage) do
  do_check_adapter_storage({adapter_storage, []}, :component)
end

defp do_check_adapter_storage({adapter_storage, args}, :component)
     when is_atom(adapter_storage) do
  ensure_implements(adapter_storage, @component_adapter_storage_behaviours)
  {adapter_storage, args}
end

defp do_check_adapter_storage(invalid, :component) do
  raise %WeChat.Error{
    reason: "invalid_config",
    message:
      "using unexpected #{inspect(invalid)} adapter storage, please use it as `WeChat.Storage.ComponentClient` or `WeChat.Storage.ComponentHub`"
  }
end
# The HTTP methods accepted by `request/2` and friends.
@valid_http_methods [:head, :get, :delete, :trace, :options, :post, :put, :patch]

# Validates the HTTP method option; the original used eight chained `when`
# guards - `in/2` over a compile-time list is the idiomatic equivalent.
defp check_method_opt(method) when method in @valid_http_methods, do: method

defp check_method_opt(method) do
  raise %WeChat.Error{
    reason: "invalid_request",
    message: "input invalid http method: #{inspect(method)}"
  }
end
# Requests a fresh access token from "/cgi-bin/token" through the configured
# adapter storage and wraps the response into a `%WeChat.Token{}`.
# Raises (via `Utils.as_error/1`) on any non-token response or error.
defp refetch_access_token(appid, adapter_storage, args) do
  WeChat.request(
    :get,
    appid: appid,
    adapter_storage: {adapter_storage, args},
    url: "/cgi-bin/token"
  )
  |> case do
    {:ok, %{body: %{"access_token" => access_token} = body}} ->
      token = %WeChat.Token{
        access_token: access_token,
        timestamp: Map.get(body, "timestamp"),
        expires_in: Map.get(body, "expires_in")
      }

      {:ok, token}

    {:ok, response} ->
      raise Utils.as_error(response)

    {:error, error} ->
      raise Utils.as_error(error)
  end
end
end
| 32.488011 | 344 | 0.659113 |
031743c54b757284e83130e6477544b2710d4ab6 | 583 | exs | Elixir | test/changelog_web/controllers/news_source_controller_test.exs | PsOverflow/changelog.com | 53f4ecfc39b021c6b8cfcc0fa11f29aff8038a7f | [
"MIT"
] | 1 | 2021-03-14T21:12:49.000Z | 2021-03-14T21:12:49.000Z | test/changelog_web/controllers/news_source_controller_test.exs | PsOverflow/changelog.com | 53f4ecfc39b021c6b8cfcc0fa11f29aff8038a7f | [
"MIT"
] | null | null | null | test/changelog_web/controllers/news_source_controller_test.exs | PsOverflow/changelog.com | 53f4ecfc39b021c6b8cfcc0fa11f29aff8038a7f | [
"MIT"
] | 1 | 2018-10-03T20:55:52.000Z | 2018-10-03T20:55:52.000Z | defmodule ChangelogWeb.NewsSourceControllerTest do
use ChangelogWeb.ConnCase
test "getting the index", %{conn: conn} do
s1 = insert(:news_source)
s2 = insert(:news_source)
insert(:news_item, source: s1)
conn = get(conn, news_source_path(conn, :index))
assert conn.status == 200
assert conn.resp_body =~ s1.name
refute conn.resp_body =~ s2.name
end
test "getting a news source page", %{conn: conn} do
s = insert(:news_source)
conn = get(conn, news_source_path(conn, :show, s.slug))
assert html_response(conn, 200) =~ s.name
end
end
| 29.15 | 59 | 0.684391 |
031759363f092d4aefadf4814a00f12411af7ca8 | 5,845 | ex | Elixir | clients/dataproc/lib/google_api/dataproc/v1/model/policy.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/dataproc/lib/google_api/dataproc/v1/model/policy.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/dataproc/lib/google_api/dataproc/v1/model/policy.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dataproc.V1.Model.Policy do
  @moduledoc """
  An Identity and Access Management (IAM) policy, which specifies access
  controls for Google Cloud resources.

  A `Policy` is a collection of `bindings`. Each binding binds one or more
  members (user accounts, service accounts, Google groups, domains) to a
  single role, and may additionally carry a `condition` - a logical
  expression restricting when the binding applies. To learn which resources
  support conditions in their IAM policies, see the IAM documentation
  (https://cloud.google.com/iam/help/conditions/resource-policies). For a
  description of IAM and its features, see https://cloud.google.com/iam/docs/.

  ## Attributes

  *   `bindings` (*type:* `list(GoogleApi.Dataproc.V1.Model.Binding.t)`, *default:* `nil`) - Associates a list of members to a role, optionally guarded by a condition that determines how and when the binding is applied. Each binding must contain at least one member.
  *   `etag` (*type:* `String.t`, *default:* `nil`) - Used for optimistic concurrency control, to help prevent simultaneous policy updates from overwriting each other. Systems should pass the etag returned by getIamPolicy back to setIamPolicy in a read-modify-write cycle. Important: if you use IAM Conditions, omitting the etag allows a version 3 policy to be overwritten by a version 1 policy, losing all conditions.
  *   `version` (*type:* `integer()`, *default:* `nil`) - Specifies the format of the policy. Valid values are 0, 1, and 3; invalid values are rejected. Any operation that affects conditional role bindings (getting, adding, changing, or removing a conditional binding) must specify version 3. Policies with no conditions may use any valid version or leave the field unset.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :bindings => list(GoogleApi.Dataproc.V1.Model.Binding.t()),
          :etag => String.t(),
          :version => integer()
        }

  field(:bindings, as: GoogleApi.Dataproc.V1.Model.Binding, type: :list)
  field(:etag)
  field(:version)
end
defimpl Poison.Decoder, for: GoogleApi.Dataproc.V1.Model.Policy do
  # Decoding is handled by the generated model module itself.
  defdelegate decode(value, options), to: GoogleApi.Dataproc.V1.Model.Policy
end
defimpl Poison.Encoder, for: GoogleApi.Dataproc.V1.Model.Policy do
  # Encoding is shared across all generated models via the Gax base.
  defdelegate encode(value, options), to: GoogleApi.Gax.ModelBase
end
| 57.871287 | 853 | 0.728657 |
03176fdf41160b4e2f8b71aad09fd1a8a7dbc2c4 | 522 | ex | Elixir | lib/mipha/markdown/auto_linker.ex | ZPVIP/mipha | a7df054f72eec7de88b60d94c501488375bdff6a | [
"MIT"
] | 156 | 2018-06-01T19:52:32.000Z | 2022-02-03T10:58:10.000Z | lib/mipha/markdown/auto_linker.ex | ZPVIP/mipha | a7df054f72eec7de88b60d94c501488375bdff6a | [
"MIT"
] | 139 | 2018-07-10T01:57:23.000Z | 2021-08-02T21:29:24.000Z | lib/mipha/markdown/auto_linker.ex | ZPVIP/mipha | a7df054f72eec7de88b60d94c501488375bdff6a | [
"MIT"
] | 29 | 2018-07-17T08:43:45.000Z | 2021-12-14T13:45:30.000Z | defmodule Mipha.Markdown.AutoLinker do
@moduledoc """
AutoLinker will run on a string and return the string with any naked links
wrapped in an `a` tag with the link as both content and href..
"""
# A RegEx to match any URL that has spaces or newlines on either side of it.
@url_regex ~r{([ \n]+|^)(http|ftp|https)://([\w_-]+(?:(?:\.[\w_-]+)+))([\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?}
def run(body) do
@url_regex
|> Regex.replace(body, "\\1<a href=\"\\2\://\\3\\4\\5\">\\2://\\3\\4\\5</a>")
end
end
| 34.8 | 110 | 0.565134 |
031778a3cd32fa0d804d330507daa61fe8bfbd35 | 3,107 | exs | Elixir | spec/ecto/infinite_date_range_spec.exs | makkrnic/ecto-infinite-times | 8570b5cffb3f3c6dff449c8a7a90787f47a62a4a | [
"MIT"
] | 1 | 2021-12-03T06:52:35.000Z | 2021-12-03T06:52:35.000Z | spec/ecto/infinite_date_range_spec.exs | makkrnic/ecto-infinite-times | 8570b5cffb3f3c6dff449c8a7a90787f47a62a4a | [
"MIT"
] | null | null | null | spec/ecto/infinite_date_range_spec.exs | makkrnic/ecto-infinite-times | 8570b5cffb3f3c6dff449c8a7a90787f47a62a4a | [
"MIT"
] | null | null | null | defmodule InfiniteTimes.Ecto.InfiniteDateRangeSpec do
use ESpec
alias InfiniteTimes.InfDate
alias InfiniteTimes.Ecto.InfiniteDateRange
describe "cast/1" do
context "when provided with %InfiniteDateRange{}" do
it "casts sucessfully" do
range = InfiniteTimes.InfiniteDateRange.new(InfDate.new(~D[2018-01-05]), InfDate.new(~D[2018-02-05]))
range
|> InfiniteDateRange.cast()
|> should(match_pattern {:ok, ^range})
end
end
context "when provided with valid tuple" do
context "as `%Date{}`'s" do
let :valid_tuples, do: [
{~D[2018-01-05], ~D[2018-02-05]},
{~D[2018-01-05], nil},
{nil, ~D[2018-02-05]},
{nil, nil},
]
it "casts successfully" do
valid_tuples()
|> Enum.each(fn (tuple) ->
tuple
|> InfiniteDateRange.cast()
|> should(match_pattern {:ok, _})
end)
end
end
context "as `%InfDate{}`'s" do
let :valid_tuples, do: [
{~D[2018-01-05], ~D[2018-02-05]},
{~D[2018-01-05], :infinity},
{:neg_infinity, ~D[2018-02-05]},
{:neg_infinity, :infinity},
]
it "casts successfully" do
valid_tuples()
|> Enum.each(fn ({lower, upper}) ->
{InfDate.new(lower), InfDate.new(upper)}
|> InfiniteDateRange.cast()
|> should(match_pattern {:ok, _})
end)
end
end
end
end
describe "load/1" do
let :valid_tuples, do: [
%Postgrex.Range{lower: ~D[2018-01-05], upper: ~D[2018-02-05]},
%Postgrex.Range{lower: ~D[2018-01-05], upper: :infinity},
%Postgrex.Range{lower: ~D[2018-01-05], upper: nil},
%Postgrex.Range{lower: :neg_infinity, upper: ~D[2018-02-05]},
%Postgrex.Range{lower: :nil, upper: ~D[2018-02-05]},
%Postgrex.Range{lower: :neg_infinity, upper: :infinity},
%Postgrex.Range{lower: nil, upper: nil},
]
context "with valid tuples" do
it "returns the InfiniteDateRange" do
valid_tuples()
|> Enum.each(fn (range) ->
range
|> InfiniteDateRange.load()
|> should(match_pattern {:ok, %InfiniteTimes.InfiniteDateRange{}})
end)
end
end
context "with invalid args" do
it "returns :error" do
nil
|> InfiniteDateRange.load()
|> should(eq :error)
end
end
end
describe "dump/1" do
context "when provided with InfiniteDateRange" do
let :valid_tuples, do: [
{~D[2018-01-05], ~D[2018-02-05]},
{~D[2018-01-05], nil},
{nil, ~D[2018-02-05]},
{nil, nil},
]
it "returns {:ok, %Postgrex.Range}" do
valid_tuples()
|> Enum.map(fn (tuple) ->
{:ok, range} = tuple
|> InfiniteDateRange.cast()
range
end)
|> Enum.each(fn (range) ->
range
|> InfiniteDateRange.dump()
|> should(match_pattern {:ok, %Postgrex.Range{}})
end)
end
end
end
end
| 27.990991 | 109 | 0.535565 |
0317a5d6e1dd761a552606b7ec962b5b22675941 | 21,631 | ex | Elixir | lib/sanbase/prices/price.ex | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 81 | 2017-11-20T01:20:22.000Z | 2022-03-05T12:04:25.000Z | lib/sanbase/prices/price.ex | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 359 | 2017-10-15T14:40:53.000Z | 2022-01-25T13:34:20.000Z | lib/sanbase/prices/price.ex | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 16 | 2017-11-19T13:57:40.000Z | 2022-02-07T08:13:02.000Z | defmodule Sanbase.Price do
use Ecto.Schema
use AsyncWith
import Sanbase.Price.SqlQuery
import Sanbase.Utils.Transform,
only: [
maybe_unwrap_ok_value: 1,
maybe_apply_function: 2,
wrap_ok: 1
]
import Sanbase.Metric.Transform,
only: [
maybe_nullify_values: 1,
remove_missing_values: 1,
exec_timeseries_data_query: 2
]
alias Sanbase.Model.Project
alias Sanbase.ClickhouseRepo
@default_source "coinmarketcap"
@supported_sources ["coinmarketcap", "cryptocompare"]
@supported_sources_str Enum.join(@supported_sources, ", ")
@deprecated_sources ["kaiko"]
@metrics [:price_usd, :price_btc, :marketcap_usd, :volume_usd]
@metrics @metrics ++ Enum.map(@metrics, &Atom.to_string/1)
@aggregations Sanbase.Metric.SqlQuery.Helper.aggregations()
@type metric :: String.t() | Atom.t()
@type error :: String.t()
@type slug :: String.t()
@type slugs :: list(slug)
@type interval :: String.t()
@type opts :: Keyword.t()
@type timeseries_data_map :: %{
datetime: DateTime.t(),
slug: slug,
price_usd: float() | nil,
price_btc: float() | nil,
marketcap: float(),
marketcap_usd: float() | nil,
volume: float() | nil,
volume_usd: float() | nil
}
@type timeseries_data_result :: {:ok, list(timeseries_data_map())} | {:error, error()}
@type timeseries_metric_data_map :: %{
datetime: DateTime.t(),
value: float() | nil
}
@type timeseries_metric_data_result ::
{:ok, list(timeseries_metric_data_map())} | {:error, error()}
@type aggregated_metric_timeseries_data_map :: %{
String.t() => float()
}
@type aggregated_metric_timeseries_data_result ::
{:ok, aggregated_metric_timeseries_data_map()} | {:error, error()}
@type aggregated_marketcap_and_volume_map :: %{
slug: slug,
marketcap: float() | nil,
marketcap_usd: float() | nil,
volume: float() | nil,
volume_usd: float() | nil
}
@type aggregated_marketcap_and_volume_result ::
{:ok, list(aggregated_marketcap_and_volume_map())} | {:error, error()}
@type ohlc_map :: %{
open_price_usd: float() | nil,
high_price_usd: float() | nil,
close_price_usd: float() | nil,
low_price_usd: float() | nil
}
@type ohlc_result :: {:ok, ohlc_map()} | {:error, error()}
@type timeseries_ohlc_data_map :: %{
datetime: DateTime.t(),
open_price_usd: float() | nil,
high_price_usd: float() | nil,
close_price_usd: float() | nil,
low_price_usd: float() | nil
}
@type timeseries_ohlc_data_result :: {:ok, list(timeseries_ohlc_data_map())} | {:error, error()}
@type last_record_before_map :: %{
price_usd: float() | nil,
price_btc: float() | nil,
marketcap: float(),
marketcap_usd: float() | nil,
volume: float() | nil,
volume_usd: float() | nil
}
@type last_record_before_result :: {:ok, last_record_before_map()} | {:error, error()}
@type combined_marketcap_and_volume_map :: %{
datetime: DateTime.t(),
marketcap_usd: float(),
marketcap: float(),
volume_usd: float(),
volume: float()
}
@type combined_marketcap_and_volume_result ::
{:ok, list(combined_marketcap_and_volume_map())} | {:error, error()}
@table "asset_prices_v3"
schema @table do
field(:datetime, :naive_datetime, source: :dt)
field(:source, :string)
field(:slug, :string)
field(:price_usd, :float)
field(:price_btc, :float)
field(:marketcap_usd, :float)
field(:volume_usd, :float)
end
def table(), do: @table
@spec changeset(any(), any()) :: no_return()
def changeset(_, _), do: raise("Cannot change the asset_prices table")
@doc ~s"""
Return timeseries data for the given time period where every point consists
of price in USD, price in BTC, marketcap in USD and volume in USD
"""
@spec timeseries_data(slug | list(slug), DateTime.t(), DateTime.t(), interval, opts) ::
timeseries_data_result
def timeseries_data(slug, from, to, interval, opts \\ [])
def timeseries_data("TOTAL_ERC20", from, to, interval, opts) do
# Break here otherwise the Enum.filter/2 will remove all errors and report a wrong result
with {:ok, _source} <- opts_to_source(opts) do
Project.List.erc20_projects_slugs()
|> combined_marketcap_and_volume(from, to, interval, opts)
end
end
def timeseries_data(slug_or_slugs, from, to, interval, opts) do
with {:ok, source} <- opts_to_source(opts) do
aggregation = Keyword.get(opts, :aggregation) || :last
{query, args} =
timeseries_data_query(slug_or_slugs, from, to, interval, source, aggregation)
ClickhouseRepo.query_transform(
query,
args,
fn [timestamp, price_usd, price_btc, marketcap_usd, volume_usd, has_changed] ->
%{
datetime: DateTime.from_unix!(timestamp),
price_usd: price_usd,
price_btc: price_btc,
marketcap_usd: marketcap_usd,
marketcap: marketcap_usd,
volume_usd: volume_usd,
volume: volume_usd,
has_changed: has_changed
}
end
)
|> remove_missing_values()
end
end
@doc ~s"""
Return timeseries data for the given time period where every point consists
of price in USD, price in BTC, marketcap in USD and volume in USD
"""
@spec timeseries_metric_data(
slug | list(slug),
metric,
DateTime.t(),
DateTime.t(),
interval,
opts
) ::
timeseries_metric_data_result
def timeseries_metric_data(slug_or_slugs, metric, from, to, interval, opts \\ [])
# TODO: Use the source
def timeseries_metric_data("TOTAL_ERC20", metric, from, to, interval, opts) do
with {:ok, _source} <- opts_to_source(opts) do
Project.List.erc20_projects_slugs()
|> combined_marketcap_and_volume(from, to, interval, opts)
|> maybe_apply_function(fn result ->
metric = String.to_existing_atom(metric)
result
|> Enum.map(fn %{^metric => value, datetime: datetime} ->
%{datetime: datetime, value: value}
end)
end)
end
end
def timeseries_metric_data(slug_or_slugs, metric, from, to, interval, opts) do
with {:ok, source} <- opts_to_source(opts) do
aggregation = Keyword.get(opts, :aggregation) || :last
{query, args} =
timeseries_metric_data_query(
slug_or_slugs,
metric,
from,
to,
interval,
source,
aggregation
)
exec_timeseries_data_query(query, args)
end
end
# Like timeseries_metric_data/6, but returns per-slug values for every
# datetime bucket: [%{datetime: dt, data: [%{slug: s, value: v}, ...]}, ...].
def timeseries_metric_data_per_slug(slug_or_slugs, metric, from, to, interval, opts) do
  with {:ok, source} <- opts_to_source(opts) do
    aggregation = Keyword.get(opts, :aggregation) || :last

    {query, args} =
      timeseries_metric_data_per_slug_query(
        slug_or_slugs,
        metric,
        from,
        to,
        interval,
        source,
        aggregation
      )

    # Group the flat [timestamp, slug, value] rows by datetime.
    ClickhouseRepo.query_reduce(query, args, %{}, fn [timestamp, slug, value], acc ->
      datetime = DateTime.from_unix!(timestamp)
      elem = %{slug: slug, value: value}
      Map.update(acc, datetime, [elem], &[elem | &1])
    end)
    |> maybe_apply_function(fn list ->
      list
      |> Enum.map(fn {datetime, data} -> %{datetime: datetime, data: data} end)
    end)
  end
end
@doc ~s"""
Returns aggregated price in USD, price in BTC, marketcap in USD and
volume in USD for the given slugs and time period.
The aggregation can be changed by providing the following keyword parameters:
- :price_aggregation (:avg by default) - control price in USD and BTC aggregation
- :volume_aggregation (:avg by default) - control the volume aggregation
- :marketcap_aggregation (:avg by default) - control the marketcap aggregation
The available aggregations are #{inspect(@aggregations)}
"""
@spec aggregated_timeseries_data(slug | slugs, DateTime.t(), DateTime.t(), opts) ::
{:ok, list(map())} | {:error, String.t()}
def aggregated_timeseries_data(slug_or_slugs, from, to, opts \\ [])
def aggregated_timeseries_data([], _, _, _), do: {:ok, []}
def aggregated_timeseries_data(slug_or_slugs, from, to, opts)
when is_binary(slug_or_slugs) or is_list(slug_or_slugs) do
with {:ok, source} <- opts_to_source(opts) do
slugs = List.wrap(slug_or_slugs)
{query, args} = aggregated_timeseries_data_query(slugs, from, to, source)
ClickhouseRepo.query_transform(query, args, fn
[slug, price_usd, price_btc, marketcap_usd, volume_usd, has_changed] ->
%{
slug: slug,
price_usd: price_usd,
price_btc: price_btc,
marketcap_usd: marketcap_usd,
marketcap: marketcap_usd,
volume_usd: volume_usd,
volume: volume_usd,
has_changed: has_changed
}
end)
|> maybe_nullify_values()
end
end
@doc ~s"""
Return the aggregated data for all slugs for the provided metric in
the given interval
The default aggregation can be overriden by passing the :aggregation
key with as part of the keyword options list.
The supported aggregations are: #{inspect(@aggregations)}
In the success case the result is a map where the slug is the key and the value
is the aggregated metric's value
"""
@spec aggregated_metric_timeseries_data(slug | slugs, metric, DateTime.t(), DateTime.t(), opts) ::
aggregated_metric_timeseries_data_result()
def aggregated_metric_timeseries_data(slug_or_slugs, metric, from, to, opts \\ [])
def aggregated_metric_timeseries_data([], _, _, _, _), do: {:ok, %{}}
def aggregated_metric_timeseries_data(slugs, metric, from, to, opts)
when is_list(slugs) and length(slugs) > 50 do
# Break here otherwise the Enum.filter/2 will remove all errors and report a wrong result
with {:ok, _source} <- opts_to_source(opts) do
result =
Enum.chunk_every(slugs, 50)
|> Sanbase.Parallel.map(
&aggregated_metric_timeseries_data(&1, metric, from, to, opts),
timeout: 25_000,
max_concurrency: 8,
ordered: false
)
|> Enum.filter(&match?({:ok, _}, &1))
|> Enum.map(&elem(&1, 1))
|> Enum.reduce(%{}, &Map.merge(&1, &2))
{:ok, result}
end
end
def aggregated_metric_timeseries_data(slug_or_slugs, metric, from, to, opts)
when metric in @metrics and (is_binary(slug_or_slugs) or is_list(slug_or_slugs)) do
with {:ok, source} <- opts_to_source(opts) do
aggregation = Keyword.get(opts, :aggregation) || :avg
slugs = List.wrap(slug_or_slugs)
{query, args} =
aggregated_metric_timeseries_data_query(slugs, metric, from, to, source, aggregation)
ClickhouseRepo.query_reduce(query, args, %{}, fn
[slug, value, has_changed], acc ->
value = if has_changed == 1, do: value
Map.put(acc, slug, value)
end)
end
end
@doc ~s"""
Return the aggregated marketcap in USD and volume in USD for all slugs in the
given interval.
The default aggregation can be overriden by passing the :volume_aggregation
and/or :marketcap_aggregation keys in the keyword options list
The supported aggregations are: #{inspect(@aggregations)}
"""
@spec aggregated_marketcap_and_volume(slug | slugs, DateTime.t(), DateTime.t(), opts) ::
aggregated_marketcap_and_volume_result()
def aggregated_marketcap_and_volume(slug_or_slugs, from, to, opts \\ [])
def aggregated_marketcap_and_volume([], _, _, _), do: {:ok, %{}}
def aggregated_marketcap_and_volume(slug_or_slugs, from, to, opts)
when is_binary(slug_or_slugs) or is_list(slug_or_slugs) do
with {:ok, source} <- opts_to_source(opts) do
slugs = List.wrap(slug_or_slugs)
{query, args} = aggregated_marketcap_and_volume_query(slugs, from, to, source, opts)
ClickhouseRepo.query_transform(query, args, fn
[slug, marketcap_usd, volume_usd, has_changed] ->
%{
slug: slug,
marketcap_usd: marketcap_usd,
marketcap: marketcap_usd,
volume_usd: volume_usd,
volume: volume_usd,
has_changed: has_changed
}
end)
|> maybe_add_percent_of_total_marketcap()
|> maybe_nullify_values()
end
end
# TODO: Implement `opts`, read and use `:source`
@doc """
Return the slugs whose aggregated `metric` value in the interval satisfies
the `operator`/`threshold` filter.
"""
def slugs_by_filter(metric, from, to, operator, threshold, aggregation) do
  metric
  |> slugs_by_filter_query(from, to, operator, threshold, aggregation)
  |> exec_slugs_query()
end

# TODO: Implement `opts`, read and use `:source`
@doc """
Return all slugs ordered by their aggregated `metric` value in the interval.
"""
def slugs_order(metric, from, to, direction, aggregation) do
  metric
  |> slugs_order_query(from, to, direction, aggregation)
  |> exec_slugs_query()
end

# Runs a {query, args} pair returning [slug, value] rows, keeping only slugs.
defp exec_slugs_query({query, args}) do
  ClickhouseRepo.query_transform(query, args, fn [slug, _value] -> slug end)
end
  @doc ~s"""
  Return the last record before the given `datetime`
  """
  @spec last_record_before(slug, DateTime.t(), opts) ::
          last_record_before_result()
  def last_record_before(slug, datetime, opts \\ [])
  def last_record_before(slug, datetime, opts) do
    with {:ok, source} <- opts_to_source(opts) do
      {query, args} = last_record_before_query(slug, datetime, source)
      # The plain keys duplicate the `*_usd` values for callers that use
      # the shorter names.
      ClickhouseRepo.query_transform(
        query,
        args,
        fn [price_usd, price_btc, marketcap_usd, volume_usd] ->
          %{
            price_usd: price_usd,
            price_btc: price_btc,
            marketcap_usd: marketcap_usd,
            marketcap: marketcap_usd,
            volume_usd: volume_usd,
            volume: volume_usd
          }
        end
      )
      |> maybe_unwrap_ok_value()
    end
  end
  @doc ~s"""
  Return open-high-close-low price data in USD for the provided slug
  in the given interval.
  """
  @spec ohlc(slug, DateTime.t(), DateTime.t(), opts) :: ohlc_result()
  def ohlc(slug, from, to, opts \\ []) do
    with {:ok, source} <- opts_to_source(opts) do
      {query, args} = ohlc_query(slug, from, to, source)
      # Single aggregated OHLC row for the whole interval (no datetime key).
      ClickhouseRepo.query_transform(query, args, fn [open, high, low, close, has_changed] ->
        %{
          open_price_usd: open,
          high_price_usd: high,
          close_price_usd: close,
          low_price_usd: low,
          has_changed: has_changed
        }
      end)
      |> maybe_nullify_values()
      |> maybe_unwrap_ok_value()
    end
  end
  @doc ~s"""
  Return open-high-close-low price data in USD for the provided slug
  in the given interval.
  """
  @spec timeseries_ohlc_data(slug, DateTime.t(), DateTime.t(), interval, opts) ::
          timeseries_ohlc_data_result()
  def timeseries_ohlc_data(slug, from, to, interval, opts \\ []) do
    with {:ok, source} <- opts_to_source(opts) do
      {query, args} = timeseries_ohlc_data_query(slug, from, to, interval, source)
      # One OHLC bucket per `interval`, keyed by its unix timestamp.
      ClickhouseRepo.query_transform(
        query,
        args,
        fn [timestamp, open, high, low, close, has_changed] ->
          %{
            datetime: DateTime.from_unix!(timestamp),
            open_price_usd: open,
            high_price_usd: high,
            close_price_usd: close,
            low_price_usd: low,
            has_changed: has_changed
          }
        end
      )
      |> remove_missing_values()
    end
  end
  @doc ~s"""
  Return the sum of all marketcaps and volumes of the slugs in the given interval
  """
  @spec combined_marketcap_and_volume(slugs, DateTime.t(), DateTime.t(), interval, opts) ::
          combined_marketcap_and_volume_result()
  def combined_marketcap_and_volume(slugs, from, to, interval, opts \\ [])
  def combined_marketcap_and_volume([], _, _, _, _), do: {:ok, []}
  # More than 30 slugs: chunk, compute each chunk in parallel (cached per
  # sorted chunk), then merge the partial results.
  def combined_marketcap_and_volume(slugs, from, to, interval, opts) when length(slugs) > 30 do
    # Break here otherwise the Enum.filter/2 will remove all errors and report a wrong result
    with {:ok, _source} <- opts_to_source(opts) do
      slugs
      |> Enum.chunk_every(30)
      |> Sanbase.Parallel.map(
        fn slugs_chunk ->
          cache_key = Enum.sort(slugs_chunk) |> Sanbase.Cache.hash()
          Sanbase.Cache.get_or_store({__MODULE__, __ENV__.function, cache_key}, fn ->
            combined_marketcap_and_volume(slugs_chunk, from, to, interval, opts)
          end)
        end,
        max_concurrency: 8
      )
      |> Enum.filter(&match?({:ok, _}, &1))
      |> combine_marketcap_and_volume_results()
    end
  end
  def combined_marketcap_and_volume(slug_or_slugs, from, to, interval, opts) do
    with {:ok, source} <- opts_to_source(opts) do
      slugs = List.wrap(slug_or_slugs)
      {query, args} = combined_marketcap_and_volume_query(slugs, from, to, interval, source)
      ClickhouseRepo.query_transform(
        query,
        args,
        fn [timestamp, marketcap_usd, volume_usd, has_changed] ->
          %{
            datetime: DateTime.from_unix!(timestamp),
            marketcap_usd: marketcap_usd,
            marketcap: marketcap_usd,
            volume_usd: volume_usd,
            volume: volume_usd,
            has_changed: has_changed
          }
        end
      )
      |> remove_missing_values()
    end
    |> maybe_add_percent_of_total_marketcap()
  end
  def available_slugs(opts \\ [])
  # Slugs of all projects whose price data comes from the resolved `source`.
  def available_slugs(opts) do
    with {:ok, source} <- opts_to_source(opts) do
      slugs =
        Sanbase.Model.Project.List.projects_with_source(source)
        |> Enum.map(& &1.slug)
      {:ok, slugs}
    end
  end
  def slugs_with_volume_over(volume, opts \\ [])
  # Slugs whose volume exceeds `volume` — the exact window/semantics are
  # defined by slugs_with_volume_over_query/2 (verify there).
  def slugs_with_volume_over(volume, opts) when is_number(volume) do
    with {:ok, source} <- opts_to_source(opts) do
      {query, args} = slugs_with_volume_over_query(volume, source)
      ClickhouseRepo.query_transform(query, args, fn [slug] -> slug end)
    end
  end
def has_data?(slug) do
{query, args} = select_any_record_query(slug)
ClickhouseRepo.query_transform(query, args, & &1)
|> case do
{:ok, [_]} -> {:ok, true}
{:ok, []} -> {:ok, false}
{:error, error} -> {:error, error}
end
end
@doc ~s"""
Return the first datetime for which `slug` has data
"""
@spec first_datetime(slug, opts) :: {:ok, DateTime.t()} | {:ok, nil} | {:error, error}
def first_datetime(slug, opts \\ [])
def first_datetime("TOTAL_ERC20", _), do: ~U[2015-07-30 00:00:00Z]
def first_datetime(slug, opts) do
with {:ok, source} <- opts_to_source(opts) do
{query, args} = first_datetime_query(slug, source)
ClickhouseRepo.query_transform(query, args, fn
[timestamp] -> DateTime.from_unix!(timestamp)
end)
|> maybe_unwrap_ok_value()
end
end
  # Return, wrapped in {:ok, _}, the datetime at which data for `slug` was
  # last computed.
  def last_datetime_computed_at(slug) do
    {query, args} = last_datetime_computed_at_query(slug)
    ClickhouseRepo.query_transform(query, args, fn [datetime] ->
      DateTime.from_unix!(datetime)
    end)
    |> maybe_unwrap_ok_value()
  end
# Private functions
# Combine 2 price points. If `left` is empty then this is a new/initial price point
defp combine_price_points(left, right) do
%{
marketcap: (left[:marketcap] || 0) + (right[:marketcap] || 0),
marketcap_usd: (left[:marketcap_usd] || 0) + (right[:marketcap_usd] || 0),
volume: (left[:volume] || 0) + (right[:volume] || 0),
volume_usd: (left[:volume_usd] || 0) + (right[:volume_usd] || 0)
}
end
  # Accumulate `price_point` into `map`, keyed by its datetime. A slot whose
  # has_changed flag is 0 keeps its current value instead of absorbing the
  # new point.
  # NOTE(review): maps built by combine_price_points/2 carry only the four
  # marketcap/volume keys, so the `%{has_changed: 0}` clause may never match
  # values stored by this function — verify intent.
  defp update_price_point_in_map(map, price_point) do
    %{datetime: datetime} = price_point
    initial = combine_price_points(%{}, price_point)
    Map.update(map, datetime, initial, fn
      %{has_changed: 0} = elem -> elem
      elem -> combine_price_points(elem, price_point)
    end)
  end
  # Merge the per-chunk {:ok, data} results into a single chronologically
  # sorted list of price points (one per datetime), wrapped in {:ok, _}.
  defp combine_marketcap_and_volume_results(results) do
    results
    |> Enum.reduce(%{}, fn {:ok, data}, acc ->
      Enum.reduce(data, acc, &update_price_point_in_map(&2, &1))
    end)
    |> Enum.map(fn {datetime, data} -> Map.put(data, :datetime, datetime) end)
    |> Enum.sort_by(& &1.datetime, {:asc, DateTime})
    |> wrap_ok()
  end
  # Annotate every element with its share of the total marketcap, as a
  # fraction between 0 and 1 with 5 digits of precision.
  defp maybe_add_percent_of_total_marketcap({:ok, data}) do
    total_marketcap_usd =
      Enum.reduce(data, 0, fn elem, acc -> acc + (elem.marketcap_usd || 0) end)
    result =
      Enum.map(
        data,
        fn %{marketcap_usd: marketcap_usd} = elem ->
          marketcap_percent =
            Sanbase.Math.percent_of(marketcap_usd, total_marketcap_usd,
              type: :between_0_and_1,
              precision: 5
            )
          Map.put(elem, :marketcap_percent, marketcap_percent)
        end
      )
    {:ok, result}
  end
  # Errors pass through untouched.
  defp maybe_add_percent_of_total_marketcap({:error, error}), do: {:error, error}
  # Resolve the `:source` option (falling back to @default_source) and
  # validate it against the supported / deprecated source lists.
  defp opts_to_source(opts) do
    case Keyword.get(opts, :source, @default_source) do
      source when source in @supported_sources ->
        {:ok, source}
      source when source in @deprecated_sources ->
        {:error,
         "Price related data source #{inspect(source)} is deprecated. Supported price related sources are: #{@supported_sources_str}"}
      source ->
        {:error,
         "Price related data source #{inspect(source)} is not supported. Supported price related sources are: #{@supported_sources_str}"}
    end
  end
end
| 32.625943 | 137 | 0.633628 |
0317bd76d2a7e37d86187882c806d7129099f5ee | 1,136 | ex | Elixir | deps/absinthe/lib/absinthe/type/custom/decimal.ex | JoakimEskils/elixir-absinthe | d81e24ec7c7b1164e6d152101dd50422f192d7e9 | [
"MIT"
] | 3 | 2017-06-22T16:33:58.000Z | 2021-07-07T15:21:09.000Z | lib/absinthe/type/custom/decimal.ex | bruce/absinthe | 19b63d3aaa9fb75aad01ffd5e91d89e0b30d7f91 | [
"MIT"
] | null | null | null | lib/absinthe/type/custom/decimal.ex | bruce/absinthe | 19b63d3aaa9fb75aad01ffd5e91d89e0b30d7f91 | [
"MIT"
] | null | null | null | if Code.ensure_loaded?(Decimal) do
  defmodule Absinthe.Type.Custom.Decimal do
    @moduledoc false
    # Serialization is simply Decimal's canonical string representation.
    defdelegate serialize(value), to: Decimal, as: :to_string
    # Two stacked specs document the overloads: Null input parses to
    # {:ok, nil}; everything else to {:ok, Decimal.t} or :error.
    @spec parse(any) :: {:ok, Decimal.t} | :error
    @spec parse(Absinthe.Blueprint.Input.Null.t) :: {:ok, nil}
    def parse(%Absinthe.Blueprint.Input.String{value: value}) do
      # NOTE(review): assumes Decimal.parse/1 returns {:ok, decimal} (the
      # Decimal 1.x contract); Decimal 2.x returns {decimal, rest}. Confirm
      # against the pinned dependency version.
      case Decimal.parse(value) do
        {:ok, decimal} -> {:ok, decimal}
        _ -> :error
      end
    end
    def parse(%Absinthe.Blueprint.Input.Float{value: value}) do
      decimal = Decimal.new(value)
      # Reject NaN instead of leaking it into the schema.
      if Decimal.nan?(decimal), do: :error, else: {:ok, decimal}
    end
    def parse(%Absinthe.Blueprint.Input.Integer{value: value}) do
      decimal = Decimal.new(value)
      if Decimal.nan?(decimal), do: :error, else: {:ok, decimal}
    end
    def parse(%Absinthe.Blueprint.Input.Null{}) do
      {:ok, nil}
    end
    # Any other blueprint input node cannot be coerced to a decimal.
    def parse(_) do
      :error
    end
  end
else
  # Fallback compiled when the optional Decimal dependency is unavailable:
  # every parse fails and serialization yields nil.
  defmodule Absinthe.Type.Custom.Decimal do
    @moduledoc false
    @spec parse(any) :: :error
    def parse(_), do: :error
    @spec serialize(any) :: nil
    def serialize(_), do: nil
  end
end
| 24.170213 | 65 | 0.626761 |
03181695c7a493ceb8e82545a77ad4d5d4cf20a7 | 129 | exs | Elixir | apps/feedback/config/dev.exs | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 42 | 2019-05-29T16:05:30.000Z | 2021-08-09T16:03:37.000Z | apps/feedback/config/dev.exs | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 872 | 2019-05-29T17:55:50.000Z | 2022-03-30T09:28:43.000Z | apps/feedback/config/dev.exs | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 12 | 2019-07-01T18:33:21.000Z | 2022-03-10T02:13:57.000Z | use Mix.Config
config :feedback,
exaws_config_fn: &Feedback.MockAws.config/1,
exaws_perform_fn: &Feedback.MockAws.perform/2
| 21.5 | 47 | 0.790698 |
031822b177418d3ba574c797a8b7671b7a9d423f | 1,088 | exs | Elixir | exercises/collatz-conjecture/test/collatz_conjecture_test.exs | DuoPan/elixir | e96388f242c383c1f45935570ed2f42394171fc6 | [
"MIT"
] | null | null | null | exercises/collatz-conjecture/test/collatz_conjecture_test.exs | DuoPan/elixir | e96388f242c383c1f45935570ed2f42394171fc6 | [
"MIT"
] | null | null | null | exercises/collatz-conjecture/test/collatz_conjecture_test.exs | DuoPan/elixir | e96388f242c383c1f45935570ed2f42394171fc6 | [
"MIT"
] | 1 | 2021-03-15T11:02:40.000Z | 2021-03-15T11:02:40.000Z | defmodule CollatzConjectureTest do
use ExUnit.Case
test "zero steps for one" do
assert CollatzConjecture.calc(1) == 0
end
# @tag :pending
test "zero is an error" do
assert_raise FunctionClauseError, fn -> CollatzConjecture.calc(0) end
end
# @tag :pending
test "divide if even" do
assert CollatzConjecture.calc(16) == 4
end
# @tag :pending
test "even and odd steps" do
assert CollatzConjecture.calc(12) == 9
end
# @tag :pending
test "Large number of even and odd steps" do
assert CollatzConjecture.calc(1_000_000) == 152
end
# @tag :pending
test "start with odd step" do
assert CollatzConjecture.calc(21) == 7
end
# @tag :pending
test "more steps than starting number" do
assert CollatzConjecture.calc(7) == 16
end
# @tag :pending
test "negative value is an error " do
assert_raise FunctionClauseError, fn -> CollatzConjecture.calc(-15) end
end
# @tag :pending
test "string as input value is an error " do
assert_raise FunctionClauseError, fn -> CollatzConjecture.calc("fubar") end
end
end
| 22.666667 | 79 | 0.690257 |
0318315e881288a04c653b16e6ae2801f987e8f3 | 1,818 | exs | Elixir | apps/mcts/test/mcts/zipper_test.exs | rjdellecese/connect-four | b3ab59cffde8bed83382881a2cae567b0e383cc8 | [
"MIT"
] | 3 | 2020-05-03T04:35:05.000Z | 2020-09-06T19:19:47.000Z | apps/mcts/test/mcts/zipper_test.exs | rjdellecese/connect-four | b3ab59cffde8bed83382881a2cae567b0e383cc8 | [
"MIT"
] | 6 | 2019-09-02T13:36:55.000Z | 2021-11-19T10:13:38.000Z | apps/mcts/test/mcts/zipper_test.exs | rjdellecese/connect_four_umbrella | b3ab59cffde8bed83382881a2cae567b0e383cc8 | [
"MIT"
] | null | null | null | defmodule MCTS.ZipperTest do
use ExUnit.Case, async: true
doctest MCTS.Zipper
alias MCTS.{Node, Zipper}
setup do
####################
# Structure of tree:
# 1
# / \
# 2 3
# / /|\
# 4 5 6 7
####################
%{
tree: %Node{
payload: 1,
children: [
%Node{
payload: 2,
children: [
%Node{payload: 4, children: []}
]
},
%Node{
payload: 3,
children: [
%Node{payload: 5, children: []},
%Node{payload: 6, children: []},
%Node{payload: 7, children: []}
]
}
]
}
}
end
test "traversing down and up and down works as expected", %{tree: tree} do
zipper = %Zipper{focus: tree}
assert zipper.focus.payload == 1
zipper = Zipper.down(zipper, 0)
assert zipper.focus.payload == 2
zipper = Zipper.up(zipper)
assert zipper.focus.payload == 1
zipper = Zipper.down(zipper, 1)
assert zipper.focus.payload == 3
zipper = Zipper.down(zipper, 1)
assert zipper.focus.payload == 6
end
describe "down/2" do
test "raises an error when the focus has no children", %{tree: tree} do
zipper =
%Zipper{focus: tree}
|> Zipper.down(0)
|> Zipper.down(0)
assert_raise RuntimeError, "focus node has no children", fn ->
Zipper.down(zipper, 0)
end
end
test "raises an ArgumentError when there is no child at the given index", %{tree: tree} do
zipper =
%Zipper{focus: tree}
|> Zipper.down(1)
assert_raise ArgumentError, "no child node at index: 3 (index may not be negative)", fn ->
Zipper.down(zipper, 3)
end
end
end
end
| 24.24 | 96 | 0.511001 |
03183da0b42b151434f08041ea6e01a9b9ef46af | 2,233 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/machine_types_scoped_list_warning.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/compute/lib/google_api/compute/v1/model/machine_types_scoped_list_warning.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/machine_types_scoped_list_warning.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.MachineTypesScopedListWarning do
  @moduledoc """
  [Output Only] An informational warning that appears when the machine types list is empty.
  ## Attributes
  * `code` (*type:* `String.t`, *default:* `nil`) - [Output Only] A warning code, if applicable. For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.
  * `data` (*type:* `list(GoogleApi.Compute.V1.Model.MachineTypesScopedListWarningData.t)`, *default:* `nil`) - [Output Only] Metadata about this warning in key: value format. For example:
  "data": [ { "key": "scope", "value": "zones/us-east1-d" }
  * `message` (*type:* `String.t`, *default:* `nil`) - [Output Only] A human-readable description of the warning code.
  """
  use GoogleApi.Gax.ModelBase
  # Auto-generated model: field names mirror the API's JSON keys.
  @type t :: %__MODULE__{
          :code => String.t(),
          :data => list(GoogleApi.Compute.V1.Model.MachineTypesScopedListWarningData.t()),
          :message => String.t()
        }
  field(:code)
  field(:data, as: GoogleApi.Compute.V1.Model.MachineTypesScopedListWarningData, type: :list)
  field(:message)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.MachineTypesScopedListWarning do
  # Delegate JSON decoding to the generated model's decode/2.
  def decode(value, options) do
    GoogleApi.Compute.V1.Model.MachineTypesScopedListWarning.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.MachineTypesScopedListWarning do
  # Encode via the shared Gax model-base encoder.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 41.351852 | 194 | 0.729512 |
03186a6ee1a6ffb4c906d13adeaf0d6d5829a963 | 820 | ex | Elixir | test/support/conn_case.ex | leolorenzoluis/poll | c1009e0796904414a94ef2e7509f7682d2910abf | [
"Apache-2.0"
] | 1 | 2016-08-05T23:37:29.000Z | 2016-08-05T23:37:29.000Z | test/support/conn_case.ex | leolorenzoluis/poll | c1009e0796904414a94ef2e7509f7682d2910abf | [
"Apache-2.0"
] | null | null | null | test/support/conn_case.ex | leolorenzoluis/poll | c1009e0796904414a94ef2e7509f7682d2910abf | [
"Apache-2.0"
] | null | null | null | defmodule Poll.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
imports other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
import Poll.Router.Helpers
# The default endpoint for testing
@endpoint Poll.Endpoint
end
end
setup tags do
{:ok, conn: Phoenix.ConnTest.conn()}
end
end
| 24.117647 | 56 | 0.717073 |
03186b94deb751c0d2833e46ee48f9fc0ebd43b5 | 2,004 | exs | Elixir | apps/re/test/listings/history/server_test.exs | ruby2elixir/emcasa-backend | 70d7f4f233555417941ffa6ada84cf8740c21dd2 | [
"MIT"
] | 4 | 2019-11-01T16:29:31.000Z | 2020-10-10T21:20:12.000Z | apps/re/test/listings/history/server_test.exs | eduardomartines/emcasa-backend | 70d7f4f233555417941ffa6ada84cf8740c21dd2 | [
"MIT"
] | null | null | null | apps/re/test/listings/history/server_test.exs | eduardomartines/emcasa-backend | 70d7f4f233555417941ffa6ada84cf8740c21dd2 | [
"MIT"
] | 5 | 2019-11-04T21:25:45.000Z | 2020-02-13T23:49:36.000Z | defmodule Re.Listings.History.ServerTest do
use Re.ModelCase
import Re.Factory
alias Re.{
Listing,
Listings.History.Server,
Listings.PriceHistory,
Listings.StatusHistory,
Repo
}
describe "handle_info/2" do
test "save price when it's updated" do
listing = insert(:listing)
changeset = Listing.changeset(listing, %{price: listing.price + 50})
Server.handle_info(
%{
topic: "update_listing",
type: :update,
content: %{
new: listing,
changeset: changeset
}
},
[]
)
assert [_] = Repo.all(PriceHistory)
end
test "do not save price when it's not updated" do
listing = insert(:listing)
changeset = Listing.changeset(listing, %{description: "descr"})
Server.handle_info(
%{
topic: "update_listing",
type: :update,
content: %{
new: listing,
changeset: changeset
}
},
[]
)
assert [] == Repo.all(PriceHistory)
end
test "save status history when it's activated" do
listing = insert(:listing, status: "inactive")
changeset = Listing.changeset(listing, %{status: "active"})
Server.handle_info(
%{
topic: "activate_listing",
type: :update,
content: %{
new: listing,
changeset: changeset
}
},
[]
)
assert [_] = Repo.all(StatusHistory)
end
test "save status history when it's deactivated" do
listing = insert(:listing, status: "active")
changeset = Listing.changeset(listing, %{status: "inactive"})
Server.handle_info(
%{
topic: "deactivate_listing",
type: :update,
content: %{
new: listing,
changeset: changeset
}
},
[]
)
assert [_] = Repo.all(StatusHistory)
end
end
end
| 21.782609 | 74 | 0.530439 |
03187c6a5d4a4e0125a1186705d968b2e3af40f4 | 12,919 | ex | Elixir | lib/ex_admin/index.ex | devonestes/ex_admin | e135ae7c28de78fc87baf519ff8a32da12e8bf66 | [
"MIT"
] | null | null | null | lib/ex_admin/index.ex | devonestes/ex_admin | e135ae7c28de78fc87baf519ff8a32da12e8bf66 | [
"MIT"
] | 1 | 2019-03-02T09:07:47.000Z | 2019-03-02T09:07:47.000Z | lib/ex_admin/index.ex | devonestes/ex_admin | e135ae7c28de78fc87baf519ff8a32da12e8bf66 | [
"MIT"
] | 1 | 2018-12-20T15:01:39.000Z | 2018-12-20T15:01:39.000Z | defmodule ExAdmin.Index do
@moduledoc """
Override the default index page for an ExAdmin resource
By default, ExAdmin renders the index table without any additional
configuration. It renders each column in the model, except the id,
inserted_at, and updated_at columns.
## Default Table Type
ExAdmin displays a selection checkbox column on the left with a batch
action control that enables when a checkbox is selected.
To customize the index page, use the `index` macro.
For example, the following will show on the id an name fields, as
well place a selection column and batch actions row on the page:
defmodule MyProject.ExAdmin.MyModel do
use ExAdmin.Register
register_resource MyProject.MyModel do
index do
selectable_column()
column :id
column :name
actions # display the default actions column
end
end
end
### Image fields
For image fields, use the `image: true` option. For example:
index do
column :name
column :image, [image: true, height: 100], &(ExAdminDemo.Image.url({&1.image, &1}, :thumb))
end
### Custom columns
Columns can be customized with column/2 where the second argument is
an anonymous function called with model. Here are a couple examples:
index do
column :id
column :name, fn(category) ->
Phoenix.HTML.Tag.content_tag :span, category.name,
"data-id": category.id, class: "category"
end
column "Created", fn(category) ->
category.created_at
end
end
### Override the Actions column
The Actions column can be customized by adding `column "Actions", fn(x) -> ...`
column "Actions", fn(r) ->
safe_concat link_to("Restore", "/admin/backuprestores/restore/#\{r.id}", "data-method": :put,
"data-confirm": "You are about to restore #\{r.file_name}. Are you sure?",
class: "member_link restore-link"),
link_to("Delete", "/admin/backuprestores/#\{r.id}", "data-method": :delete,
"data-confirm": "Are you sure you want to delete this?",
class: "member_link")
end
### Associations
By default, ExAdmin will attempt to render a belongs_to association with a
select control, using name field in the association. If you would like to
render an association with another field name, or would like to use more than
one field, use the :field option.
column :account, fields: [:username]
### Change the column label
Use the :label option to override the column name:
column :name, label: "Custom Name"
## As Grid
By providing option `as: :grid` to the `index` macro, a grid index page
is rendered.
### For Example:
index as: :grid, default: true do
cell fn(p) ->
markup do
div do
a href: admin_resource_path(p, :show) do
img(src: ExAdminDemo.Image.url({p.image_file_name, p}, :thumb), height: 100)
end
end
a truncate(p.title), href: admin_resource_path(p, :show)
end
end
end
"""
require Logger
require Integer
import ExAdmin.Utils
import ExAdmin.Helpers
import ExAdmin.Gettext
import Kernel, except: [div: 2, to_string: 1]
use Xain
# alias ExAdmin.Schema
  @doc false
  # `use ExAdmin.Index` simply imports this module's DSL macros.
  defmacro __using__(_) do
    quote do
      import unquote(__MODULE__)
    end
  end
@default_actions [:show, :edit, :delete]
  @doc """
  The index macro is used to customize the index page of a resource.
  """
  # Injects an `index_view/3` function into the registering module. The DSL
  # body (`contents`) communicates back through `var!` variables scoped to
  # ExAdmin.Index / ExAdmin.Show (columns, actions, cell, selectable_column),
  # which are then folded into the opts map passed to render_index_pages/5.
  defmacro index(opts \\ [], do: contents) do
    quote location: :keep do
      import ExAdmin.CSV, only: [csv: 1, csv: 2]
      import ExAdmin.Register
      import ExAdmin.Index
      def index_view(var!(conn), page, scope_counts) do
        import ExAdmin.Form, except: [actions: 1]
        import ExAdmin.Register, except: [actions: 1]
        import ExAdmin.ViewHelpers
        var!(columns, ExAdmin.Show) = []
        var!(selectable_column, ExAdmin.Index) = nil
        var!(actions, ExAdmin.Index) = nil
        var!(cell, ExAdmin.Index) = nil
        opts = unquote(opts)
        unquote(contents)
        # nil means selectable_column() was never called -> no checkbox column.
        selectable =
          case Macro.expand(var!(selectable_column, ExAdmin.Index), __ENV__) do
            nil -> false
            other -> other
          end
        actions =
          ExAdmin.Index.get_index_actions(var!(conn).assigns.defn, var!(actions, ExAdmin.Index))
        opts =
          Enum.into(opts, %{})
          |> Map.put(:column_list, var!(columns, ExAdmin.Show) |> Enum.reverse())
          |> Map.put(:selectable_column, selectable)
          |> Map.put(:actions, actions)
        markup safe: true do
          ExAdmin.Index.render_index_pages(
            var!(conn),
            page,
            scope_counts,
            var!(cell, ExAdmin.Index),
            opts
          )
        end
      end
    end
  end
  @doc false
  # Normalize the DSL `actions` value ([]/nil -> the defaults, false -> none)
  # and remove the default actions the definition disables.
  # NOTE: `--` is right-associative, so the last line reads as
  # `actions -- (@default_actions -- defn.actions)` — i.e. it removes from
  # `actions` the default actions NOT present in `defn.actions`.
  def get_index_actions(defn, actions) do
    actions =
      case actions do
        [] -> @default_actions
        nil -> @default_actions
        false -> []
        list -> list
      end
    actions -- @default_actions -- defn.actions
  end
  @doc """
  Define which actions will be displayed in the index view.
  ## Examples
      actions
      actions [:new, :delete]
  """
  # Validated at compile time: anything outside @default_actions
  # ([:show, :edit, :delete]) raises; nil/false mean "no actions".
  defmacro actions(opts \\ []) do
    if opts != nil and opts != false and opts -- @default_actions != [] do
      raise ArgumentError, "Only #{inspect(@default_actions)} are allowed!"
    end
    quote do
      var!(actions, ExAdmin.Index) = unquote(opts)
    end
  end
  @doc """
  Define a grid cell for grid view.
  ## Example
      index as: :grid, default: true, columns: 6 do
        import Kernel, except: [div: 2]
        cell fn(p) ->
          div ".box" do
            div ".box-body" do
              a href: admin_resource_path(p, :show) do
                img(src: ExAdminDemo.Image.url({p.image_file_name, p}, :thumb), height: 100)
              end
            end
            div ".box-footer" do
              a truncate(p.title), href: admin_resource_path(p, :show)
            end
          end
        end
      end
  """
  # Stores the renderer fun in the DSL-shared `cell` variable for index/1.
  defmacro cell(fun) do
    quote do
      var!(cell, ExAdmin.Index) = unquote(fun)
    end
  end
  @doc """
  Add a column of selection check boxes
  Allows users to select individual rows on the index page. Selecting
  columns activates the batch actions button.
  """
  # Sets the DSL-shared flag read back inside the index/1 macro.
  defmacro selectable_column do
    quote do
      var!(selectable_column, ExAdmin.Index) = true
    end
  end
  @doc false
  # Fallback index view used when the resource did not define `index do`.
  # Derives the column list from the index filters (when configured) or the
  # schema fields, and makes the :id column a link when present.
  def default_index_view(conn, page, scope_counts) do
    case conn.assigns.defn do
      nil ->
        throw(:invalid_route)
      %{__struct__: _} = defn ->
        columns =
          case defn.index_filters do
            [] ->
              []
            [false] ->
              []
            [_] ->
              ExAdmin.Filter.fields(conn.assigns.defn)
              |> Keyword.keys()
          end
          |> case do
            [] ->
              # No filter-derived columns: fall back to all schema fields
              # except the timestamps.
              defn.resource_model.__schema__(:fields)
              |> Enum.filter(&(&1 not in [:inserted_at, :updated_at]))
            other ->
              other
          end
          |> Enum.map(&{translate_field(defn, &1), %{}})
        columns =
          if :id in defn.resource_model.__schema__(:fields) and
               Enum.any?(columns, &(elem(&1, 0) == :id)) do
            Keyword.put(columns, :id, %{link: true})
          else
            columns
          end
        opts =
          %{}
          |> Map.put(:column_list, columns)
          |> Map.put(:selectable_column, true)
          |> Map.put(:actions, get_index_actions(defn, []))
        markup safe: true do
          ExAdmin.Index.render_index_pages(var!(conn), page, scope_counts, nil, opts)
        end
    end
  end
defp get_resource_fields([]), do: []
defp get_resource_fields([resource | _]), do: resource.__struct__.__schema__(:fields)
  @doc false
  # Assemble the option map shared by grid and table rendering, then dispatch
  # on page_opts[:as] via _render_index_page/3.
  def render_index_pages(conn, page, scope_counts, cell, page_opts) do
    # require IEx
    # IEx.pry
    name = resource_model(conn) |> titleize |> Inflex.pluralize()
    defn = conn.assigns.defn
    label = get_resource_label(conn) |> Inflex.pluralize()
    # Batch actions require them not to be disabled on the definition AND
    # the :delete action to be available.
    batch_actions = false not in defn.batch_actions and :delete in page_opts[:actions]
    opts = %{
      columns: Map.get(page_opts, :columns, 3),
      column_list: Map.get(page_opts, :column_list),
      count: page.total_entries,
      name: name,
      order: ExQueb.get_sort_order(conn.params["order"]),
      href: admin_resource_path(conn, :index) <> "?order=",
      defn: defn,
      batch_actions: batch_actions,
      scopes: defn.scopes,
      label: label,
      resource_model: conn.params["resource"],
      page: page,
      cell: cell,
      scope_counts: scope_counts,
      opts: page_opts,
      resources: page.entries,
      selectable_column: page_opts[:selectable_column],
      actions: page_opts[:actions]
    }
    _render_index_page(conn, opts, page_opts)
  end
  # Grid rendering: batch actions are always disabled (`false`) in grid mode;
  # an empty result set falls back to the theme's blank-slate page.
  defp _render_index_page(conn, opts, %{as: :grid}) do
    Module.concat(conn.assigns.theme, Index).wrap_index_grid(fn ->
      Module.concat(conn.assigns.theme, Index).batch_action_form(
        conn,
        false,
        opts[:scopes],
        opts[:resource_model],
        opts[:scope_counts],
        fn ->
          if opts[:count] == 0 do
            Module.concat(conn.assigns.theme, Index).blank_slate_page(conn, opts)
          else
            Module.concat(conn.assigns.theme, Index).paginated_collection_grid(conn, opts)
          end
        end
      )
    end)
  end
  # Table rendering: unless the DSL already defined a custom "Actions" column
  # (or the actions list is empty), append a generated Actions column whose
  # cells are built by build_index_links/4.
  defp _render_index_page(conn, opts, page_opts) do
    page = opts[:page]
    actions = opts[:actions]
    opts = Map.put(opts, :fields, get_resource_fields(page.entries))
    columns = page_opts[:column_list]
    custom_actions_column? = Enum.any?(columns, &(elem(&1, 0) == "Actions"))
    columns =
      if custom_actions_column? || Enum.empty?(actions) do
        columns
      else
        columns ++
          [
            {"Actions",
             %{
               fun: fn resource ->
                 build_index_links(conn, resource, actions, page.page_number)
               end,
               label: ExAdmin.Gettext.gettext("Actions")
             }}
          ]
      end
    opts = Map.put(opts, :column_list, columns)
    Module.concat(conn.assigns.theme, Index).wrap_index_grid(fn ->
      Module.concat(conn.assigns.theme, Index).batch_action_form(
        conn,
        opts[:batch_actions],
        opts[:scopes],
        opts[:resource_model],
        opts[:scope_counts],
        fn ->
          if opts[:count] == 0 do
            Module.concat(conn.assigns.theme, Index).blank_slate_page(conn, opts)
          else
            Module.concat(conn.assigns.theme, Index).paginated_collection_table(conn, opts)
          end
        end
      )
    end)
  end
@doc """
Build the scope link.
"""
def build_scope_href(href, nil), do: href
def build_scope_href(href, scope) do
String.replace(href, "?", "?scope=#{scope}&")
end
@doc """
Build the order link.
"""
def build_order_href(href, {name, sort}), do: href <> "#{name}_#{sort}"
def build_order_href(href, _), do: href
@doc """
Build the filter link.
"""
def build_filter_href(href, nil), do: href
def build_filter_href(href, q) do
q
|> Map.to_list()
|> Enum.reduce(href, fn {name, value}, acc ->
acc <> "&q%5B" <> name <> "%5D=" <> value
end)
end
  @doc false
  # Render the "Download: CSV" links section on the index page.
  def download_links(conn, opts) do
    div ".download_links " <> gettext("Download:") <> " " do
      a("CSV", href: build_csv_href(conn, opts))
    end
  end
  @doc false
  # Compose the CSV export URL, carrying over the current scope, sort order
  # and filter query parameters.
  def build_csv_href(conn, opts) do
    (admin_resource_path(conn, :csv) <> "?order=")
    |> build_scope_href(conn.params["scope"])
    |> build_order_href(opts[:order])
    |> build_filter_href(conn.params["q"])
  end
@doc false
def parameterize(name, seperator \\ "_")
def parameterize(atom, seperator) when is_atom(atom) do
Atom.to_string(atom)
|> parameterize(seperator)
end
def parameterize(string, seperator) do
Inflex.parameterize(string, seperator)
end
  @doc false
  # Build the per-row action links for `resource` (nil actions -> none),
  # keep only the authorized ones, and hand them to the theme's
  # handle_action_links/4 together with any custom action labels.
  def build_index_links(conn, resource, actions, page_num \\ 1) do
    resource_model = resource.__struct__
    links =
      case actions do
        nil -> []
        other -> other
      end
    list = get_authorized_links(conn, links, resource_model) |> Enum.reverse()
    labels = conn.assigns.defn.action_labels
    Module.concat(conn.assigns.theme, Index).handle_action_links(list, resource, labels, page_num)
  end
@doc false
def get_authorized_links(conn, links, resource_model) do
Enum.reduce(links, [], fn item, acc ->
if ExAdmin.Utils.authorized_action?(conn, item, resource_model), do: [item | acc], else: acc
end)
end
end
| 27.428875 | 101 | 0.598344 |
0318be4ac1f65182328ecddc0bf1816fd1294dd8 | 1,710 | ex | Elixir | clients/content/lib/google_api/content/v21/model/orders_update_merchant_order_id_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/content/lib/google_api/content/v21/model/orders_update_merchant_order_id_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/content/lib/google_api/content/v21/model/orders_update_merchant_order_id_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V21.Model.OrdersUpdateMerchantOrderIdRequest do
  @moduledoc """
  Request body for the Content API `orders.updatemerchantorderid` call:
  assigns a merchant-defined order ID to an existing order.
  ## Attributes
  *   `merchantOrderId` (*type:* `String.t`, *default:* `nil`) - The merchant order id to be assigned to the order. Must be unique per merchant.
  *   `operationId` (*type:* `String.t`, *default:* `nil`) - The ID of the operation. Unique across all operations for a given order.
  """
  # ModelBase injects the struct definition, the `field/1,2` macro and the
  # `decode/2` helper used by the Poison defimpls below.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :merchantOrderId => String.t() | nil,
          :operationId => String.t() | nil
        }
  field(:merchantOrderId)
  field(:operationId)
end
# Route Poison decoding through the generated `decode/2` so field metadata
# declared above is honored.
defimpl Poison.Decoder, for: GoogleApi.Content.V21.Model.OrdersUpdateMerchantOrderIdRequest do
  def decode(value, options) do
    GoogleApi.Content.V21.Model.OrdersUpdateMerchantOrderIdRequest.decode(value, options)
  end
end
# Encoding is uniform across generated models, so it delegates to ModelBase.
defimpl Poison.Encoder, for: GoogleApi.Content.V21.Model.OrdersUpdateMerchantOrderIdRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 34.2 | 144 | 0.737427 |
0318ca657885c1108087ffe21887dc3b6fe94a6f | 55 | ex | Elixir | web/views/page_view.ex | CCL77/trackme | af3e2dbfb3cbf92e96c0bdc8f2f6a19595d92140 | [
"MIT"
] | 3 | 2017-06-14T15:27:29.000Z | 2017-09-13T12:54:30.000Z | web/views/page_view.ex | joshnuss/xuber | a5fb61e4ddac87dbb8c140edb8d1550f5c2b1cff | [
"MIT"
] | null | null | null | web/views/page_view.ex | joshnuss/xuber | a5fb61e4ddac87dbb8c140edb8d1550f5c2b1cff | [
"MIT"
] | 2 | 2019-07-22T04:04:38.000Z | 2020-08-15T14:50:18.000Z | defmodule XUber.PageView do
use XUber.Web, :view
end
| 13.75 | 27 | 0.763636 |
0318e06f7a75ed253d9c81b06afd31b64c436443 | 1,190 | exs | Elixir | mix.exs | kim-company/ueberauth_linkedin | eb74855a8bf4c42945b37248dba27918ff150fb8 | [
"MIT"
] | null | null | null | mix.exs | kim-company/ueberauth_linkedin | eb74855a8bf4c42945b37248dba27918ff150fb8 | [
"MIT"
] | null | null | null | mix.exs | kim-company/ueberauth_linkedin | eb74855a8bf4c42945b37248dba27918ff150fb8 | [
"MIT"
] | null | null | null | defmodule UeberauthLinkedin.Mixfile do
use Mix.Project
@url "https://github.com/fajarmf/ueberauth_linkedin"
def project do
[app: :ueberauth_linkedin,
version: "0.4.0",
name: "Ueberauth LinkedIn Strategy",
elixir: "~> 1.2",
package: package(),
source_url: @url,
homepage_url: @url,
description: description(),
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps(),
docs: docs()]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[applications: [:logger, :oauth2, :ueberauth]]
end
defp docs do
[extras: docs_extras(), main: "readme"]
end
defp docs_extras do
["README.md"]
end
defp description do
"An Ueberauth strategy for LinkedIn authentication"
end
defp deps do
[{:ueberauth, "~> 0.7"},
{:oauth2, "~> 2.0"},
{:earmark, "~> 1.3", only: :dev},
{:ex_doc, "~> 0.21", only: :dev}]
end
defp package do
[files: ["lib", "mix.exs", "README.md", "LICENSE"],
maintainers: ["Fajar Firdaus"],
licenses: ["MIT"],
links: %{"Github": @url}]
end
end
| 22.037037 | 55 | 0.603361 |
03191cafef86fe3cbd8312de5f0869f137b049ba | 2,760 | ex | Elixir | lib/elixir/lib/map.ex | nurugger07/elixir | c859e49199bb24190656e6d3acb6cf35fe70e8bb | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/map.ex | nurugger07/elixir | c859e49199bb24190656e6d3acb6cf35fe70e8bb | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/map.ex | nurugger07/elixir | c859e49199bb24190656e6d3acb6cf35fe70e8bb | [
"Apache-2.0"
] | null | null | null | defmodule Map do
@moduledoc """
A `Dict` implementation that works on maps.
Maps are key-value stores where keys are compared using
the match operator (`===`). Maps can be created with
the `%{}` special form defined in the `Kernel.SpecialForms`
module.
For more information about the functions in this module and
their APIs, please consult the `Dict` module.
"""
use Dict
defdelegate [keys(map), values(map), size(map), merge(map1, map2), to_list(map)], to: :maps
@compile {:inline, fetch: 2, put: 3, delete: 2, has_key?: 2}
@doc """
Returns a new empty map.
"""
def new, do: %{}
def has_key?(map, key), do: :maps.is_key(key, map)
def fetch(map, key), do: :maps.find(key, map)
def put(map, key, val) do
:maps.put(key, val, map)
end
def delete(map, key), do: :maps.remove(key, map)
def merge(map1, map2, callback) do
:maps.fold fn k, v2, acc ->
update(acc, k, v2, fn(v1) -> callback.(k, v1, v2) end)
end, map1, map2
end
@doc """
Updates the value in the map with the given function.
"""
def update!(%{} = map, key, fun) do
case fetch(map, key) do
{:ok, value} ->
put(map, key, fun.(value))
:error ->
:erlang.error({:badkey, key})
end
end
def update!(map, _key, _fun), do: :erlang.error({:badmap, map})
@doc """
Gets a value and updates a map in one operation.
"""
def get_and_update(%{} = map, key, fun) do
current_value = case :maps.find(key, map) do
{:ok, value} -> value
:error -> nil
end
{get, update} = fun.(current_value)
{get, :maps.put(key, update, map)}
end
def get_and_update(map, _key, _fun), do: :erlang.error({:badmap, map})
@doc """
Gets a value and updates a map only if the key exists in one operation.
"""
def get_and_update!(%{} = map, key, fun) do
case :maps.find(key, map) do
{:ok, value} ->
{get, update} = fun.(value)
{get, :maps.put(key, update, map)}
:error ->
:erlang.error({:badkey, key})
end
end
def get_and_update!(map, _key, _fun), do: :erlang.error({:badmap, map})
@doc """
Converts a struct to map.
It accepts the struct module or a struct itself and
simply removes the `__struct__` field from the struct.
## Example
defmodule User do
defstruct [:name]
end
Map.from_struct(User)
#=> %{name: nil}
Map.from_struct(%User{name: "john"})
#=> %{name: "john"}
"""
def from_struct(struct) when is_atom(struct) do
:maps.remove(:__struct__, struct.__struct__)
end
def from_struct(%{__struct__: _} = struct) do
:maps.remove(:__struct__, struct)
end
def equal?(map1, map2)
def equal?(%{} = map1, %{} = map2), do: map1 === map2
end
| 24 | 93 | 0.602536 |
0319299adb3823253ec6f8c509bb7d4945ae9eef | 191 | exs | Elixir | test/controllers/page_controller_test.exs | evadne/supervised-scaler | 30fa3275a3f9c16c79457ec2205e7bc129658713 | [
"MIT"
] | 44 | 2016-09-22T21:37:48.000Z | 2021-10-09T03:43:32.000Z | test/controllers/page_controller_test.exs | evadne/supervised-scaler | 30fa3275a3f9c16c79457ec2205e7bc129658713 | [
"MIT"
] | 3 | 2016-09-23T08:41:22.000Z | 2017-05-24T10:58:54.000Z | test/controllers/page_controller_test.exs | evadne/supervised-scaler | 30fa3275a3f9c16c79457ec2205e7bc129658713 | [
"MIT"
] | 5 | 2016-09-25T18:52:00.000Z | 2021-07-28T21:12:33.000Z | defmodule Scaler.PageControllerTest do
use Scaler.ConnCase
test "GET /", %{conn: conn} do
conn = get conn, "/"
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 21.222222 | 60 | 0.670157 |
03192da7b687248c0e3a73aa8f6b9afeed76fdaf | 2,245 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/shielded_instance_identity.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/compute/lib/google_api/compute/v1/model/shielded_instance_identity.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/shielded_instance_identity.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.ShieldedInstanceIdentity do
  @moduledoc """
  A shielded Instance identity entry.
  ## Attributes
  *   `encryptionKey` (*type:* `GoogleApi.Compute.V1.Model.ShieldedInstanceIdentityEntry.t`, *default:* `nil`) - An Endorsement Key (EK) issued to the Shielded Instance's vTPM.
  *   `kind` (*type:* `String.t`, *default:* `compute#shieldedInstanceIdentity`) - [Output Only] Type of the resource. Always compute#shieldedInstanceIdentity for shielded Instance identity entry.
  *   `signingKey` (*type:* `GoogleApi.Compute.V1.Model.ShieldedInstanceIdentityEntry.t`, *default:* `nil`) - An Attestation Key (AK) issued to the Shielded Instance's vTPM.
  """
  # ModelBase injects the struct, the `field/1,2` macro and `decode/2`.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :encryptionKey => GoogleApi.Compute.V1.Model.ShieldedInstanceIdentityEntry.t(),
          :kind => String.t(),
          :signingKey => GoogleApi.Compute.V1.Model.ShieldedInstanceIdentityEntry.t()
        }
  # Nested fields declare their model with `as:` so decoding recurses.
  field(:encryptionKey, as: GoogleApi.Compute.V1.Model.ShieldedInstanceIdentityEntry)
  field(:kind)
  field(:signingKey, as: GoogleApi.Compute.V1.Model.ShieldedInstanceIdentityEntry)
end
# Route Poison decoding through the generated `decode/2` so the nested
# ShieldedInstanceIdentityEntry fields are decoded into their structs.
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.ShieldedInstanceIdentity do
  def decode(value, options) do
    GoogleApi.Compute.V1.Model.ShieldedInstanceIdentity.decode(value, options)
  end
end
# Encoding is uniform across generated models; delegate to ModelBase.
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.ShieldedInstanceIdentity do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 42.358491 | 196 | 0.752339 |
0319365a20ee08a51c9f31f82e7fc864ba38396f | 3,779 | exs | Elixir | apps/ehealth/test/web/controllers/email_controller_test.exs | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 8 | 2019-06-14T11:34:49.000Z | 2021-08-05T19:14:24.000Z | apps/ehealth/test/web/controllers/email_controller_test.exs | edenlabllc/ehealth.api.public | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 1 | 2019-07-08T15:20:22.000Z | 2019-07-08T15:20:22.000Z | apps/ehealth/test/web/controllers/email_controller_test.exs | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 6 | 2018-05-11T13:59:32.000Z | 2022-01-19T20:15:22.000Z | defmodule EHealth.Web.EmailsControllerTest do
@moduledoc false
use EHealth.Web.ConnCase
import Mox
# For Mox lib. Make sure mocks are verified when the test exits
setup :verify_on_exit!
@man_id Ecto.UUID.generate()
@valid_params %{
subject: "some subject",
from: "[email protected]",
to: "[email protected]",
data: %{
verification_code: 1234
}
}
describe "send email" do
test "success", %{conn: conn} do
expect(RPCWorkerMock, :run, fn "man_api", Man.Rpc, :render_template, [@man_id, data] ->
assert Map.has_key?(data, "locale")
assert Map.has_key?(data, "format")
{:ok, "<html>#{inspect(data)}</html>"}
end)
assert "Email was successfully sent." =
conn
|> post(email_path(conn, :send, @man_id), @valid_params)
|> json_response(200)
|> get_in(~w(data message))
end
test "invalid Man template id", %{conn: conn} do
expect(RPCWorkerMock, :run, fn "man_api", Man.Rpc, :render_template, ["123", _data] ->
nil
end)
assert "Cannot render email template" <> _ =
conn
|> post(email_path(conn, :send, "123"), @valid_params)
|> json_response(400)
|> get_in(~w(error message))
end
test "invalid email format", %{conn: conn} do
assert [err] =
conn
|> post(email_path(conn, :send, @man_id), Map.put(@valid_params, :from, "not-so-good.com"))
|> json_response(422)
|> get_in(~w(error invalid))
assert "$.from" == err["entry"]
assert [err] =
conn
|> post(email_path(conn, :send, @man_id), Map.put(@valid_params, :to, "not-so-good.com"))
|> json_response(422)
|> get_in(~w(error invalid))
assert "$.to[0].receiver" == err["entry"]
assert [err] =
conn
|> post(email_path(conn, :send, @man_id), Map.put(@valid_params, :to, "[email protected], not-so-good.com"))
|> json_response(422)
|> get_in(~w(error invalid))
assert "$.to[1].receiver" == err["entry"]
end
test "no params", %{conn: conn} do
assert errors =
conn
|> post(email_path(conn, :send, @man_id), %{})
|> json_response(422)
|> get_in(~w(error invalid))
entries =
errors
|> Enum.reduce([], fn error, acc ->
assert %{
"entry_type" => "json_data_property",
"rules" => [%{"rule" => "required"}]
} = error
[error["entry"] | acc]
end)
|> MapSet.new()
assert MapSet.new(~w($.from $.to)) == entries
end
test "no from parameter", %{conn: conn} do
assert [err] =
conn
|> post(email_path(conn, :send, @man_id), Map.delete(@valid_params, :from))
|> json_response(422)
|> get_in(~w(error invalid))
assert %{
"entry" => "$.from",
"entry_type" => "json_data_property",
"rules" => [
%{
"description" => "required property from was not present",
"rule" => "required"
}
]
} = err
end
test "param from and param to are identical", %{conn: conn} do
data = Map.put(@valid_params, :to, "[email protected]")
assert [err] =
conn
|> post(email_path(conn, :send, @man_id), data)
|> json_response(422)
|> get_in(~w(error invalid))
assert "$.from" == err["entry"]
end
end
end
| 29.294574 | 117 | 0.496692 |
0319457ca2bceee2e4f4481cbae121dc13fa6420 | 1,389 | ex | Elixir | clients/safe_browsing/lib/google_api/safe_browsing/v4/model/raw_indices.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/safe_browsing/lib/google_api/safe_browsing/v4/model/raw_indices.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/safe_browsing/lib/google_api/safe_browsing/v4/model/raw_indices.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.SafeBrowsing.V4.Model.RawIndices do
  @moduledoc """
  A set of raw indices to remove from a local list.
  ## Attributes
  - indices (List[Integer]): The indices to remove from a lexicographically-sorted local list. Defaults to: `null`.
  """
  # Plain struct (no ModelBase here): a single optional list field.
  defstruct [
    :"indices"
  ]
end
# Decoding is the identity: the payload already matches the struct shape,
# so no nested-model processing is required.
defimpl Poison.Decoder, for: GoogleApi.SafeBrowsing.V4.Model.RawIndices do
  def decode(value, _options) do
    value
  end
end
# Encoding drops nil fields via the shared serializer helper.
defimpl Poison.Encoder, for: GoogleApi.SafeBrowsing.V4.Model.RawIndices do
  def encode(value, options) do
    GoogleApi.SafeBrowsing.V4.Deserializer.serialize_non_nil(value, options)
  end
end
| 30.195652 | 115 | 0.75306 |
03194a0612e2562209eedf0b2f621c4082fd44fb | 3,502 | ex | Elixir | apps/omg/test/support/prop_test/black_box_me.ex | kendricktan/elixir-omg | 834c103fd5c4b9e063c1d32b9b4e5728abb64009 | [
"Apache-2.0"
] | null | null | null | apps/omg/test/support/prop_test/black_box_me.ex | kendricktan/elixir-omg | 834c103fd5c4b9e063c1d32b9b4e5728abb64009 | [
"Apache-2.0"
] | null | null | null | apps/omg/test/support/prop_test/black_box_me.ex | kendricktan/elixir-omg | 834c103fd5c4b9e063c1d32b9b4e5728abb64009 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.PropTest.BlackBoxMe do
  @moduledoc """
  Generates dumb wrapper for pure library that keeps state in process dictionary.
  Wrapper creates module.
  Example:
  ```
  OMG.PropTest.BlackBoxMe.create(YourProject.State.Core, CoreGS) # generate module name CoreGS
  ```
  would create a ```YourProject.State.CoreGS``` module, accessible in every ```MIX_ENV```.
  Pure library is presumed to have following interface:
  ```
  @spec funX(arg1, ..., argN, state) :: {:ok, side_effects(), state} | {{:error, term}, state}
  ```
  Wrapper exports the same functions with arity-1 (state is hidden) and returns tuples that are shorted by one item (state is hidden). Example above would have been transformed into:
  ```
  @spec funX(arg1, ..., argN) :: {:ok, side_effects()} | {:error, term}
  ```
  This allows for black-box testing and more importantly - for interaction with proper_statem and proper_fsm.
  Wrapper adds following helper functions:
  set state in process dictionary
  ```elixir
  @spec set_state( state() | nil) :: state() | nil
  ```
  get state stored in process dictionary (for possible inspection)
  ```
  @spec get_state() :: state() | nil
  ```
  """
  # Quoted definitions of set_state/1 and get_state/0. The process
  # dictionary key is the wrapped core module itself, so several wrapped
  # cores can coexist in one process without clashing.
  defp state_functions(core) do
    quote do
      def set_state(state) do
        Process.put(unquote(core), state)
      end
      def get_state do
        Process.get(unquote(core))
      end
    end
  end
  # Builds a quoted wrapper for one {function_name, arity} pair of the core:
  # same name, arity minus one (state is hidden in the process dictionary).
  defp create_wrapper_function({func_name, arity}, core) do
    # Synthetic argument names for the visible (non-state) parameters.
    args = Macro.generate_arguments(arity - 1, nil)
    quote do
      def unquote(func_name)(unquote_splicing(args)) do
        state = get_state()
        # Append the hidden state as the core function's last argument.
        case :erlang.apply(unquote(core), unquote(func_name), unquote(args) ++ [state]) do
          {:ok, side_effects, new_state} ->
            set_state(new_state)
            {:ok, side_effects}
          {:ok, new_state} ->
            set_state(new_state)
            :ok
          {{:error, error}, new_state} ->
            set_state(new_state)
            {:error, error}
          unexpected ->
            # Any other shape violates the presumed core contract: report and crash.
            IO.puts(
              "unexpected output #{inspect(unquote(func_name)(unquote_splicing(args)))} :: #{inspect(unexpected)}"
            )
            :erlang.error({:badreturn, unexpected})
        end
      end
    end
  end
  @doc """
  generate module name CoreGS
  ```
  OMG.PropTest.BlackBoxMe.create(YourProject.State.Core, CoreGS)
  ```
  """
  # Macro entry point: both arguments arrive as unexpanded alias ASTs.
  defmacro create({:__aliases__, _, list_atoms}, {:__aliases__, _, dest}) do
    core = Module.concat(list_atoms)
    module_name = Module.concat(dest)
    # Wrap every exported function of the core except compiler-generated
    # ones, and prepend the set_state/get_state helpers.
    contents =
      :functions
      |> core.__info__()
      |> Enum.filter(fn {function_name, _} -> !MapSet.member?(MapSet.new([:__info__, :__struct__]), function_name) end)
      |> Enum.map(&create_wrapper_function(&1, core))
      |> List.insert_at(0, state_functions(core))
    # Define the wrapper module at expansion time; assert creation succeeded.
    {:module, _, _, _} = Module.create(module_name, contents, Macro.Env.location(__ENV__))
    # Expand to nothing at the call site — the side effect is the new module.
    []
  end
end
| 31.267857 | 182 | 0.654769 |
03196a81b163910b6da5188735c7d4e7ccaad655 | 1,755 | exs | Elixir | test/lib/dwolla/utils_test.exs | ark7-inc/dwolla-elixir | a3ee9c149fbf5cd5d38dc2e854278ab321a94bf5 | [
"MIT"
] | 6 | 2018-07-23T07:43:56.000Z | 2021-07-09T01:24:21.000Z | test/lib/dwolla/utils_test.exs | axlepayments/exdwolla | e54fec29458616757338d9bda8f2ad81dd6db225 | [
"MIT"
] | 1 | 2019-11-05T05:31:35.000Z | 2019-11-06T21:26:38.000Z | test/lib/dwolla/utils_test.exs | axlepayments/exdwolla | e54fec29458616757338d9bda8f2ad81dd6db225 | [
"MIT"
] | 7 | 2019-02-12T07:48:09.000Z | 2021-11-08T15:58:06.000Z | defmodule Dwolla.UtilsTest do
use ExUnit.Case
alias Dwolla.Utils
describe "dwolla_utils" do
test "handle_resp/2 handles parsing error" do
payload = "<h1>Some XML payload</h1>"
resp = success_resp(200, {:invalid, payload})
assert {:error, body} = Utils.handle_resp(resp, :any)
assert body == payload
end
test "to_snake_case/1 converts string keys to snake case" do
params = %{
"firstName" => "Steve",
"lastName" => "Rogers",
"dateOfBirth" => "1918-07-04",
"amount" => %{
"value" => 100.0,
"currency" => "USD"
}
}
assert Utils.to_snake_case(params) == %{
"first_name" => "Steve",
"last_name" => "Rogers",
"date_of_birth" => "1918-07-04",
"amount" => %{
"value" => 100.0,
"currency" => "USD"
}
}
end
test "to_camel_case/1 converts atom keys to camel case" do
params = %{
_links: %{
foo: "bar"
},
_embedded: %{
baz: "qux"
},
first_name: "Steve",
last_name: "Rogers",
date_of_birth: "1918-07-04",
amount: %{
value: 100.0,
currency: "USD"
}
}
assert Utils.to_camel_case(params) == %{
"_links" => %{
"foo" => "bar"
},
"_embedded" => %{
"baz" => "qux"
},
"firstName" => "Steve",
"lastName" => "Rogers",
"dateOfBirth" => "1918-07-04",
"amount" => %{
"value" => 100.0,
"currency" => "USD"
}
}
end
end
defp success_resp(code, body) do
{:ok, %HTTPoison.Response{status_code: code, body: body}}
end
end
| 22.792208 | 64 | 0.475783 |
03196cb2e757f7822722709fd5c11c6972b5a88b | 556 | ex | Elixir | lib/ast_ndjson/lib/comment.ex | Ajwah/ex-gherkin | 401ee551312b72aa059c7f4621f014b111049acf | [
"Apache-2.0"
] | 8 | 2020-07-27T09:11:24.000Z | 2020-09-21T20:57:45.000Z | lib/ast_ndjson/lib/comment.ex | Ajwah/ex-gherkin | 401ee551312b72aa059c7f4621f014b111049acf | [
"Apache-2.0"
] | 1 | 2021-05-14T13:30:27.000Z | 2021-05-14T13:30:27.000Z | lib/ast_ndjson/lib/comment.ex | Ajwah/ex-gherkin | 401ee551312b72aa059c7f4621f014b111049acf | [
"Apache-2.0"
] | null | null | null | defmodule ExGherkin.AstNdjson.Comment do
@moduledoc """
Based on: https://github.com/cucumber/cucumber/blob/f15a9ec416a54da806f9f6aad9c393b9a753cbf0/gherkin/ruby/lib/gherkin/ast_builder.rb#L27-L31
location
text
"""
@derive Jason.Encoder
alias ExGherkin.AstNdjson.Location
defstruct location: Location.new(),
text: ""
def new(text, location = %Location{}) do
struct(__MODULE__, %{
location: Location.column(location, 1),
text: String.duplicate(" ", location.column - 1) <> "#" <> text
})
end
end
| 24.173913 | 142 | 0.679856 |
0319c615a8a826e3a3877f96b7571c97840e25e3 | 377 | exs | Elixir | test/absinthe/integration/execution/custom_types/basic_test.exs | hauptbenutzer/absinthe | f1d76e5c249ca337187971b0c96b1870f53716ed | [
"MIT"
] | null | null | null | test/absinthe/integration/execution/custom_types/basic_test.exs | hauptbenutzer/absinthe | f1d76e5c249ca337187971b0c96b1870f53716ed | [
"MIT"
] | null | null | null | test/absinthe/integration/execution/custom_types/basic_test.exs | hauptbenutzer/absinthe | f1d76e5c249ca337187971b0c96b1870f53716ed | [
"MIT"
] | 1 | 2018-11-16T02:34:40.000Z | 2018-11-16T02:34:40.000Z | defmodule Elixir.Absinthe.Integration.Execution.CustomTypes.BasicTest do
use ExUnit.Case, async: true
@query """
query {
customTypesQuery { datetime }
}
"""
test "scenario #1" do
assert {:ok, %{data: %{"customTypesQuery" => %{"datetime" => "2017-01-27T20:31:55Z"}}}} ==
Absinthe.run(@query, Absinthe.Fixtures.CustomTypesSchema, [])
end
end
| 25.133333 | 94 | 0.644562 |
0319eac5c886a487c4dba9f485e4c6d2848d64c9 | 5,114 | ex | Elixir | lib/sanbase_web/graphql/resolvers/social_data_resolver.ex | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | null | null | null | lib/sanbase_web/graphql/resolvers/social_data_resolver.ex | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | 1 | 2021-07-24T16:26:03.000Z | 2021-07-24T16:26:03.000Z | lib/sanbase_web/graphql/resolvers/social_data_resolver.ex | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | null | null | null | defmodule SanbaseWeb.Graphql.Resolvers.SocialDataResolver do
import SanbaseWeb.Graphql.Helpers.Async, only: [async: 1]
import Absinthe.Resolution.Helpers, except: [async: 1]
alias SanbaseWeb.Graphql.Helpers.Utils
alias Sanbase.{SocialData, TechIndicators}
alias SanbaseWeb.Graphql.SanbaseDataloader
@context_words_default_size 10
def popular_search_terms(_root, %{from: from, to: to}, _) do
Sanbase.SocialData.PopularSearchTerm.get(from, to)
end
def project_from_slug(_root, _args, %{source: %{slug: slug}, context: %{loader: loader}}) do
loader
|> Dataloader.load(SanbaseDataloader, :project_by_slug, slug)
|> on_load(fn loader ->
{:ok, Dataloader.get(loader, SanbaseDataloader, :project_by_slug, slug)}
end)
end
def twitter_mention_count(
_root,
%{ticker: ticker, from: from, to: to, interval: interval, result_size_tail: size},
_resolution
) do
TechIndicators.twitter_mention_count(ticker, from, to, interval, size)
end
def emojis_sentiment(
_root,
%{from: from, to: to, interval: interval, result_size_tail: size},
_resolution
) do
TechIndicators.emojis_sentiment(from, to, interval, size)
end
def social_volume(
_root,
%{slug: slug, from: from, to: to, interval: interval, social_volume_type: type},
_resolution
) do
# The `*_discussion_overview` are counting the total number of messages in a given medium
# Deprecated. To be replaced with `getMetric(metric: "community_messages_count_*")` and
# `getMetric(metric: "social_volume_*")`
source =
case type do
:professional_traders_chat_overview -> "professional_traders_chat"
_ -> type |> Atom.to_string() |> String.split("_") |> hd
end
case type in [:telegram_discussion_overview, :discord_discussion_overview] do
true ->
SocialData.community_messages_count(%{slug: slug}, from, to, interval, source)
false ->
SocialData.social_volume(%{slug: slug}, from, to, interval, source)
end
end
def social_volume_projects(_root, %{}, _resolution) do
SocialData.social_volume_projects()
end
def topic_search(
_root,
%{source: source, search_text: search_text, from: from, to: to, interval: interval},
_resolution
) do
case SocialData.social_volume(%{text: search_text}, from, to, interval, source) do
{:ok, data} -> {:ok, %{chart_data: data}}
{:error, error} -> {:error, error}
end
end
def get_trending_words(
_root,
%{from: from, to: to, interval: interval, size: size},
_resolution
) do
case SocialData.TrendingWords.get_trending_words(from, to, interval, size) do
{:ok, result} ->
result =
result
|> Enum.map(fn {datetime, top_words} -> %{datetime: datetime, top_words: top_words} end)
|> Enum.sort_by(& &1.datetime, {:asc, DateTime})
{:ok, result}
{:error, error} ->
{:error, error}
end
end
def get_word_trending_history(
_root,
%{word: word, from: from, to: to, interval: interval, size: size},
_resolution
) do
SocialData.TrendingWords.get_word_trending_history(word, from, to, interval, size)
end
def get_project_trending_history(
_root,
%{slug: slug, from: from, to: to, interval: interval, size: size},
_resolution
) do
SocialData.TrendingWords.get_project_trending_history(slug, from, to, interval, size)
end
def trending_words(
_root,
%{source: source, size: size, hour: hour, from: from, to: to},
_resolution
) do
size = Enum.min([size, 30])
SocialData.trending_words(source, size, hour, from, to)
end
def word_context(
_root,
%{word: word, source: source, size: size, from: from, to: to},
_resolution
) do
size = Enum.min([size, 30])
SocialData.word_context(word, source, size, from, to)
end
def word_context(%{word: word}, _args, resolution) do
%{source: source, from: from, to: to} =
Utils.extract_root_query_args(resolution, "trending_words")
async(fn ->
SocialData.word_context(word, source, @context_words_default_size, from, to)
end)
end
def word_trend_score(
_root,
%{word: word, source: source, from: from, to: to},
_resolution
) do
SocialData.word_trend_score(word, source, from, to)
end
def top_social_gainers_losers(_root, args, _resolution) do
SocialData.top_social_gainers_losers(args)
end
def social_gainers_losers_status(_root, args, _resolution) do
SocialData.social_gainers_losers_status(args)
end
def social_dominance(
_root,
%{slug: slug, from: from, to: to, interval: interval, source: source},
_resolution
) do
SocialData.social_dominance(%{slug: slug}, from, to, interval, source)
end
def news(_root, %{tag: tag, from: from, to: to, size: size}, _resolution) do
SocialData.google_news(tag, from, to, size)
end
end
| 30.440476 | 98 | 0.653891 |
031a1d3eb80c2f29d274324085586731ad3accb0 | 879 | ex | Elixir | lib/glimesh/events.ex | itsUnsmart/glimesh.tv | 22c532184bb5046f6c6d8232e8bd66ba534c01c1 | [
"MIT"
] | 1 | 2020-08-02T00:12:28.000Z | 2020-08-02T00:12:28.000Z | lib/glimesh/events.ex | itsUnsmart/glimesh.tv | 22c532184bb5046f6c6d8232e8bd66ba534c01c1 | [
"MIT"
] | null | null | null | lib/glimesh/events.ex | itsUnsmart/glimesh.tv | 22c532184bb5046f6c6d8232e8bd66ba534c01c1 | [
"MIT"
] | null | null | null | defmodule Glimesh.Events do
@moduledoc """
The Glimesh Events module is responsible for triaging events out to whoever needs to know about them.
"""
@doc """
Broadcast's some data to the application and api
"""
def broadcast(topic, event_type, data) do
broadcast(topic, nil, event_type, data)
end
def broadcast(topic, firehose_topic, event_type, data) do
# 1. Send to Phoenix
Phoenix.PubSub.broadcast(
Glimesh.PubSub,
topic,
{event_type, data}
)
# 2. Send to Absinthe API
Absinthe.Subscription.publish(
GlimeshWeb.Endpoint,
data,
Keyword.put([], event_type, topic)
)
# 3. Send to the Absinthe Firehose API
if firehose_topic do
Absinthe.Subscription.publish(
GlimeshWeb.Endpoint,
data,
Keyword.put([], event_type, firehose_topic)
)
end
end
end
| 23.131579 | 103 | 0.651877 |
031a234b042ec6e6a6bfe00388994aeda9ba6c22 | 459 | exs | Elixir | priv/test_repo/migrations/20210205230349_migrate_resources2.exs | regularfellow/ash_postgres | cc4f07abde97e7ac3141cbbb0ed7d351164fa55f | [
"MIT"
] | null | null | null | priv/test_repo/migrations/20210205230349_migrate_resources2.exs | regularfellow/ash_postgres | cc4f07abde97e7ac3141cbbb0ed7d351164fa55f | [
"MIT"
] | null | null | null | priv/test_repo/migrations/20210205230349_migrate_resources2.exs | regularfellow/ash_postgres | cc4f07abde97e7ac3141cbbb0ed7d351164fa55f | [
"MIT"
] | null | null | null | defmodule AshPostgres.TestRepo.Migrations.MigrateResources2 do
@moduledoc """
Updates resources based on their most recent snapshots.
This file was autogenerated with `mix ash_postgres.generate_migrations`
"""
use Ecto.Migration
def up do
create table(:integer_posts, primary_key: false) do
add :id, :serial, null: false, primary_key: true
add :title, :text
end
end
def down do
drop table("integer_posts")
end
end | 22.95 | 73 | 0.718954 |
031a2400bf623864bc2d1e354871e98c583ec3ff | 3,522 | ex | Elixir | kousa/lib/kousa/room_chat.ex | MatyiFKBT/dogehouse | 7be582d61eaa3512756c1247e449493be3c16c0b | [
"MIT"
] | 1 | 2021-03-15T19:14:33.000Z | 2021-03-15T19:14:33.000Z | kousa/lib/kousa/room_chat.ex | MatyiFKBT/dogehouse | 7be582d61eaa3512756c1247e449493be3c16c0b | [
"MIT"
] | 1 | 2021-03-15T19:08:15.000Z | 2021-03-15T19:08:15.000Z | kousa/lib/kousa/room_chat.ex | MatyiFKBT/dogehouse | 7be582d61eaa3512756c1247e449493be3c16c0b | [
"MIT"
] | null | null | null | defmodule Kousa.RoomChat do
alias Kousa.Utils.RegUtils
alias Beef.Rooms
@message_character_limit 512
@spec send_msg(String.t(), list(map), list(String.t())) :: any
def send_msg(user_id, tokens, whispered_to) do
tokens = validate_tokens(tokens)
# NB: length(list) is O(N) so use a match for stuff like this
if length(tokens) > 0 do
case Beef.Users.get_current_room_id(user_id) do
nil ->
nil
current_room_id ->
with {avatar_url, display_name} <-
Onion.UserSession.send_call!(user_id, {:get_info_for_msg}) do
RegUtils.lookup_and_cast(
Onion.RoomChat,
current_room_id,
{:new_msg, user_id,
%{
id: Ecto.UUID.generate(),
avatarUrl: avatar_url,
displayName: display_name,
userId: user_id,
tokens: tokens,
sentAt: DateTime.utc_now(),
isWhisper: whispered_to != []
}, whispered_to}
)
end
end
end
end
defp validate_tokens(tokens) when is_list(tokens) do
if Enum.reduce_while(tokens, 0, &count_message_characters/2) <= @message_character_limit do
tokens
|> Enum.reduce([], &validate_tokens/2)
|> Enum.reverse()
else
[]
end
end
defp validate_tokens(_), do: []
defp validate_tokens(token, acc) do
case validate_token(token) do
{:ok, token} -> [token | acc]
_ -> acc
end
end
defp count_message_characters(%{"v" => v}, acc) do
if acc <= @message_character_limit, do: {:cont, String.length(v) + acc}, else: {:halt, acc}
end
defp validate_token(token = %{"t" => type, "v" => _}) when type in ["text", "mention", "block"],
do: {:ok, token}
defp validate_token(token = %{"t" => "link", "v" => link}) do
link
|> URI.parse()
|> valid_url?()
|> case do
true -> {:ok, token}
_ -> :invalid
end
end
defp validate_token(_), do: :invalid
defp valid_url?(%URI{host: host, scheme: scheme}) when is_binary(host) and is_binary(scheme),
do: true
defp valid_url?(_), do: false
def ban_user(user_id, user_id_to_ban) do
case Rooms.get_room_status(user_id) do
{:creator, room} ->
if room.creatorId != user_id_to_ban do
RegUtils.lookup_and_cast(Onion.RoomChat, room.id, {:ban_user, user_id_to_ban})
end
{:mod, room} ->
if room.creatorId != user_id_to_ban do
RegUtils.lookup_and_cast(Onion.RoomChat, room.id, {:ban_user, user_id_to_ban})
end
_ ->
nil
end
:ok
end
# Delete room chat messages
def delete_message(deleter_id, message_id, user_id) do
case Rooms.get_room_status(deleter_id) do
{:creator, room} ->
RegUtils.lookup_and_cast(
Onion.RoomChat,
room.id,
{:message_deleted, deleter_id, message_id}
)
# Mods can delete other mod' messages
{:mod, room} ->
if user_id != room.creatorId do
RegUtils.lookup_and_cast(
Onion.RoomChat,
room.id,
{:message_deleted, deleter_id, message_id}
)
end
{:listener, room} ->
if user_id == deleter_id do
RegUtils.lookup_and_cast(
Onion.RoomChat,
room.id,
{:message_deleted, deleter_id, message_id}
)
end
_ ->
nil
end
end
end
| 26.088889 | 98 | 0.569847 |
031a400155f8ee31c19d0cbd2fcc8eae630322eb | 755 | exs | Elixir | config/test.exs | imprest/mgp | 61457315243d0e0c26713601b9930ca34a116a16 | [
"MIT"
] | null | null | null | config/test.exs | imprest/mgp | 61457315243d0e0c26713601b9930ca34a116a16 | [
"MIT"
] | 2 | 2020-12-22T12:30:58.000Z | 2021-05-19T10:07:26.000Z | config/test.exs | imprest/mgp | 61457315243d0e0c26713601b9930ca34a116a16 | [
"MIT"
] | null | null | null | use Mix.Config
# Only in tests, remove the complexity from the password hashing algorithm
config :bcrypt_elixir, :log_rounds, 1
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
config :mgp, Mgp.Repo,
username: "postgres",
password: "postgres",
database: "mgp_test#{System.get_env("MIX_TEST_PARTITION")}",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :mgp, MgpWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 29.038462 | 74 | 0.749669 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.