Dataset schema (one record per source file; fields appear in this order in the records below, followed by the file content and three trailing statistics):

| column | dtype | stats |
|---|---|---|
| hexsha | string | lengths 40–40 |
| size | int64 | 2–991k |
| ext | string | 2 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | lengths 4–208 |
| max_stars_repo_name | string | lengths 6–106 |
| max_stars_repo_head_hexsha | string | lengths 40–40 |
| max_stars_repo_licenses | sequence | |
| max_stars_count | int64 | 1–33.5k, nullable |
| max_stars_repo_stars_event_min_datetime | string | lengths 24–24, nullable |
| max_stars_repo_stars_event_max_datetime | string | lengths 24–24, nullable |
| max_issues_repo_path | string | lengths 4–208 |
| max_issues_repo_name | string | lengths 6–106 |
| max_issues_repo_head_hexsha | string | lengths 40–40 |
| max_issues_repo_licenses | sequence | |
| max_issues_count | int64 | 1–16.3k, nullable |
| max_issues_repo_issues_event_min_datetime | string | lengths 24–24, nullable |
| max_issues_repo_issues_event_max_datetime | string | lengths 24–24, nullable |
| max_forks_repo_path | string | lengths 4–208 |
| max_forks_repo_name | string | lengths 6–106 |
| max_forks_repo_head_hexsha | string | lengths 40–40 |
| max_forks_repo_licenses | sequence | |
| max_forks_count | int64 | 1–6.91k, nullable |
| max_forks_repo_forks_event_min_datetime | string | lengths 24–24, nullable |
| max_forks_repo_forks_event_max_datetime | string | lengths 24–24, nullable |
| content | string | lengths 2–991k |
| avg_line_length | float64 | 1–36k |
| max_line_length | int64 | 1–977k |
| alphanum_fraction | float64 | 0–1 |
9ea3d28931694eb9ca547f2851172b5b0f60b371 | 3,733 | exs | Elixir | test/vapor/provider/file_test.exs | autodidaddict/vapor | 319853bfce86c9b164adfc6c5ccf2455d4e1d771 | ["MIT"] | 533 | 2018-05-27T17:54:58.000Z | 2021-09-26T12:21:20.000Z | test/vapor/provider/file_test.exs | autodidaddict/vapor | 319853bfce86c9b164adfc6c5ccf2455d4e1d771 | ["MIT"] | 68 | 2018-05-28T14:26:52.000Z | 2021-09-11T23:11:34.000Z | test/vapor/provider/file_test.exs | autodidaddict/vapor | 319853bfce86c9b164adfc6c5ccf2455d4e1d771 | ["MIT"] | 37 | 2018-06-22T00:08:38.000Z | 2021-10-06T17:14:19.000Z |

defmodule Vapor.Provider.FileTest do
use ExUnit.Case, async: false
alias Vapor.Provider
alias Vapor.Provider.File
test "raises if the format is unknown" do
assert_raise Vapor.FileFormatNotFoundError, fn ->
%File{path: "test.test"} |> Provider.load()
end
end
test "raises if the file is not found" do
assert_raise Vapor.FileNotFoundError, fn ->
%File{path: "test.toml"} |> Provider.load()
end
end
describe "json" do
test "reads in a file with a given mapping" do
provider = %File{
path: "test/support/settings.json",
bindings: [
foo: "foo",
baz: "baz",
boz: ["biz", "boz"],
]
}
assert {:ok, conf} = Provider.load(provider)
assert conf.foo == "file foo"
assert conf.baz == "file baz"
assert conf.boz == "file biz boz"
end
test "returns an error if environment variables are missing" do
provider = %File{
path: "test/support/settings.json",
bindings: [
foo: "foo",
bar: "bar",
boz: ["boz"]
]
}
assert {:error, error} = Provider.load(provider)
assert error == "Missing keys in file: bar, boz"
end
test "translations can be provided inline" do
provider = %File{
path: "test/support/settings.json",
bindings: [
{:foo, "foo", map: fn "file foo" -> 1337 end},
]
}
assert {:ok, %{foo: 1337}} == Provider.load(provider)
end
test "can specify default values" do
provider = %File{
path: "test/support/settings.json",
bindings: [
{:foo, "foo"},
{:bar, ["some", "key"], default: 1337},
]
}
assert {:ok, %{foo: "file foo", bar: 1337}} = Provider.load(provider)
end
test "can mark a value as non-required" do
provider = %File{
path: "test/support/settings.json",
bindings: [
{:foo, "foo"},
{:bar, ["some", "key"], required: false},
]
}
assert {:ok, %{foo: "file foo", bar: nil}} = Provider.load(provider)
end
end
describe "toml" do
test "reads in a file with a given mapping" do
provider = %File{
path: "test/support/settings.toml",
bindings: [
foo: "foo",
bar: "bar",
boz: ["biz", "boz"],
]
}
assert {:ok, conf} = Provider.load(provider)
assert conf.foo == "foo toml"
assert conf.bar == "bar toml"
assert conf.boz == "biz boz toml"
end
test "returns an error if environment variables are missing" do
provider = %File{
path: "test/support/settings.toml",
bindings: [
foo: "foo",
baz: "baz",
boz: ["biz", "boz"],
]
}
assert {:error, error} = Provider.load(provider)
assert error == "Missing keys in file: baz"
end
end
describe "yaml/yml" do
test "reads in a file with a given mapping" do
provider = %File{
path: "test/support/settings.yaml",
bindings: [
foo: "foo",
bar: "bar",
boz: ["biz", "boz"],
]
}
assert {:ok, conf} = Provider.load(provider)
assert conf.foo == "foo yaml"
assert conf.bar == "bar yaml"
assert conf.boz == "biz boz yaml"
end
test "returns an error if environment variables are missing" do
provider = %File{
path: "test/support/settings.yml",
bindings: [
foo: "foo",
baz: "baz",
boz: ["biz", "boz"],
]
}
assert {:error, error} = Provider.load(provider)
assert error == "Missing keys in file: baz"
end
end
end
| 25.923611 | 75 | 0.532012 |
9ea3f11353888df2216870a031be2cba5e92f8ac | 949 | exs | Elixir | mix.exs | afronski/cartographer | b909f93f600da8daf2e2ba20f6c09ecd6d45a250 | ["MIT"] | 2 | 2017-05-24T08:57:23.000Z | 2017-05-24T10:47:52.000Z | mix.exs | afronski/cartographer | b909f93f600da8daf2e2ba20f6c09ecd6d45a250 | ["MIT"] | null | null | null | mix.exs | afronski/cartographer | b909f93f600da8daf2e2ba20f6c09ecd6d45a250 | ["MIT"] | null | null | null |

defmodule Cartographer.Mixfile do
use Mix.Project
def project do
[
app: :cartographer,
version: "0.0.1",
elixir: "~> 1.0",
test_coverage: [tool: Coverex.Task],
      deps: deps(),
      package: package(),
      description: description()
]
end
def application do
[
applications: [:logger]
]
end
defp deps do
[
{:coverex, "~> 1.4.1", only: :test},
{:excheck, "~> 0.3", only: :test},
{:triq, github: "krestenkrab/triq", only: :test}
]
end
defp package do
[
files: [ "lib", "mix.exs", "README*", "LICENSE*" ],
maintainers: [ "Wojtek Gawroński" ],
licenses: [ "MIT" ],
links: %{
"GitHub" => "https://github.com/afronski/cartographer",
"Docs" => "https://github.com/afronski/cartographer/wiki"
}
]
end
defp description do
"""
Geohash related utilities implementation in Elixir.
"""
end
end
| 18.25 | 65 | 0.54373 |
9ea3fac4fb2616d0b68832e5b974b7085255a417 | 1,236 | ex | Elixir | debian/emacsen-install.ex | Stephan14/lighttpd_1.4.20_code_analysis | 6107be9d939d1413f6f68cddf9cc0b5cd7568a3b | ["BSD-3-Clause"] | null | null | null | debian/emacsen-install.ex | Stephan14/lighttpd_1.4.20_code_analysis | 6107be9d939d1413f6f68cddf9cc0b5cd7568a3b | ["BSD-3-Clause"] | null | null | null | debian/emacsen-install.ex | Stephan14/lighttpd_1.4.20_code_analysis | 6107be9d939d1413f6f68cddf9cc0b5cd7568a3b | ["BSD-3-Clause"] | 1 | 2021-04-08T21:45:02.000Z | 2021-04-08T21:45:02.000Z |

#! /bin/sh -e
# /usr/lib/emacsen-common/packages/install/lighttpd
# Written by Jim Van Zandt <[email protected]>, borrowing heavily
# from the install scripts for gettext by Santiago Vila
# <[email protected]> and octave by Dirk Eddelbuettel <[email protected]>.
FLAVOR=$1
PACKAGE=lighttpd
if [ ${FLAVOR} = emacs ]; then exit 0; fi
echo install/${PACKAGE}: Handling install for emacsen flavor ${FLAVOR}
#FLAVORTEST=`echo $FLAVOR | cut -c-6`
#if [ ${FLAVORTEST} = xemacs ] ; then
# SITEFLAG="-no-site-file"
#else
# SITEFLAG="--no-site-file"
#fi
FLAGS="${SITEFLAG} -q -batch -l path.el -f batch-byte-compile"
ELDIR=/usr/share/emacs/site-lisp/${PACKAGE}
ELCDIR=/usr/share/${FLAVOR}/site-lisp/${PACKAGE}
# Install-info-altdir does not actually exist.
# Maybe somebody will write it.
if test -x /usr/sbin/install-info-altdir; then
echo install/${PACKAGE}: install Info links for ${FLAVOR}
install-info-altdir --quiet --section "" "" --dirname=${FLAVOR} /usr/info/${PACKAGE}.info.gz
fi
install -m 755 -d ${ELCDIR}
cd ${ELDIR}
FILES=`echo *.el`
cp ${FILES} ${ELCDIR}
cd ${ELCDIR}
cat << EOF > path.el
(setq load-path (cons "." load-path) byte-compile-warnings nil)
EOF
${FLAVOR} ${FLAGS} ${FILES}
rm -f *.el path.el
exit 0
| 26.869565 | 96 | 0.687702 |
9ea404f9bca423013125d69e4770e5aa336cbeca | 1,448 | ex | Elixir | clients/firestore/lib/google_api/firestore/v1beta1/model/rollback_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/firestore/lib/google_api/firestore/v1beta1/model/rollback_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null | clients/firestore/lib/google_api/firestore/v1beta1/model/rollback_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null |

# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Firestore.V1beta1.Model.RollbackRequest do
@moduledoc """
The request for Firestore.Rollback.
## Attributes
* `transaction` (*type:* `String.t`, *default:* `nil`) - The transaction to roll back.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:transaction => String.t()
}
field(:transaction)
end
defimpl Poison.Decoder, for: GoogleApi.Firestore.V1beta1.Model.RollbackRequest do
def decode(value, options) do
GoogleApi.Firestore.V1beta1.Model.RollbackRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Firestore.V1beta1.Model.RollbackRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 30.808511 | 90 | 0.742403 |
9ea40c31cf194e584114b0dd30f34a4ca18d3ebe | 199 | ex | Elixir | lib/price_register/repo.ex | civictech-ie/price-register | 9112c183722b76c09e98063ec2829dc2d0b55113 | ["Apache-2.0"] | 4 | 2021-03-09T19:35:37.000Z | 2022-03-20T23:31:19.000Z | lib/price_register/repo.ex | civictech-ie/price-register | 9112c183722b76c09e98063ec2829dc2d0b55113 | ["Apache-2.0"] | null | null | null | lib/price_register/repo.ex | civictech-ie/price-register | 9112c183722b76c09e98063ec2829dc2d0b55113 | ["Apache-2.0"] | null | null | null |

defmodule PriceRegister.Repo do
use Ecto.Repo,
otp_app: :price_register,
adapter: Ecto.Adapters.Postgres,
migration_timestamps: [type: :utc_datetime_usec]
use Quarto, limit: 250
end
| 22.111111 | 52 | 0.743719 |
9ea419e22b50eaad393d0cb67d2516758a7e8a66 | 955 | exs | Elixir | config/prod.secret.exs | tanguilp/plugoid_demo | 7ce537fc6e9e5dc68e212723b57fe76f3587246f | ["Apache-2.0"] | 4 | 2020-05-30T21:21:11.000Z | 2020-06-03T10:46:41.000Z | config/prod.secret.exs | tanguilp/plugoid_demo | 7ce537fc6e9e5dc68e212723b57fe76f3587246f | ["Apache-2.0"] | 2 | 2021-03-10T20:18:30.000Z | 2021-05-11T15:51:54.000Z | config/prod.secret.exs | tanguilp/plugoid_demo | 7ce537fc6e9e5dc68e212723b57fe76f3587246f | ["Apache-2.0"] | null | null | null |

# In this file, we load production configuration and secrets
# from environment variables. You can also hardcode secrets,
# although such is generally not recommended and you have to
# remember to add this file to your .gitignore.
use Mix.Config
secret_key_base =
System.get_env("SECRET_KEY_BASE") ||
raise """
environment variable SECRET_KEY_BASE is missing.
You can generate one by calling: mix phx.gen.secret
"""
config :plugoid_demo, PlugoidDemoWeb.Endpoint,
http: [
port: String.to_integer(System.get_env("PORT") || "4000"),
transport_options: [socket_opts: [:inet6]]
],
secret_key_base: secret_key_base
# ## Using releases (Elixir v1.9+)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start each relevant endpoint:
#
# config :plugoid_demo, PlugoidDemoWeb.Endpoint, server: true
#
# Then you can assemble a release by calling `mix release`.
# See `mix help release` for more information.
| 31.833333 | 65 | 0.735079 |
9ea425e7ec2d515ffc116252bc2c3ba10caffa27 | 2,567 | exs | Elixir | config/prod.exs | InFact-coop/your-sanctuary | 066e3b99ae52ee0d3fddac80b6aaf65ffef2bd0f | ["BSD-3-Clause"] | 2 | 2018-10-23T13:30:00.000Z | 2018-10-24T14:32:52.000Z | config/prod.exs | InFact-coop/your-sanctuary | 066e3b99ae52ee0d3fddac80b6aaf65ffef2bd0f | ["BSD-3-Clause"] | 60 | 2018-10-23T13:39:19.000Z | 2019-02-11T14:18:01.000Z | config/prod.exs | InFact-coop/your-sanctuary | 066e3b99ae52ee0d3fddac80b6aaf65ffef2bd0f | ["BSD-3-Clause"] | null | null | null |

use Mix.Config
# For production, we often load configuration from external
# sources, such as your system environment. For this reason,
# you won't find the :http configuration below, but set inside
# YourSanctuaryWeb.Endpoint.init/2 when load_from_system_env is
# true. Any dynamic configuration should be done there.
#
# Don't forget to configure the url host to something meaningful,
# Phoenix uses this information when generating URLs.
#
# Finally, we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the mix phx.digest task
# which you typically run after static files are built.
config :your_sanctuary, YourSanctuaryWeb.Endpoint,
load_from_system_env: true,
url: [scheme: "https", host: "chat.yoursanctuary.org.uk", port: System.get_env("PORT")],
force_ssl: [rewrite_on: [:x_forwarded_proto]],
cache_static_manifest: "priv/static/cache_manifest.json",
secret_key_base: Map.fetch!(System.get_env(), "SECRET_KEY_BASE")
config :your_sanctuary, YourSanctuary.Repo,
adapter: Ecto.Adapters.Postgres,
url: System.get_env("DATABASE_URL"),
pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10"),
ssl: true
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :your_sanctuary, YourSanctuaryWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [:inet6,
# port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :your_sanctuary, YourSanctuaryWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :your_sanctuary, YourSanctuaryWeb.Endpoint, server: true
#
# Finally import the config/prod.secret.exs
# which should be versioned separately.
| 35.652778 | 90 | 0.729646 |
9ea42d50068f40e6942d8fa66fc9cd0ea9a87f2d | 1,946 | ex | Elixir | clients/street_view_publish/lib/google_api/street_view_publish/v1/model/photo_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/street_view_publish/lib/google_api/street_view_publish/v1/model/photo_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/street_view_publish/lib/google_api/street_view_publish/v1/model/photo_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z |

# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.StreetViewPublish.V1.Model.PhotoResponse do
@moduledoc """
Response payload for a single Photo in batch operations including BatchGetPhotos and BatchUpdatePhotos.
## Attributes
* `photo` (*type:* `GoogleApi.StreetViewPublish.V1.Model.Photo.t`, *default:* `nil`) - The Photo resource, if the request was successful.
* `status` (*type:* `GoogleApi.StreetViewPublish.V1.Model.Status.t`, *default:* `nil`) - The status for the operation to get or update a single photo in the batch request.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:photo => GoogleApi.StreetViewPublish.V1.Model.Photo.t() | nil,
:status => GoogleApi.StreetViewPublish.V1.Model.Status.t() | nil
}
field(:photo, as: GoogleApi.StreetViewPublish.V1.Model.Photo)
field(:status, as: GoogleApi.StreetViewPublish.V1.Model.Status)
end
defimpl Poison.Decoder, for: GoogleApi.StreetViewPublish.V1.Model.PhotoResponse do
def decode(value, options) do
GoogleApi.StreetViewPublish.V1.Model.PhotoResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.StreetViewPublish.V1.Model.PhotoResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 38.92 | 175 | 0.747688 |
9ea432612f5968864022e9d5099b020d2b0183e0 | 756 | ex | Elixir | lib/toki/pattern.ex | tommy351/toki | 099591c6ab5dcbad91c4c45ff87ab7e6d05959bc | ["MIT"] | 3 | 2016-07-19T03:23:33.000Z | 2016-09-15T11:57:37.000Z | lib/toki/pattern.ex | tommy351/toki | 099591c6ab5dcbad91c4c45ff87ab7e6d05959bc | ["MIT"] | null | null | null | lib/toki/pattern.ex | tommy351/toki | 099591c6ab5dcbad91c4c45ff87ab7e6d05959bc | ["MIT"] | null | null | null |

defmodule Toki.Pattern do
use Toki.Unit.Year, :compile
use Toki.Unit.Month, :compile
use Toki.Unit.Day, :compile
use Toki.Unit.Hour, :compile
use Toki.Unit.Minute, :compile
use Toki.Unit.Second, :compile
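  # Assumption (not verified here): each `use Toki.Unit.*, :compile` above injects
  # `do_compile/2` clauses for its own token (e.g. `YYYY`, `MM`, `ss`). The clauses
  # below handle what is left over: `[bracketed]` literals and plain characters,
  # escaped verbatim, so a pattern such as "YYYY-MM-DD" compiles to an anchored regex.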
@spec compile(String.t) :: Regex.t
def compile(pattern) do
do_compile(pattern, "")
end
defp do_compile("", acc) do
{:ok, regex} = Regex.compile("^#{acc}$")
regex
end
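  # A "[" starts a literal span: everything up to the closing "]" is escaped and copied
  # into the regex as-is; if no "]" follows, the "[" itself is treated as a plain character.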
defp do_compile("[" <> rest, acc) do
[str, rest] = case String.split(rest, "]", parts: 2) do
[str, rest] -> [str, rest]
[rest] -> ["[", rest]
end
do_compile(rest, acc <> Regex.escape(str))
end
defp do_compile(<<h::binary-size(1), rest::binary>>, acc) do
do_compile(rest, acc <> Regex.escape(h))
end
end
| 23.625 | 62 | 0.611111 |
9ea448f48159f23ec59a144795d312918bf532c7 | 208 | ex | Elixir | lib/compiler/structure.ex | ProtoDef-io/elixir-protodef | 9176d2439d66c713a5db7f57f26dfc5dc18f8d6f | ["MIT"] | 2 | 2016-03-31T16:23:39.000Z | 2016-05-20T17:42:38.000Z | lib/compiler/structure.ex | McEx/ProtoDef | 9176d2439d66c713a5db7f57f26dfc5dc18f8d6f | ["MIT"] | 1 | 2016-04-02T08:54:49.000Z | 2016-04-02T08:54:49.000Z | lib/compiler/structure.ex | McEx/ProtoDef | 9176d2439d66c713a5db7f57f26dfc5dc18f8d6f | ["MIT"] | null | null | null |

defmodule ProtoDef.Compiler.Structure do
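  # Type references pass through untouched; any concrete type struct delegates to its
  # own module's structure/2 implementation.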
def gen_for_type({:type_ref, typ, _}, _ctx), do: {:type_ref, typ}
def gen_for_type(type, ctx) do
apply(type.__struct__, :structure, [type, ctx])
end
end
| 23.111111 | 67 | 0.697115 |
9ea45491d9dcab31889be979849146c092d2804c | 1,216 | ex | Elixir | ros/ros_service/lib/ros_service.ex | kujua/elixir-handbook | 4185ad8da7f652fdb59c799dc58bcb33fda10475 | ["Apache-2.0"] | 1 | 2019-07-01T18:47:28.000Z | 2019-07-01T18:47:28.000Z | ros/ros_service/lib/ros_service.ex | kujua/elixir-handbook | 4185ad8da7f652fdb59c799dc58bcb33fda10475 | ["Apache-2.0"] | 4 | 2020-07-17T16:57:18.000Z | 2021-05-09T23:50:52.000Z | ros/ros_service/lib/ros_service.ex | kujua/elixir-handbook | 4185ad8da7f652fdb59c799dc58bcb33fda10475 | ["Apache-2.0"] | null | null | null |

defmodule Ros.Service do
@moduledoc false
use GenServer
alias Phoenix.Channels.GenSocketClient
def send_newdata_message(orderid) do
GenSocketClient.call(Ros.Service.DataSocketClient, {:new_data, orderid}, 3000)
end
# def get_data(for) do
# GenServer.call(__MODULE__, {:getdata, for}, 3000)
# end
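  # Stores the given order models via the GenServer below and pushes a :new_data message
  # per order over the data socket; returns {:ok, models} on success or {:error, models}
  # with each model's status set to :error.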
@spec process(list(Ros.Lib.Model.Order.t)) :: {atom, list(Ros.Lib.Model.Order.t)}
def process(ordermodellist) do
case GenServer.call(__MODULE__, {:process, ordermodellist}, 3000) do
:ok -> {:ok, ordermodellist}
_ -> {:error, Ros.Lib.set_status(ordermodellist, :error)}
end
end
def start_link(state) do
GenServer.start_link(__MODULE__, state, name: __MODULE__)
end
@impl true
def init(_opts) do
{:ok, []}
end
@impl true
  def handle_call({:process, ordermodellist}, _from, state) do
    Enum.each(ordermodellist, fn ordermodel ->
      :ok = Ros.Service.TransientDataStore.add(ordermodel)
      send_newdata_message(ordermodel.order_id)
    end)

    {:reply, :ok, state}
  end
# @impl true
# def handle_call({:getdata, for}, _from, _state) do
# {:ok, list} = Ros.Service.TransientDataStore.getall()
# {:reply, :ok, list}
# end
end
| 25.333333 | 83 | 0.677632 |
9ea457614c4f6488310e026c994b32af626ed2d5 | 941 | exs | Elixir | config/config.exs | Arqui2018/2F_wallet_ms | 3b5cdb71f1590f7340460f13a086918448bc3fae | ["MIT"] | null | null | null | config/config.exs | Arqui2018/2F_wallet_ms | 3b5cdb71f1590f7340460f13a086918448bc3fae | ["MIT"] | null | null | null | config/config.exs | Arqui2018/2F_wallet_ms | 3b5cdb71f1590f7340460f13a086918448bc3fae | ["MIT"] | null | null | null |

# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# General application configuration
config :wallet_ms,
ecto_repos: [WalletMs.Repo]
# Configures the endpoint
config :wallet_ms, WalletMsWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "jBVfBB3PBBW2MA0SbwzekX/TlCxpVsq2Ncub7+glNcc6wn1Ue4GI/EW9Xcotb60o",
render_errors: [view: WalletMsWeb.ErrorView, accepts: ~w(html json)],
pubsub: [name: WalletMs.PubSub,
adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:user_id]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 33.607143 | 86 | 0.763018 |
9ea4688c83f3ef8cf56478f034b590ba5189fc27 | 6,119 | ex | Elixir | clients/binary_authorization/lib/google_api/binary_authorization/v1/model/policy.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | ["Apache-2.0"] | null | null | null | clients/binary_authorization/lib/google_api/binary_authorization/v1/model/policy.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | ["Apache-2.0"] | null | null | null | clients/binary_authorization/lib/google_api/binary_authorization/v1/model/policy.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | ["Apache-2.0"] | null | null | null |

# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BinaryAuthorization.V1.Model.Policy do
@moduledoc """
A policy for container image binary authorization.
## Attributes
* `admissionWhitelistPatterns` (*type:* `list(GoogleApi.BinaryAuthorization.V1.Model.AdmissionWhitelistPattern.t)`, *default:* `nil`) - Optional. Admission policy allowlisting. A matching admission request will always be permitted. This feature is typically used to exclude Google or third-party infrastructure images from Binary Authorization policies.
* `clusterAdmissionRules` (*type:* `%{optional(String.t) => GoogleApi.BinaryAuthorization.V1.Model.AdmissionRule.t}`, *default:* `nil`) - Optional. Per-cluster admission rules. Cluster spec format: `location.clusterId`. There can be at most one admission rule per cluster spec. A `location` is either a compute zone (e.g. us-central1-a) or a region (e.g. us-central1). For `clusterId` syntax restrictions see https://cloud.google.com/container-engine/reference/rest/v1/projects.zones.clusters.
* `defaultAdmissionRule` (*type:* `GoogleApi.BinaryAuthorization.V1.Model.AdmissionRule.t`, *default:* `nil`) - Required. Default admission rule for a cluster without a per-cluster, per- kubernetes-service-account, or per-istio-service-identity admission rule.
* `description` (*type:* `String.t`, *default:* `nil`) - Optional. A descriptive comment.
* `etag` (*type:* `String.t`, *default:* `nil`) - Optional. Used to prevent updating the policy when another request has updated it since it was retrieved.
* `globalPolicyEvaluationMode` (*type:* `String.t`, *default:* `nil`) - Optional. Controls the evaluation of a Google-maintained global admission policy for common system-level images. Images not covered by the global policy will be subject to the project admission policy. This setting has no effect when specified inside a global admission policy.
* `istioServiceIdentityAdmissionRules` (*type:* `%{optional(String.t) => GoogleApi.BinaryAuthorization.V1.Model.AdmissionRule.t}`, *default:* `nil`) - Optional. Per-istio-service-identity admission rules. Istio service identity spec format: `spiffe:///ns//sa/` or `/ns//sa/` e.g. `spiffe://example.com/ns/test-ns/sa/default`
* `kubernetesNamespaceAdmissionRules` (*type:* `%{optional(String.t) => GoogleApi.BinaryAuthorization.V1.Model.AdmissionRule.t}`, *default:* `nil`) - Optional. Per-kubernetes-namespace admission rules. K8s namespace spec format: `[a-z.-]+`, e.g. `some-namespace`
* `kubernetesServiceAccountAdmissionRules` (*type:* `%{optional(String.t) => GoogleApi.BinaryAuthorization.V1.Model.AdmissionRule.t}`, *default:* `nil`) - Optional. Per-kubernetes-service-account admission rules. Service account spec format: `namespace:serviceaccount`. e.g. `test-ns:default`
* `name` (*type:* `String.t`, *default:* `nil`) - Output only. The resource name, in the format `projects/*/policy`. There is at most one policy per project.
* `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. Time when the policy was last updated.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:admissionWhitelistPatterns =>
list(GoogleApi.BinaryAuthorization.V1.Model.AdmissionWhitelistPattern.t()) | nil,
:clusterAdmissionRules =>
%{optional(String.t()) => GoogleApi.BinaryAuthorization.V1.Model.AdmissionRule.t()}
| nil,
:defaultAdmissionRule => GoogleApi.BinaryAuthorization.V1.Model.AdmissionRule.t() | nil,
:description => String.t() | nil,
:etag => String.t() | nil,
:globalPolicyEvaluationMode => String.t() | nil,
:istioServiceIdentityAdmissionRules =>
%{optional(String.t()) => GoogleApi.BinaryAuthorization.V1.Model.AdmissionRule.t()}
| nil,
:kubernetesNamespaceAdmissionRules =>
%{optional(String.t()) => GoogleApi.BinaryAuthorization.V1.Model.AdmissionRule.t()}
| nil,
:kubernetesServiceAccountAdmissionRules =>
%{optional(String.t()) => GoogleApi.BinaryAuthorization.V1.Model.AdmissionRule.t()}
| nil,
:name => String.t() | nil,
:updateTime => DateTime.t() | nil
}
field(:admissionWhitelistPatterns,
as: GoogleApi.BinaryAuthorization.V1.Model.AdmissionWhitelistPattern,
type: :list
)
field(:clusterAdmissionRules,
as: GoogleApi.BinaryAuthorization.V1.Model.AdmissionRule,
type: :map
)
field(:defaultAdmissionRule, as: GoogleApi.BinaryAuthorization.V1.Model.AdmissionRule)
field(:description)
field(:etag)
field(:globalPolicyEvaluationMode)
field(:istioServiceIdentityAdmissionRules,
as: GoogleApi.BinaryAuthorization.V1.Model.AdmissionRule,
type: :map
)
field(:kubernetesNamespaceAdmissionRules,
as: GoogleApi.BinaryAuthorization.V1.Model.AdmissionRule,
type: :map
)
field(:kubernetesServiceAccountAdmissionRules,
as: GoogleApi.BinaryAuthorization.V1.Model.AdmissionRule,
type: :map
)
field(:name)
field(:updateTime, as: DateTime)
end
defimpl Poison.Decoder, for: GoogleApi.BinaryAuthorization.V1.Model.Policy do
def decode(value, options) do
GoogleApi.BinaryAuthorization.V1.Model.Policy.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.BinaryAuthorization.V1.Model.Policy do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 57.186916 | 497 | 0.724465 |
9ea47e362d9da4602aec703f06ad8b9a1ae0473c | 1,768 | exs | Elixir | config/test.exs | IrinaS-D/tailwind-beginner-site | 999b16fd89b20b6af4e33bb51acca670ccda6216 | ["MIT"] | null | null | null | config/test.exs | IrinaS-D/tailwind-beginner-site | 999b16fd89b20b6af4e33bb51acca670ccda6216 | ["MIT"] | null | null | null | config/test.exs | IrinaS-D/tailwind-beginner-site | 999b16fd89b20b6af4e33bb51acca670ccda6216 | ["MIT"] | null | null | null |

import Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
[
{:admin, Legendary.Admin},
{:app, AppWeb},
{:core, Legendary.AuthWeb},
{:content, Legendary.Content},
{:core, Legendary.CoreWeb},
{:object_storage, Legendary.ObjectStorageWeb}
]
|> Enum.map(fn {otp_app, module} ->
config otp_app, Module.concat(module, "Endpoint"),
http: [port: 4002],
server: false
end)
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
[
{:admin, Legendary.Admin.Repo},
{:app, App.Repo},
{:content, Legendary.Content.Repo},
{:core, Legendary.Core.Repo},
{:object_storage, Legendary.ObjectStorage.Repo}
]
|> Enum.map(fn {otp_app, repo} ->
config otp_app, repo,
username: "postgres",
password: "postgres",
database: "legendary_test#{System.get_env("MIX_TEST_PARTITION")}",
hostname: System.get_env("DATABASE_URL") || "localhost",
pool: Ecto.Adapters.SQL.Sandbox
end)
config :core, Legendary.CoreMailer, adapter: Bamboo.TestAdapter
config :content, Oban, crontab: false, queues: false, plugins: false
config :logger, level: :warn
config :libcluster, topologies: []
config :waffle,
storage: Waffle.Storage.Local,
storage_dir_prefix: "priv/test/static/",
asset_host: "http://localhost:4000"
config :object_storage,
bucket_name: "uploads"
config :ex_aws,
access_key_id: "test-access-key-id",
secret_access_key: "test-secret-access-key"
config :ex_aws, :s3,
scheme: "http://",
host: "localhost",
port: 4000
config :object_storage, :signature_generator, Legendary.ObjectStorageWeb.CheckSignatures.MockSignatureGenerator
| 26.38806 | 111 | 0.720023 |
9ea4930f05bdbb6c3ff44284c153cb75073c18bf | 4,019 | ex | Elixir | projects/api/lib/margaret/stars/stars.ex | strattadb/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | ["MIT"] | 82 | 2017-11-06T01:00:55.000Z | 2020-12-09T10:35:29.000Z | projects/api/lib/margaret/stars/stars.ex | dbstratta/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | ["MIT"] | 98 | 2017-11-06T22:57:32.000Z | 2020-07-03T04:46:39.000Z | projects/api/lib/margaret/stars/stars.ex | strattadb/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | ["MIT"] | 10 | 2017-11-16T05:31:58.000Z | 2020-10-29T18:02:35.000Z |

defmodule Margaret.Stars do
@moduledoc """
The Stars context.
"""
alias Ecto.Multi
alias Margaret.{
Repo,
Accounts,
Stories,
Comments,
Stars,
Notifications,
Helpers
}
alias Accounts.User
alias Stories.Story
alias Comments.Comment
alias Stars.Star
@type starrable :: Story.t() | Comment.t()
@doc """
Gets a star.
## Examples
iex> get_star(user_id: 123, story_id: 123)
%Star{}
iex> get_star(user_id: 123, story_id: 456)
nil
"""
@spec get_star(Keyword.t()) :: Star.t() | nil
def get_star(clauses) when length(clauses) == 2, do: Repo.get_by(Star, clauses)
@doc """
Returns the starrable of the star.
## Examples
iex> starrable(%Star{})
%Story{}
iex> starrable(%Star{})
%Comment{}
"""
@spec starrable(Star.t()) :: Story.t() | Comment.t()
def starrable(%Star{story_id: story_id} = star) when not is_nil(story_id) do
star
|> Star.preload_story()
|> Map.fetch!(:story)
end
def starrable(%Star{comment_id: comment_id} = star) when not is_nil(comment_id) do
star
|> Star.preload_comment()
|> Map.fetch!(:comment)
end
@doc """
Returns `true` if the user has starred the starrable.
`false` otherwise.
## Examples
iex> has_starred?(user: %User{}, story: %Story{})
true
iex> has_starred?(user: %User{}, story: %Story{})
false
"""
@spec has_starred?(Keyword.t()) :: boolean
def has_starred?(clauses) do
%User{id: user_id} = Keyword.get(clauses, :user)
clauses =
clauses
|> get_starrable_from_clauses()
|> case do
%Story{id: story_id} -> [story_id: story_id]
%Comment{id: comment_id} -> [comment_id: comment_id]
end
|> Keyword.put(:user_id, user_id)
!!get_star(clauses)
end
@doc """
"""
@spec starred(map()) :: any()
def starred(args) do
args
|> Stars.Queries.starred()
|> Helpers.Connection.from_query(args)
end
@doc """
"""
@spec starred_count(map()) :: non_neg_integer()
def starred_count(args \\ %{}) do
args
|> Stars.Queries.starred()
|> Repo.count()
end
def stargazers(args) do
args
|> Stars.Queries.stargazers()
|> Helpers.Connection.from_query(args)
end
def stargazer_count(args) do
args
|> Stars.Queries.stargazers()
|> Repo.count()
end
@doc """
Inserts a star.
"""
@spec insert_star(map()) :: any()
def insert_star(attrs) do
star_changeset = Star.changeset(attrs)
Multi.new()
|> Multi.insert(:star, star_changeset)
|> notify_author_of_starrable()
|> Repo.transaction()
end
@spec notify_author_of_starrable(Multi.t()) :: Multi.t()
defp notify_author_of_starrable(multi) do
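    # Runs inside the same transaction as the star insert: look up the starrable's
    # author and record a "starred" notification for them (the actor is the user
    # who starred, taken from the freshly inserted star).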
insert_notification = fn %{star: star} ->
starrable = starrable(star)
author =
case starrable do
%Story{} = story -> Stories.author(story)
%Comment{} = comment -> Comments.author(comment)
end
notified_users = [author]
notification_attrs =
starrable
|> case do
%Story{id: story_id} -> %{story_id: story_id}
%Comment{id: comment_id} -> %{comment_id: comment_id}
end
|> Map.merge(%{
          actor_id: star.user_id,
action: "starred",
notified_users: notified_users
})
case Notifications.insert_notification(notification_attrs) do
{:ok, %{notification: notification}} -> {:ok, notification}
{:error, _, reason, _} -> {:error, reason}
end
end
Multi.run(multi, :notification, insert_notification)
end
@doc """
Deletes a star.
"""
def delete_star(%Star{} = star), do: Repo.delete(star)
@spec get_starrable_from_clauses(Keyword.t()) :: starrable()
defp get_starrable_from_clauses(clauses) do
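    # Callers pass exactly one of :story or :comment; return whichever is present.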
cond do
Keyword.has_key?(clauses, :story) -> Keyword.get(clauses, :story)
Keyword.has_key?(clauses, :comment) -> Keyword.get(clauses, :comment)
end
end
end
| 21.842391 | 84 | 0.606867 |
9ea49462b8bf3544d13b73345ac19a7b128bc8f5 | 1,557 | ex | Elixir | clients/cloud_run/lib/google_api/cloud_run/v1alpha1/model/test_iam_permissions_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | null | null | null | clients/cloud_run/lib/google_api/cloud_run/v1alpha1/model/test_iam_permissions_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/cloud_run/lib/google_api/cloud_run/v1alpha1/model/test_iam_permissions_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z |

# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudRun.V1alpha1.Model.TestIamPermissionsResponse do
@moduledoc """
Response message for `TestIamPermissions` method.
## Attributes
* `permissions` (*type:* `list(String.t)`, *default:* `nil`) - A subset of `TestPermissionsRequest.permissions` that the caller is allowed.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:permissions => list(String.t())
}
field(:permissions, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.CloudRun.V1alpha1.Model.TestIamPermissionsResponse do
def decode(value, options) do
GoogleApi.CloudRun.V1alpha1.Model.TestIamPermissionsResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudRun.V1alpha1.Model.TestIamPermissionsResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.12766 | 143 | 0.751445 |
9ea49b8e83f8c1568eefe6198008e1e7d2159da9 | 122 | exs | Elixir | test/translator_test.exs | atanych/translator | 0535854adefea1036752fa03de1fd623a01f222c | ["MIT"] | 13 | 2016-04-17T17:02:11.000Z | 2021-04-06T08:40:51.000Z | test/translator_test.exs | atanych/translator | 0535854adefea1036752fa03de1fd623a01f222c | ["MIT"] | 4 | 2016-04-12T02:54:30.000Z | 2016-12-26T16:26:39.000Z | test/translator_test.exs | atanych/translator | 0535854adefea1036752fa03de1fd623a01f222c | ["MIT"] | 5 | 2016-04-17T17:02:13.000Z | 2018-09-27T17:10:16.000Z |

defmodule TranslatorTest do
use ExUnit.Case
doctest Translator
test "the truth" do
assert 1 + 1 == 2
end
end
| 13.555556 | 27 | 0.688525 |
9ea4b29755edc92a1bd181848ef183cb0e116934 | 5,127 | ex | Elixir | lib/lexin/dictionary/xml_converter.ex | cr0t/lexin | bff2997db52a00bf770614630b8684821ab72abc | ["MIT"] | null | null | null | lib/lexin/dictionary/xml_converter.ex | cr0t/lexin | bff2997db52a00bf770614630b8684821ab72abc | ["MIT"] | 6 | 2022-01-05T12:51:37.000Z | 2022-01-13T09:52:36.000Z | lib/lexin/dictionary/xml_converter.ex | cr0t/lexin | bff2997db52a00bf770614630b8684821ab72abc | ["MIT"] | null | null | null |

defmodule Lexin.Dictionary.XMLConverter do
@moduledoc """
In order to get quick lookups for the words in the dictionary files, we want to convert original
XML files into similar SQLite counterparts with simple structure.
Every word definition might have referential `Index`-es – the words that can point to the main
one. We should consider these variants when users search for the words.
The same spelling of the word can be references in multiple definitions (check "a", for example),
so we need a two-tables structure in our SQLite dictionary files; here is its basic structure:
| definitions |
|------------------|
| id INTEGER | <--- | vocabulary |
| word TEXT | | |-----------------------|
| definition TEXT | | | id INTEGER |
--- | definition_id INTEGER |
| word TEXT |
| type TEXT |
In the `definition` field we store the original XML snippet from the input XML file. In the
`word` field of `definitions` table we store translation of the word (it is needed to let users
type their queries in any language – it can be both in Swedish or in other language).
Note: `Floki.raw_html/1` that we use in the code makes all original XML tag names downcased.
The `vocabulary` table contains Swedish variants of the words and helps us to do fast lookups
(10-15ms for the set of ~100k words in the table) and find corresponding definitions, which we
lately might render to the user.
Here is an example of SQL-query we can use to get definitions:
```sql
SELECT DISTINCT definition FROM definitions
JOIN vocabulary ON definitions.id = vocabulary.definition_id
  WHERE vocabulary.word LIKE 'fordon' OR definitions.word LIKE 'fordon'
```
In addition, these tables can also be used to generate suggestions to the input field while user
is typing a search query.
Here is an example of SQL-query we can use to get Swedish suggestions (similar to target lang):
```sql
SELECT DISTINCT word FROM vocabulary
```
Here is the way to prepare these `.sqlite` files which can be consumed later by the Lexin application:
```console
mix run scripts/converter.exs --input swe_rus.xml --output russian.sqlite
```
P.S. We need to check and be careful of words that spell the same way in both languages – should
we show all definitions then? Maybe yes, maybe not.
"""
@doc """
  Parse the input XML, find all word definitions and indexable references (variants of the word
  to look up by), prepare the database and insert the converted data.
"""
def convert(input_filename, output_filename) do
IO.puts("Resetting database...")
conn = reset_db!(output_filename)
IO.puts("Parsing input XML...")
all_words =
input_filename
|> parse_xml()
IO.puts("Inserting into SQLite...")
{_conn, _total, _processed} =
all_words
|> Enum.reduce({conn, Enum.count(all_words), 0}, &insert/2)
IO.puts("\nDone!")
end
defp reset_db!(output_filename) do
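    # Drop any previous database file, then recreate the two-table schema described
    # in the moduledoc, so repeated conversions never mix old and new rows.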
File.rm(output_filename)
{:ok, conn} = Exqlite.Sqlite3.open(output_filename)
vocabulary_table = """
CREATE TABLE "vocabulary" ("id" INTEGER PRIMARY KEY, "definition_id" INTEGER, "word" TEXT, "type" TEXT);
"""
:ok = Exqlite.Sqlite3.execute(conn, vocabulary_table)
definitions_table = """
CREATE TABLE "definitions" ("id" INTEGER PRIMARY KEY, "word" TEXT, "definition" TEXT);
"""
:ok = Exqlite.Sqlite3.execute(conn, definitions_table)
conn
end
defp parse_xml(input_filename) do
input_filename
|> File.read!()
|> Floki.parse_document!()
|> Floki.find("word")
|> Enum.map(&parse_word/1)
end
defp parse_word(word_block) do
id =
word_block
|> Floki.attribute("variantid")
|> List.first()
|> String.to_integer()
variants =
for index <- Floki.find(word_block, "index") do
{
Floki.attribute(index, "value") |> List.first(),
Floki.attribute(index, "type") |> List.first()
}
end
word = Floki.find(word_block, "translation") |> Floki.text()
definition = Floki.raw_html(word_block)
{id, variants, word, definition}
end
defp insert({id, variants, word, definition}, {conn, total, processed}) do
Enum.each(variants, fn {word, type} ->
word_sql = "INSERT INTO vocabulary (definition_id, word, type) VALUES (?1, ?2, ?3)"
{:ok, statement} = Exqlite.Sqlite3.prepare(conn, word_sql)
:ok = Exqlite.Sqlite3.bind(conn, statement, [id, word, type])
:done = Exqlite.Sqlite3.step(conn, statement)
end)
definition_sql = "INSERT INTO definitions (id, word, definition) VALUES (?1, ?2, ?3)"
{:ok, statement} = Exqlite.Sqlite3.prepare(conn, definition_sql)
:ok = Exqlite.Sqlite3.bind(conn, statement, [id, word, definition])
:done = Exqlite.Sqlite3.step(conn, statement)
# a simple "progress bar"
IO.write("#{processed + 1} / #{total}\r")
{conn, total, processed + 1}
end
end
| 33.730263 | 108 | 0.655159 |
9ea4bac661d2eea7c5499f47a03e50a7a2eb6ae5 | 1,987 | ex | Elixir | lib/short_uuid.ex | truealisa/short_uuid | 81628bcb0ed33fd8e04dfd3071e9f5aee78e8304 | ["MIT"] | null | null | null | lib/short_uuid.ex | truealisa/short_uuid | 81628bcb0ed33fd8e04dfd3071e9f5aee78e8304 | ["MIT"] | null | null | null | lib/short_uuid.ex | truealisa/short_uuid | 81628bcb0ed33fd8e04dfd3071e9f5aee78e8304 | ["MIT"] | null | null | null |

defmodule ShortUUID do
@moduledoc """
ShortUUID allows UUIDs to be encoded in a more URL- and user-friendly Base58 format:
iex> ShortUUID.encode("64d7280f-736a-4ffa-b9c0-383f43486d0b")
"DTEETeS5R2XxjrVTZxXoJS"
iex> ShortUUID.decode("DTEETeS5R2XxjrVTZxXoJS")
"64d7280f-736a-4ffa-b9c0-383f43486d0b"
"""
@abc ["1", "2", "3", "4", "5", "6", "7", "8", "9"] ++
["A", "B", "C", "D", "E", "F", "G", "H"] ++
["J", "K", "L", "M", "N"] ++
["P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"] ++
["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k"] ++
["m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z"]
@abc_length Enum.count(@abc)
@doc """
Encodes the given UUID into a ShortUUID.
## Examples
iex> ShortUUID.encode("64d7280f-736a-4ffa-b9c0-383f43486d0b")
"DTEETeS5R2XxjrVTZxXoJS"
"""
@spec encode(String.t()) :: String.t()
def encode(input) when is_binary(input) do
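    # Strip the hyphens, read the UUID as one 128-bit hex integer, then peel off Base58 digits.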
input
|> String.replace("-", "")
|> String.to_integer(16)
|> encode("")
end
defp encode(input, output) when input > 0 do
index = rem(input, @abc_length)
input = div(input, @abc_length)
output = "#{Enum.at(@abc, index)}#{output}"
encode(input, output)
end
defp encode(0, output), do: output
@doc """
Decodes the given ShortUUID back into a UUID.
## Examples
iex> ShortUUID.decode("DTEETeS5R2XxjrVTZxXoJS")
"64d7280f-736a-4ffa-b9c0-383f43486d0b"
"""
@spec decode(String.t()) :: String.t()
def decode(input) when is_binary(input) do
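    # Horner's rule: fold each Base58 digit into an accumulator, then render the result
    # back as a zero-padded, lowercase, hyphenated hex UUID.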
input
|> String.codepoints()
|> Enum.reduce(0, fn codepoint, acc ->
acc * @abc_length + Enum.find_index(@abc, &(&1 == codepoint))
end)
|> Integer.to_string(16)
|> String.pad_leading(32, "0")
|> String.downcase()
|> format()
end
defp format(<<a::64, b::32, c::32, d::32, e::96>>) do
<<a::64, ?-, b::32, ?-, c::32, ?-, d::32, ?-, e::96>>
end
end
| 26.493333 | 86 | 0.537997 |
9ea4c0025e531b8be4f1c1614941343263421eb1 | 648 | ex | Elixir | lib/oriel_web/controllers/api.ex | sgeos/oriel | 145c563e1ef30119881b1c589b57e9ede7de083f | ["BSD-3-Clause"] | 1 | 2022-01-27T08:17:53.000Z | 2022-01-27T08:17:53.000Z | lib/oriel_web/controllers/api.ex | sgeos/oriel | 145c563e1ef30119881b1c589b57e9ede7de083f | ["BSD-3-Clause"] | null | null | null | lib/oriel_web/controllers/api.ex | sgeos/oriel | 145c563e1ef30119881b1c589b57e9ede7de083f | ["BSD-3-Clause"] | null | null | null |

defmodule OrielWeb.ApiController do
use OrielWeb, :controller
defp graphql(query, variables, conn) do
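    # Run the document against the schema, passing the caller's IP through Absinthe's
    # context so resolvers can see it; unwrap plain data, pass errors through as-is.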
context = %{remote_ip: conn.remote_ip}
query
|> Absinthe.run(OrielWeb.GraphQL.Schema, variables: variables, context: context)
|> case do
{:ok, %{data: result}} ->
result
{:ok, %{errors: _}=result} ->
result
result ->
result
end
end
def healthz(conn, _params) do
variables = %{}
result = """
query Healthz {
healthz: ping {
success
datetime
}
}
"""
|> graphql(variables, conn)
json(conn, result)
end
end
| 19.636364 | 84 | 0.557099 |
9ea4e428c60ad783ea5804f3007ac66754a7ad5f | 8,264 | exs | Elixir | test/ex_gpgme/context_test.exs | jshmrtn/ex-gpgme | 0a465254d24d192c2311acf640258fe016b9195a | ["MIT"] | 3 | 2017-11-30T16:47:13.000Z | 2019-02-20T20:43:05.000Z | test/ex_gpgme/context_test.exs | jshmrtn/ex-gpgme | 0a465254d24d192c2311acf640258fe016b9195a | ["MIT"] | 1 | 2020-07-08T18:33:26.000Z | 2020-07-08T18:47:09.000Z | test/ex_gpgme/context_test.exs | jshmrtn/ex-gpgme | 0a465254d24d192c2311acf640258fe016b9195a | ["MIT"] | 2 | 2018-12-31T02:03:38.000Z | 2020-07-08T17:45:14.000Z |

defmodule ExGpgme.ContextTest do
@moduledoc false
use ExUnit.Case
alias ExGpgme.Context
alias ExGpgme.Results.{VerificationResult, Signature, ImportResult}
doctest Context, except: [
from_protocol: 1,
from_protocol!: 1,
import: 2,
find_key: 2,
encrypt: 4,
sign_and_encrypt: 4,
engine_info: 1,
delete_key: 2,
delete_secret_key: 2,
decrypt: 2,
sign: 3,
verify_opaque: 3,
]
@sender_fingerprint "95E93F470BCB2E96C648572DFBFA85913EE05E95"
@sender_secret_key File.read!("priv/test/keys/sender_secret.asc")
@sender_public_key File.read!("priv/test/keys/sender_public.asc")
@receiver_fingerprint "9D8A23BADCFA63B58B3B1CED391062831D088C71"
@receiver_secret_key File.read!("priv/test/keys/receiver_secret.asc")
@receiver_public_key File.read!("priv/test/keys/receiver_public.asc")
@keychain_base_dir "priv/test/keychains/"
@encrypted_receiver File.read!("priv/test/test_data/encrypted_receiver.asc")
setup_all do
@keychain_base_dir
|> File.ls!
|> Enum.reject(fn file ->
file == ".gitkeep"
end)
|> Enum.each(fn file ->
File.rm_rf!(@keychain_base_dir <> file)
end)
:ok
end
def import_test_key!(context, key) do
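    # Exactly one key must end up in the keyring, whether freshly imported or already present.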
%ImportResult{imported: imported, unchanged: unchanged} = Context.import!(context, key)
1 = imported + unchanged
end
setup(tags) do
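    # Tests tagged `context: true` each get a throw-away GnuPG home directory (removed
    # on exit), so keyrings never leak between tests; the `import_*` tags preload key material.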
context = if tags[:context] do
dirname = :erlang.crc32("#{inspect make_ref()}")
path = "priv/test/keychains/#{dirname}"
File.mkdir!(path)
File.chmod!(path, 0o700)
on_exit fn ->
File.rm_rf!(path)
end
context = Context.from_protocol!(:open_pgp)
Context.set_pinentry_mode!(context, :loopback)
Context.set_engine_home_dir!(context, path)
if tags[:import_all] || tags[:import_sender_secret] do
import_test_key!(context, @sender_secret_key)
end
if tags[:import_sender_public] do
import_test_key!(context, @sender_public_key)
end
if tags[:import_all] || tags[:import_receiver_secret] do
import_test_key!(context, @receiver_secret_key)
end
if tags[:import_receiver_public] do
import_test_key!(context, @receiver_public_key)
end
if tags[:armor] do
Context.set_armor(context, tags[:armor])
end
context
end
{:ok, %{context: context}}
end
describe "from_protocol/1" do
protocols = [
{:open_pgp, :ref},
{:cms, :ref},
{:gpg_conf, :ref},
{:assuan, :ref},
{:g13, :ref},
{:ui_server, :ref},
{:spawn, :ref},
{:default, :error},
{:unknown, :error},
{{:other, 17}, :error},
]
for {protocol, expected_result} <- protocols do
test "uses correct constant for #{inspect protocol}" do
protocol = unquote(protocol)
result = Context.from_protocol(protocol)
case unquote(expected_result) do
:ref ->
assert {:ok, ref} = result
assert is_reference(ref)
:error ->
assert {:error, "Invalid value"} = result
end
end
end
test "gives argument error on wrong params" do
assert_raise ArgumentError, fn ->
Context.from_protocol(:foo)
end
assert_raise ArgumentError, fn ->
Context.from_protocol("foo")
end
assert_raise ArgumentError, fn ->
Context.from_protocol({:foo, 17})
end
end
end
describe "protocol/1" do
@tag context: true
test "gives protocol", %{context: context} do
assert :open_pgp = Context.protocol(context)
end
end
describe "import/2" do
@tag context: true
test "imports keys", %{context: context} do
assert {:ok, %ExGpgme.Results.ImportResult{imports: imports}} =
Context.import(context, @sender_public_key)
assert 1 = Enum.count(imports)
end
end
describe "import!/2" do
@tag context: true
test "imports keys", %{context: context} do
assert %ExGpgme.Results.ImportResult{imports: imports} =
Context.import!(context, @sender_public_key <> "\n" <> @receiver_public_key)
assert 2 = Enum.count(imports)
end
end
describe "find_key/2" do
@tag context: true, import_sender_public: true
test "finds key", %{context: context} do
assert {:ok, ref} = Context.find_key(context, @sender_fingerprint)
assert is_reference(ref)
end
@tag context: true
test "errors with missing key", %{context: context} do
assert {:error, "End of file"} = Context.find_key(context, "not existing fingerprint")
end
end
describe "find_key!/2" do
@tag context: true, import_sender_public: true
test "finds key", %{context: context} do
assert ref = Context.find_key!(context, @sender_fingerprint)
assert is_reference(ref)
end
@tag context: true
test "errors with missing key", %{context: context} do
assert {:error, "End of file"} = Context.find_key(context, "not existing fingerprint")
end
end
describe "encrypt/2" do
@tag context: true, import_receiver_secret: true, armor: true
test "encrypts correctly", %{context: context} do
assert recipient = Context.find_key!(context, @receiver_fingerprint)
assert {:ok, cyphertext} = Context.encrypt(context, [recipient], "Hello World!", [:always_trust])
assert is_binary(cyphertext)
assert cyphertext =~ "-BEGIN PGP MESSAGE-"
assert cyphertext =~ "-END PGP MESSAGE-"
assert {:ok, "Hello World!"} = Context.decrypt(context, cyphertext)
end
@tag context: true
test "errors with missing key", %{context: context} do
assert_raise ArgumentError, fn ->
Context.encrypt(context, [], "Hello World!")
end
end
end
describe "decrypt/2" do
@tag context: true, import_receiver_secret: true, armor: true
test "decrypts correctly", %{context: context} do
assert {:ok, "Hello World!"} = Context.decrypt(context, @encrypted_receiver)
end
end
describe "armor?/1" do
@tag context: true
test "read correctly", %{context: context} do
Context.set_armor(context, true)
assert Context.armor?(context)
end
end
describe "set_armor/2" do
@tag context: true
test "set correctly", %{context: context} do
Context.set_armor(context, true)
assert Context.armor?(context)
Context.set_armor(context, false)
refute Context.armor?(context)
end
end
describe "text_mode?/1" do
@tag context: true
test "read correctly", %{context: context} do
Context.set_text_mode(context, true)
assert Context.text_mode?(context)
end
end
describe "set_text_mode/1" do
@tag context: true
test "set correctly", %{context: context} do
Context.set_text_mode(context, true)
assert Context.text_mode?(context)
Context.set_text_mode(context, false)
refute Context.text_mode?(context)
end
end
describe "delete_key/2" do
@tag context: true, import_receiver_public: true
test "really deletes key", %{context: context} do
assert {:ok, key} = Context.find_key(context, @receiver_fingerprint)
assert :ok = Context.delete_key(context, key)
assert {:error, "End of file"} = Context.find_key(context, @receiver_fingerprint)
end
end
describe "delete_secret_key/2" do
@tag context: true, import_receiver_secret: true, manual_pinentry: true
test "really deletes key", %{context: context} do
assert {:ok, key} = Context.find_key(context, @receiver_fingerprint)
Context.set_pinentry_mode!(context, :default)
assert :ok = Context.delete_secret_key(context, key)
assert {:error, "End of file"} = Context.find_key(context, @receiver_fingerprint)
end
end
describe "sign/3" do
@tag context: true, import_receiver_secret: true, armor: true
test "creates correct signature", %{context: context} do
assert {:ok, signature} = Context.sign(context, "Hello World")
assert verification = Context.verify_opaque!(context, signature, "Hello World")
assert %VerificationResult{signatures: [signature_result]} = verification
assert %Signature{status: :valid} = signature_result
end
end
end
| 29.098592 | 103 | 0.657672 |
9ea5019fec00ba6baa9e889085f18a2f0a18910c | 1,980 | exs | Elixir | config/prod.exs | alissonfpmorais/tucano | d22480fc416d14b44862be2ed89040d92b7c08d1 | ["MIT"] | null | null | null | config/prod.exs | alissonfpmorais/tucano | d22480fc416d14b44862be2ed89040d92b7c08d1 | ["MIT"] | null | null | null | config/prod.exs | alissonfpmorais/tucano | d22480fc416d14b44862be2ed89040d92b7c08d1 | ["MIT"] | null | null | null |

use Mix.Config
# For production, we configure the host to read the PORT
# from the system environment. Therefore, you will need
# to set PORT=80 before running your server.
#
# You should also configure the url host to something
# meaningful, we use this information when generating URLs.
#
# Finally, we also include the path to a manifest
# containing the digested version of static files. This
# manifest is generated by the mix phoenix.digest task
# which you typically run after static files are built.
config :tucano, Tucano.Endpoint,
http: [port: {:system, "PORT"}],
url: [host: "example.com", port: 80],
cache_static_manifest: "priv/static/manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :tucano, Tucano.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :tucano, Tucano.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :tucano, Tucano.Endpoint, server: true
#
# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
| 31.935484 | 67 | 0.712121 |
9ea507bd0df176c7c1de5d0e304a73dfa8b216fe | 274 | ex | Elixir | lib/phx_i18n_example_web/views/page_view.ex | paulfioravanti/phx_liveview_i18n_example | a1813758f4f9ff8d2250c8776cb045a700c69077 | ["MIT"] | 9 | 2020-01-18T23:27:36.000Z | 2021-12-22T04:56:14.000Z | lib/phx_i18n_example_web/views/page_view.ex | paulfioravanti/phx_liveview_i18n_example | a1813758f4f9ff8d2250c8776cb045a700c69077 | ["MIT"] | 6 | 2020-03-29T10:53:33.000Z | 2021-05-22T12:22:54.000Z | lib/phx_i18n_example_web/views/page_view.ex | paulfioravanti/phx_i18n_example | a1813758f4f9ff8d2250c8776cb045a700c69077 | ["MIT"] | null | null | null |

defmodule PhxI18nExampleWeb.PageView do
use PhxI18nExampleWeb, :view
import Gettext, only: [with_locale: 2]
alias PhxI18nExampleWeb.PageStyle
defdelegate article, to: PageStyle
defdelegate heading, to: PageStyle
defdelegate heading_container, to: PageStyle
end
| 27.4 | 46 | 0.80292 |
9ea51c16c73ab1aa9189ab410f49a21e83153034 | 115 | ex | Elixir | examples/apps/phx_example/lib/phx_example_web/endpoint.ex | tpitale/elixir_agent | f356b588fe9d7cc3b9db00d9a251e009a20b3e0e | [
"Apache-2.0"
] | 227 | 2018-09-05T15:33:23.000Z | 2022-02-25T18:12:06.000Z | examples/apps/phx_example/lib/phx_example_web/endpoint.ex | tpitale/elixir_agent | f356b588fe9d7cc3b9db00d9a251e009a20b3e0e | [
"Apache-2.0"
] | 211 | 2018-09-05T21:42:41.000Z | 2022-03-25T17:51:56.000Z | examples/apps/phx_example/lib/phx_example_web/endpoint.ex | tpitale/elixir_agent | f356b588fe9d7cc3b9db00d9a251e009a20b3e0e | [
"Apache-2.0"
] | 84 | 2018-09-05T04:26:26.000Z | 2022-03-09T14:28:14.000Z | defmodule PhxExampleWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :phx_example
plug PhxExampleWeb.Router
end
| 19.166667 | 45 | 0.817391 |
9ea52678c255a2bdbcf206388016aeb39ce26b61 | 12,938 | exs | Elixir | test/graphql/lang/parser_introspection_test.exs | marvinhagemeister/graphql | 43bccc041438f05d14c8c6f40f193c3d7957ca9d | [
"BSD-3-Clause"
] | 719 | 2016-03-18T03:10:46.000Z | 2022-02-02T10:07:29.000Z | test/graphql/lang/parser_introspection_test.exs | marvinhagemeister/graphql | 43bccc041438f05d14c8c6f40f193c3d7957ca9d | [
"BSD-3-Clause"
] | 51 | 2015-08-30T03:15:17.000Z | 2016-03-02T07:13:26.000Z | test/graphql/lang/parser_introspection_test.exs | marvinhagemeister/graphql | 43bccc041438f05d14c8c6f40f193c3d7957ca9d | [
"BSD-3-Clause"
] | 34 | 2016-03-30T12:56:11.000Z | 2021-08-30T09:21:54.000Z | defmodule GraphQL.Lang.Parser.IntrospectionTest do
use ExUnit.Case, async: true
import ExUnit.TestHelpers
test "Introspection Query" do
assert_parse """
# The introspection query to end all introspection queries, copied from
# https://github.com/graphql/graphql-js/blob/master/src/utilities/introspectionQuery.js
query IntrospectionQuery {
__schema {
queryType { name }
mutationType { name }
types {
...FullType
}
directives {
name
description
args {
...InputValue
}
onOperation
onFragment
onField
}
}
}
fragment FullType on __Type {
kind
name
description
fields {
name
description
args {
...InputValue
}
type {
...TypeRef
}
isDeprecated
deprecationReason
}
inputFields {
...InputValue
}
interfaces {
...TypeRef
}
enumValues {
name
description
isDeprecated
deprecationReason
}
possibleTypes {
...TypeRef
}
}
fragment InputValue on __InputValue {
name
description
type { ...TypeRef }
defaultValue
}
fragment TypeRef on __Type {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
}
}
}
}
""",
%{definitions: [%{kind: :OperationDefinition, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "IntrospectionQuery"}, operation: :query,
selectionSet: %{kind: :SelectionSet, loc: %{start: 0},
selections: [%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0}, value: "__schema"},
selectionSet: %{kind: :SelectionSet, loc: %{start: 0},
selections: [%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "queryType"},
selectionSet: %{kind: :SelectionSet,
loc: %{start: 0},
selections: [%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "name"}}]}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "mutationType"},
selectionSet: %{kind: :SelectionSet,
loc: %{start: 0},
selections: [%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "name"}}]}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "types"},
selectionSet: %{kind: :SelectionSet,
loc: %{start: 0},
selections: [%{kind: :FragmentSpread,
loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "FullType"}}]}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "directives"},
selectionSet: %{kind: :SelectionSet,
loc: %{start: 0},
selections: [%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "name"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "description"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "args"},
selectionSet: %{kind: :SelectionSet,
loc: %{start: 0},
selections: [%{kind: :FragmentSpread,
loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "InputValue"}}]}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "onOperation"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "onFragment"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "onField"}}]}}]}}]}},
%{kind: :FragmentDefinition, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0}, value: "FullType"},
selectionSet: %{kind: :SelectionSet, loc: %{start: 0},
selections: [%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0}, value: "kind"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0}, value: "name"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "description"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0}, value: "fields"},
selectionSet: %{kind: :SelectionSet, loc: %{start: 0},
selections: [%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "name"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "description"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "args"},
selectionSet: %{kind: :SelectionSet,
loc: %{start: 0},
selections: [%{kind: :FragmentSpread,
loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "InputValue"}}]}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "type"},
selectionSet: %{kind: :SelectionSet,
loc: %{start: 0},
selections: [%{kind: :FragmentSpread,
loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "TypeRef"}}]}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "isDeprecated"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "deprecationReason"}}]}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "inputFields"},
selectionSet: %{kind: :SelectionSet, loc: %{start: 0},
selections: [%{kind: :FragmentSpread, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "InputValue"}}]}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "interfaces"},
selectionSet: %{kind: :SelectionSet, loc: %{start: 0},
selections: [%{kind: :FragmentSpread, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "TypeRef"}}]}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "enumValues"},
selectionSet: %{kind: :SelectionSet, loc: %{start: 0},
selections: [%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "name"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "description"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "isDeprecated"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "deprecationReason"}}]}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "possibleTypes"},
selectionSet: %{kind: :SelectionSet, loc: %{start: 0},
selections: [%{kind: :FragmentSpread, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "TypeRef"}}]}}]},
typeCondition: %{kind: :NamedType, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0}, value: "__Type"}}},
%{kind: :FragmentDefinition, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0}, value: "InputValue"},
selectionSet: %{kind: :SelectionSet, loc: %{start: 0},
selections: [%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0}, value: "name"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "description"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0}, value: "type"},
selectionSet: %{kind: :SelectionSet, loc: %{start: 0},
selections: [%{kind: :FragmentSpread, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "TypeRef"}}]}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "defaultValue"}}]},
typeCondition: %{kind: :NamedType, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "__InputValue"}}},
%{kind: :FragmentDefinition, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0}, value: "TypeRef"},
selectionSet: %{kind: :SelectionSet, loc: %{start: 0},
selections: [%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0}, value: "kind"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0}, value: "name"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0}, value: "ofType"},
selectionSet: %{kind: :SelectionSet, loc: %{start: 0},
selections: [%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "kind"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "name"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "ofType"},
selectionSet: %{kind: :SelectionSet,
loc: %{start: 0},
selections: [%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "kind"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "name"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "ofType"},
selectionSet: %{kind: :SelectionSet,
loc: %{start: 0},
selections: [%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "kind"}},
%{kind: :Field, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0},
value: "name"}}]}}]}}]}}]},
typeCondition: %{kind: :NamedType, loc: %{start: 0},
name: %{kind: :Name, loc: %{start: 0}, value: "__Type"}}}],
kind: :Document, loc: %{start: 0}}
end
end
| 44.460481 | 93 | 0.39782 |
9ea52a3c5b410e78311fd9317fb11b0ee93a3b12 | 3,107 | ex | Elixir | lib/instream/encoder/line.ex | XiaoTeTech/instream | be469d2d8f94c9537a64b052b2dfd583c784d086 | [
"Apache-2.0"
] | null | null | null | lib/instream/encoder/line.ex | XiaoTeTech/instream | be469d2d8f94c9537a64b052b2dfd583c784d086 | [
"Apache-2.0"
] | null | null | null | lib/instream/encoder/line.ex | XiaoTeTech/instream | be469d2d8f94c9537a64b052b2dfd583c784d086 | [
"Apache-2.0"
] | null | null | null | defmodule Instream.Encoder.Line do
@moduledoc """
Encoder for the InfluxDB line protocol.
"""
alias Instream.Decoder.RFC3339
@type point ::
%{
required(:fields) => map,
required(:measurement) => binary,
optional(:tags) => map,
optional(:timestamp) => non_neg_integer | binary | nil
}
| %{
__struct__: module,
fields: map,
tags: map,
timestamp: non_neg_integer | binary | nil
}
@doc """
Creates protocol contents for a list of data points.
"""
@spec encode([point()]) :: binary
def encode(points), do: encode(points, [])
defp encode([point | points], lines) do
line = encode_point(point)
encode(points, ["\n", line | lines])
end
defp encode([], ["\n" | lines]) do
lines
|> Enum.reverse()
|> IO.iodata_to_binary()
end
defp encode([], []), do: ""
defp append_fields(line, %{fields: fields}) do
content =
fields
|> Enum.reduce([], fn
{_, nil}, acc -> acc
{field, value}, acc -> [[encode_property(field), "=", encode_value(value)], "," | acc]
end)
|> Enum.reverse()
case content do
[] -> line
["," | encoded_fields] -> [line, " " | encoded_fields]
end
end
defp append_tags(line, %{tags: tags}) do
content =
tags
|> Enum.reduce([], fn
{_, nil}, acc -> acc
{tag, value}, acc -> [[encode_property(tag), "=", encode_property(value)], "," | acc]
end)
|> Enum.reverse()
case content do
[] -> line
encoded_tags -> [line | encoded_tags]
end
end
defp append_tags(line, _), do: line
defp append_timestamp(line, %{timestamp: nil}), do: line
defp append_timestamp(line, %{timestamp: ts}) when is_integer(ts),
do: [line, " ", Integer.to_string(ts)]
defp append_timestamp(line, %{timestamp: ts}) when is_binary(ts),
do: [line, " ", ts |> RFC3339.to_nanosecond() |> Integer.to_string()]
defp append_timestamp(line, _), do: line
defp encode_point(%{__struct__: series, fields: fields, tags: tags, timestamp: timestamp}) do
encode_point(%{
measurement: series.__meta__(:measurement),
fields: Map.from_struct(fields),
tags: Map.from_struct(tags),
timestamp: timestamp
})
end
defp encode_point(%{measurement: measurement} = point) do
[encode_property(measurement)]
|> append_tags(point)
|> append_fields(point)
|> append_timestamp(point)
end
defp encode_property(s) when is_binary(s) do
s
|> :binary.replace(",", "\\,", [:global])
|> :binary.replace(" ", "\\ ", [:global])
|> :binary.replace("=", "\\=", [:global])
end
defp encode_property(s), do: Kernel.to_string(s)
defp encode_value(i) when is_integer(i), do: [Integer.to_string(i), "i"]
defp encode_value(s) when is_binary(s),
do: ["\"", :binary.replace(s, "\"", "\\\"", [:global]), "\""]
defp encode_value(true), do: "true"
defp encode_value(false), do: "false"
defp encode_value(other), do: inspect(other)
end
| 26.330508 | 95 | 0.578371 |
9ea55703f34c1f5d22f50bc1df66dbd8ac370f8b | 5,384 | ex | Elixir | clients/connectors/lib/google_api/connectors/v1/model/connection.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/connectors/lib/google_api/connectors/v1/model/connection.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/connectors/lib/google_api/connectors/v1/model/connection.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Connectors.V1.Model.Connection do
@moduledoc """
Connection represents an instance of connector.
## Attributes
* `authConfig` (*type:* `GoogleApi.Connectors.V1.Model.AuthConfig.t`, *default:* `nil`) - Optional. Configuration for establishing the connection's authentication with an external system.
* `configVariables` (*type:* `list(GoogleApi.Connectors.V1.Model.ConfigVariable.t)`, *default:* `nil`) - Optional. Configuration for configuring the connection with an external system.
* `connectorVersion` (*type:* `String.t`, *default:* `nil`) - Required. Connector version on which the connection is created. The format is: projects/*/locations/global/providers/*/connectors/*/versions/*
* `createTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. Created time.
* `description` (*type:* `String.t`, *default:* `nil`) - Optional. Description of the resource.
* `egressBackends` (*type:* `list(String.t)`, *default:* `nil`) - Output only. Outbound domains/hosts needs to be allowlisted.
* `envoyImageLocation` (*type:* `String.t`, *default:* `nil`) - Output only. GCR location where the envoy image is stored. formatted like: gcr.io/{bucketName}/{imageName}
* `imageLocation` (*type:* `String.t`, *default:* `nil`) - Output only. GCR location where the runtime image is stored. formatted like: gcr.io/{bucketName}/{imageName}
* `inactive` (*type:* `boolean()`, *default:* `nil`) - Optional. Inactive indicates the connection is active to use or not.
* `labels` (*type:* `map()`, *default:* `nil`) - Optional. Resource labels to represent user-provided metadata. Refer to cloud documentation on labels for more details. https://cloud.google.com/compute/docs/labeling-resources
* `lockConfig` (*type:* `GoogleApi.Connectors.V1.Model.LockConfig.t`, *default:* `nil`) - Optional. Configuration that indicates whether or not the Connection can be edited.
* `name` (*type:* `String.t`, *default:* `nil`) - Output only. Resource name of the Connection. Format: projects/{project}/locations/{location}/connections/{connection}
* `serviceAccount` (*type:* `String.t`, *default:* `nil`) - Optional. Service account needed for runtime plane to access GCP resources.
* `serviceDirectory` (*type:* `String.t`, *default:* `nil`) - Output only. The name of the Service Directory service name. Used for Private Harpoon to resolve the ILB address. e.g. "projects/cloud-connectors-e2e-testing/locations/us-central1/namespaces/istio-system/services/istio-ingressgateway-connectors"
* `status` (*type:* `GoogleApi.Connectors.V1.Model.ConnectionStatus.t`, *default:* `nil`) - Output only. Current status of the connection.
* `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. Updated time.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:authConfig => GoogleApi.Connectors.V1.Model.AuthConfig.t() | nil,
:configVariables => list(GoogleApi.Connectors.V1.Model.ConfigVariable.t()) | nil,
:connectorVersion => String.t() | nil,
:createTime => DateTime.t() | nil,
:description => String.t() | nil,
:egressBackends => list(String.t()) | nil,
:envoyImageLocation => String.t() | nil,
:imageLocation => String.t() | nil,
:inactive => boolean() | nil,
:labels => map() | nil,
:lockConfig => GoogleApi.Connectors.V1.Model.LockConfig.t() | nil,
:name => String.t() | nil,
:serviceAccount => String.t() | nil,
:serviceDirectory => String.t() | nil,
:status => GoogleApi.Connectors.V1.Model.ConnectionStatus.t() | nil,
:updateTime => DateTime.t() | nil
}
field(:authConfig, as: GoogleApi.Connectors.V1.Model.AuthConfig)
field(:configVariables, as: GoogleApi.Connectors.V1.Model.ConfigVariable, type: :list)
field(:connectorVersion)
field(:createTime, as: DateTime)
field(:description)
field(:egressBackends, type: :list)
field(:envoyImageLocation)
field(:imageLocation)
field(:inactive)
field(:labels, type: :map)
field(:lockConfig, as: GoogleApi.Connectors.V1.Model.LockConfig)
field(:name)
field(:serviceAccount)
field(:serviceDirectory)
field(:status, as: GoogleApi.Connectors.V1.Model.ConnectionStatus)
field(:updateTime, as: DateTime)
end
defimpl Poison.Decoder, for: GoogleApi.Connectors.V1.Model.Connection do
def decode(value, options) do
GoogleApi.Connectors.V1.Model.Connection.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Connectors.V1.Model.Connection do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 58.521739 | 311 | 0.701523 |
9ea597cc1444ccac757acfde0d5286d31ff7731e | 2,867 | ex | Elixir | lib/ash/options_helpers.ex | savish/ash | 7faf73097a6c6f801851e9d89569b6f5e6e87f81 | [
"MIT"
] | null | null | null | lib/ash/options_helpers.ex | savish/ash | 7faf73097a6c6f801851e9d89569b6f5e6e87f81 | [
"MIT"
] | null | null | null | lib/ash/options_helpers.ex | savish/ash | 7faf73097a6c6f801851e9d89569b6f5e6e87f81 | [
"MIT"
] | null | null | null | defmodule Ash.OptionsHelpers do
@moduledoc """
Helpers for working with NimbleOptions schemas
"""
@type schema :: NimbleOptions.schema()
def merge_schemas(left, right, section \\ nil) do
new_right =
Enum.map(right, fn {key, value} ->
{key, Keyword.put(value, :subsection, section)}
end)
Keyword.merge(left, new_right)
end
def validate(opts, schema) do
NimbleOptions.validate(opts, sanitize_schema(schema))
end
def validate!(opts, schema) do
NimbleOptions.validate!(opts, sanitize_schema(schema))
end
def docs(schema) do
schema
|> sanitize_schema()
|> Enum.map(fn {key, opts} ->
if opts[:doc] do
{key, Keyword.update!(opts, :doc, &String.replace(&1, "\n\n", " \n"))}
else
{key, opts}
end
end)
|> NimbleOptions.docs()
end
defp sanitize_schema(schema) do
Enum.map(schema, fn {key, opts} ->
new_opts =
case opts[:type] do
{:one_of, values} ->
Keyword.put(opts, :type, {:in, values})
_ ->
opts
end
{key, new_opts}
end)
end
def map(value) when is_map(value), do: {:ok, value}
def map(_), do: {:error, "must be a map"}
def list_of_atoms(value) do
if is_list(value) and Enum.all?(value, &is_atom/1) do
{:ok, value}
else
{:error, "Expected a list of atoms"}
end
end
def module_and_opts({module, opts}) when is_atom(module) do
if Keyword.keyword?(opts) do
{:ok, {module, opts}}
else
{:error, "Expected the second element to be a keyword list, got: #{inspect(opts)}"}
end
end
def module_and_opts({other, _}) do
{:error, "Expected the first element to be a module, got: #{inspect(other)}"}
end
def module_and_opts(module) do
module_and_opts({module, []})
end
def default(value) when is_function(value, 0), do: {:ok, value}
def default({module, function, args})
when is_atom(module) and is_atom(function) and is_list(args),
do: {:ok, {module, function, args}}
def default(value), do: {:ok, value}
def make_required!(options, field) do
Keyword.update!(options, field, &Keyword.put(&1, :required, true))
end
def make_optional!(options, field) do
Keyword.update!(options, field, &Keyword.delete(&1, :required))
end
def set_type!(options, field, type) do
Keyword.update!(options, field, &Keyword.put(&1, :type, type))
end
def set_default!(options, field, value) do
Keyword.update!(options, field, fn config ->
config
|> Keyword.put(:default, value)
|> Keyword.delete(:required)
end)
end
def append_doc!(options, field, to_append) do
Keyword.update!(options, field, fn opt_config ->
Keyword.update(opt_config, :doc, to_append, fn existing ->
existing <> " - " <> to_append
end)
end)
end
end
| 24.715517 | 89 | 0.617719 |
9ea5ab1bd8ff4dc9fbcce53c2b7057aaf33beb04 | 2,150 | exs | Elixir | elixir/space-age/space_age_test.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | [
"MIT"
] | null | null | null | elixir/space-age/space_age_test.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | [
"MIT"
] | null | null | null | elixir/space-age/space_age_test.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | [
"MIT"
] | null | null | null | if !System.get_env("EXERCISM_TEST_EXAMPLES") do
Code.load_file("space_age.exs")
end
ExUnit.start
ExUnit.configure exclude: :pending, trace: true
# You need to define a SpaceAge module containing a function age_on that given a
# planet (:earth, :saturn, etc) and a number of seconds returns the age in years
# on that planet as a floating point number.
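# One minimal sketch of such a module (the orbital periods are the usual
# exercise constants; treat this as an illustration, not the canonical solution):
#
#     defmodule SpaceAge do
#       @earth_year_seconds 31_557_600
#
#       def age_on(planet, seconds) do
#         orbital_period =
#           case planet do
#             :earth -> 1.0
#             :mercury -> 0.2408467
#             :venus -> 0.61519726
#             :mars -> 1.8808158
#             :jupiter -> 11.862615
#             :saturn -> 29.447498
#             :uranus -> 84.016846
#             :neptune -> 164.79132
#           end
#
#         seconds / @earth_year_seconds / orbital_period
#       end
#     end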
defmodule SpaceAgeTest do
use ExUnit.Case
# @tag :pending
test "age on Earth" do
input = 1_000_000_000
assert_in_delta 31.69, SpaceAge.age_on(:earth, input), 0.005
end
# @tag :pending
test "age on Mercury" do
input = 2_134_835_688
assert_in_delta 67.65, SpaceAge.age_on(:earth, input), 0.005
assert_in_delta 280.88, SpaceAge.age_on(:mercury, input), 0.005
end
# @tag :pending
test "age on Venus" do
input = 189_839_836
assert_in_delta 6.02, SpaceAge.age_on(:earth, input), 0.005
assert_in_delta 9.78, SpaceAge.age_on(:venus, input), 0.005
end
# @tag :pending
test "age on Mars" do
input = 2_329_871_239
assert_in_delta 73.83, SpaceAge.age_on(:earth, input), 0.005
assert_in_delta 39.25, SpaceAge.age_on(:mars, input), 0.005
end
# @tag :pending
test "age on Jupiter" do
input = 901_876_382
assert_in_delta 28.58, SpaceAge.age_on(:earth, input), 0.005
assert_in_delta 2.41, SpaceAge.age_on(:jupiter, input), 0.005
end
# @tag :pending
test "age on Saturn" do
input = 3_000_000_000
assert_in_delta 95.06, SpaceAge.age_on(:earth, input), 0.005
assert_in_delta 3.23, SpaceAge.age_on(:saturn, input), 0.005
end
# @tag :pending
test "age on Uranus" do
input = 3_210_123_456
assert_in_delta 101.72, SpaceAge.age_on(:earth, input), 0.005
assert_in_delta 1.21, SpaceAge.age_on(:uranus, input), 0.005
end
# @tag :pending
test "age on Neptune" do
input = 8_210_123_456
assert_in_delta 260.16, SpaceAge.age_on(:earth, input), 0.005
assert_in_delta 1.58, SpaceAge.age_on(:neptune, input), 0.005
end
test "age on Naboo gives CaseClauseError" do
input = 1_000_000_000
assert_raise CaseClauseError, fn -> SpaceAge.age_on(:naboo, input) end
end
end
| 28.666667 | 80 | 0.704186 |
9ea5dee4f08e6883f0905f9622b8e4b65216e2d3 | 886 | exs | Elixir | apps/site/test/lib/css_helpers_test.exs | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 42 | 2019-05-29T16:05:30.000Z | 2021-08-09T16:03:37.000Z | apps/site/test/lib/css_helpers_test.exs | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 872 | 2019-05-29T17:55:50.000Z | 2022-03-30T09:28:43.000Z | apps/site/test/lib/css_helpers_test.exs | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 12 | 2019-07-01T18:33:21.000Z | 2022-03-10T02:13:57.000Z | defmodule CSSHelpersTest do
use ExUnit.Case
import CSSHelpers
describe "string_to_class/1" do
test "converts string to a dash delimted string" do
assert string_to_class("Guides") == "guides"
assert string_to_class("Senior CharlieCard Event") == "senior-charliecard-event"
assert string_to_class("Auto-pay") == "auto-pay"
assert string_to_class("V3 API") == "v3-api"
assert string_to_class("commuter_rail") == "commuter-rail"
assert string_to_class("MBTA: Policy") == "mbta-policy"
end
end
describe "atom_to_class/1" do
test "converts the atom to a dash delimted string" do
assert atom_to_class(:the_ride) == "the-ride"
assert atom_to_class(:subway) == "subway"
assert atom_to_class(:commuter_rail) == "commuter-rail"
assert atom_to_class(:has_multiple_words) == "has-multiple-words"
end
end
end
| 34.076923 | 86 | 0.694131 |
9ea611b453a17a047a3ad3b942de158546f04766 | 687 | ex | Elixir | web/coherence_web.ex | mcousillas6/BioMonitor | 312a903fe19751b6896aca9346340ea502397350 | [
"MIT"
] | null | null | null | web/coherence_web.ex | mcousillas6/BioMonitor | 312a903fe19751b6896aca9346340ea502397350 | [
"MIT"
] | null | null | null | web/coherence_web.ex | mcousillas6/BioMonitor | 312a903fe19751b6896aca9346340ea502397350 | [
"MIT"
] | null | null | null | defmodule BioMonitor.Coherence.Web do
def view do
quote do
use Phoenix.View, root: "web/templates/coherence"
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import BioMonitor.Router.Helpers
import BioMonitor.ErrorHelpers
import BioMonitor.Gettext
import BioMonitor.Coherence.ViewHelpers
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 25.444444 | 88 | 0.701601 |
9ea6247a1ad857358a61a68dca7dac87a036371c | 385 | ex | Elixir | apps/core/lib/core/man/templates/hash_chain_verification_notification.ex | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 8 | 2019-06-14T11:34:49.000Z | 2021-08-05T19:14:24.000Z | apps/core/lib/core/man/templates/hash_chain_verification_notification.ex | edenlabllc/ehealth.api.public | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 1 | 2019-07-08T15:20:22.000Z | 2019-07-08T15:20:22.000Z | apps/core/lib/core/man/templates/hash_chain_verification_notification.ex | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 6 | 2018-05-11T13:59:32.000Z | 2022-01-19T20:15:22.000Z | defmodule Core.Man.Templates.HashChainVerificationNotification do
@moduledoc false
use Confex, otp_app: :core
alias Core.Man.Client, as: ManClient
def render(failure_details) do
ManClient.render_template(
config()[:id],
%{
format: config()[:format],
locale: config()[:locale],
failure_details: failure_details
}
)
end
end
| 20.263158 | 65 | 0.657143 |
9ea6329171503d9ad7e2fa34a90039eb1f7aeb79 | 2,852 | ex | Elixir | clients/android_publisher/lib/google_api/android_publisher/v3/model/subscription_offer_phase.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/android_publisher/lib/google_api/android_publisher/v3/model/subscription_offer_phase.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/android_publisher/lib/google_api/android_publisher/v3/model/subscription_offer_phase.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidPublisher.V3.Model.SubscriptionOfferPhase do
@moduledoc """
A single phase of a subscription offer.
## Attributes
* `duration` (*type:* `String.t`, *default:* `nil`) - Required. The duration of a single recurrence of this phase. Specified in ISO 8601 format.
* `otherRegionsConfig` (*type:* `GoogleApi.AndroidPublisher.V3.Model.OtherRegionsSubscriptionOfferPhaseConfig.t`, *default:* `nil`) - Pricing information for any new locations Play may launch in.
* `recurrenceCount` (*type:* `integer()`, *default:* `nil`) - Required. The number of times this phase repeats. If this offer phase is not free, each recurrence charges the user the price of this offer phase.
* `regionalConfigs` (*type:* `list(GoogleApi.AndroidPublisher.V3.Model.RegionalSubscriptionOfferPhaseConfig.t)`, *default:* `nil`) - Required. The region-specific configuration of this offer phase. This list must contain exactly one entry for each region for which the subscription offer has a regional config.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:duration => String.t() | nil,
:otherRegionsConfig =>
GoogleApi.AndroidPublisher.V3.Model.OtherRegionsSubscriptionOfferPhaseConfig.t() | nil,
:recurrenceCount => integer() | nil,
:regionalConfigs =>
list(GoogleApi.AndroidPublisher.V3.Model.RegionalSubscriptionOfferPhaseConfig.t())
| nil
}
field(:duration)
field(:otherRegionsConfig,
as: GoogleApi.AndroidPublisher.V3.Model.OtherRegionsSubscriptionOfferPhaseConfig
)
field(:recurrenceCount)
field(:regionalConfigs,
as: GoogleApi.AndroidPublisher.V3.Model.RegionalSubscriptionOfferPhaseConfig,
type: :list
)
end
defimpl Poison.Decoder, for: GoogleApi.AndroidPublisher.V3.Model.SubscriptionOfferPhase do
def decode(value, options) do
GoogleApi.AndroidPublisher.V3.Model.SubscriptionOfferPhase.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AndroidPublisher.V3.Model.SubscriptionOfferPhase do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 42.567164 | 314 | 0.744039 |
9ea679db90f8e2de2fca24b51c247e10cd68cd2a | 167 | ex | Elixir | web/shipment/splitter/do_not_split.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | 356 | 2016-03-16T12:37:28.000Z | 2021-12-18T03:22:39.000Z | web/shipment/splitter/do_not_split.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | 30 | 2016-03-16T09:19:10.000Z | 2021-01-12T08:10:52.000Z | web/shipment/splitter/do_not_split.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | 72 | 2016-03-16T13:32:14.000Z | 2021-03-23T11:27:43.000Z | defmodule Nectar.Shipment.Splitter.DoNotSplit do
def split(order) do
line_items = order.line_items
[line_items] # only one group of all line items
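    # e.g. split(%{line_items: [item_a, item_b]}) #=> [[item_a, item_b]] (illustrative)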
end
end
| 23.857143 | 51 | 0.748503 |
9ea67bf9bdfb75a7ff9236fb1de252c1df230d23 | 6,145 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/treemap_chart_spec.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/sheets/lib/google_api/sheets/v4/model/treemap_chart_spec.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/sheets/lib/google_api/sheets/v4/model/treemap_chart_spec.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Sheets.V4.Model.TreemapChartSpec do
@moduledoc """
A Treemap chart.
## Attributes
* `colorData` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data that determines the background color of each treemap data cell. This field is optional. If not specified, size_data is used to determine background colors. If specified, the data is expected to be numeric. color_scale will determine how the values in this data map to data cell background colors.
* `colorScale` (*type:* `GoogleApi.Sheets.V4.Model.TreemapChartColorScale.t`, *default:* `nil`) - The color scale for data cells in the treemap chart. Data cells are assigned colors based on their color values. These color values come from color_data, or from size_data if color_data is not specified. Cells with color values less than or equal to min_value will have minValueColor as their background color. Cells with color values greater than or equal to max_value will have maxValueColor as their background color. Cells with color values between min_value and max_value will have background colors on a gradient between minValueColor and maxValueColor, the midpoint of the gradient being midValueColor. Cells with missing or non-numeric color values will have noDataColor as their background color.
* `headerColor` (*type:* `GoogleApi.Sheets.V4.Model.Color.t`, *default:* `nil`) - The background color for header cells.
* `headerColorStyle` (*type:* `GoogleApi.Sheets.V4.Model.ColorStyle.t`, *default:* `nil`) - The background color for header cells. If header_color is also set, this field takes precedence.
* `hideTooltips` (*type:* `boolean()`, *default:* `nil`) - True to hide tooltips.
* `hintedLevels` (*type:* `integer()`, *default:* `nil`) - The number of additional data levels beyond the labeled levels to be shown on the treemap chart. These levels are not interactive and are shown without their labels. Defaults to 0 if not specified.
* `labels` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data that contains the treemap cell labels.
* `levels` (*type:* `integer()`, *default:* `nil`) - The number of data levels to show on the treemap chart. These levels are interactive and are shown with their labels. Defaults to 2 if not specified.
* `maxValue` (*type:* `float()`, *default:* `nil`) - The maximum possible data value. Cells with values greater than this will have the same color as cells with this value. If not specified, defaults to the actual maximum value from color_data, or the maximum value from size_data if color_data is not specified.
* `minValue` (*type:* `float()`, *default:* `nil`) - The minimum possible data value. Cells with values less than this will have the same color as cells with this value. If not specified, defaults to the actual minimum value from color_data, or the minimum value from size_data if color_data is not specified.
* `parentLabels` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data the contains the treemap cells' parent labels.
* `sizeData` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data that determines the size of each treemap data cell. This data is expected to be numeric. The cells corresponding to non-numeric or missing data will not be rendered. If color_data is not specified, this data is used to determine data cell background colors as well.
* `textFormat` (*type:* `GoogleApi.Sheets.V4.Model.TextFormat.t`, *default:* `nil`) - The text format for all labels on the chart. The link field is not supported.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:colorData => GoogleApi.Sheets.V4.Model.ChartData.t() | nil,
:colorScale => GoogleApi.Sheets.V4.Model.TreemapChartColorScale.t() | nil,
:headerColor => GoogleApi.Sheets.V4.Model.Color.t() | nil,
:headerColorStyle => GoogleApi.Sheets.V4.Model.ColorStyle.t() | nil,
:hideTooltips => boolean() | nil,
:hintedLevels => integer() | nil,
:labels => GoogleApi.Sheets.V4.Model.ChartData.t() | nil,
:levels => integer() | nil,
:maxValue => float() | nil,
:minValue => float() | nil,
:parentLabels => GoogleApi.Sheets.V4.Model.ChartData.t() | nil,
:sizeData => GoogleApi.Sheets.V4.Model.ChartData.t() | nil,
:textFormat => GoogleApi.Sheets.V4.Model.TextFormat.t() | nil
}
field(:colorData, as: GoogleApi.Sheets.V4.Model.ChartData)
field(:colorScale, as: GoogleApi.Sheets.V4.Model.TreemapChartColorScale)
field(:headerColor, as: GoogleApi.Sheets.V4.Model.Color)
field(:headerColorStyle, as: GoogleApi.Sheets.V4.Model.ColorStyle)
field(:hideTooltips)
field(:hintedLevels)
field(:labels, as: GoogleApi.Sheets.V4.Model.ChartData)
field(:levels)
field(:maxValue)
field(:minValue)
field(:parentLabels, as: GoogleApi.Sheets.V4.Model.ChartData)
field(:sizeData, as: GoogleApi.Sheets.V4.Model.ChartData)
field(:textFormat, as: GoogleApi.Sheets.V4.Model.TextFormat)
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.TreemapChartSpec do
def decode(value, options) do
GoogleApi.Sheets.V4.Model.TreemapChartSpec.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.TreemapChartSpec do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 74.036145 | 807 | 0.73214 |
9ea6812c55aa7139aaa0b7f204058e5bec5a150f | 1,622 | exs | Elixir | test/connect_test.exs | jarroput/molliex | 52741bf9267159438590449eb1cc47761ccf2aec | [
"MIT"
] | 12 | 2018-11-27T13:51:07.000Z | 2020-07-14T18:57:15.000Z | test/connect_test.exs | jarroput/molliex | 52741bf9267159438590449eb1cc47761ccf2aec | [
"MIT"
] | 7 | 2018-11-19T12:40:18.000Z | 2021-01-20T14:48:09.000Z | test/connect_test.exs | jarroput/molliex | 52741bf9267159438590449eb1cc47761ccf2aec | [
"MIT"
] | 4 | 2019-03-08T09:52:59.000Z | 2021-01-18T17:47:49.000Z | defmodule ConnectTest do
use ExUnit.Case
doctest Mollie
test "authorize_url with map" do
client = Mollie.Client.new()
params = %{
"client_id" => "app_dnR5f6uPDWhrvZkiL9ex7Wjj",
"state" => "a63f5cfcdaa209e2302be84da28008e8",
"scope" => "payments.read payments.write profiles.read organizations.read",
"response_type" => "code",
"approval_prompt" => "auto",
"redirect_uri" => "https://example.com"
}
authorization_url = Mollie.Connect.authorization_url(client, params)
assert authorization_url ==
"https://api.mollie.com/oauth2/authorize?approval_prompt=auto&client_id=app_dnR5f6uPDWhrvZkiL9ex7Wjj&redirect_uri=https%3A%2F%2Fexample.com&response_type=code&scope=payments.read+payments.write+profiles.read+organizations.read&state=a63f5cfcdaa209e2302be84da28008e8"
end
test "authorize_url with keyword list" do
client = Mollie.Client.new()
params = [
response_type: "code",
state: "a63f5cfcdaa209e2302be84da28008e8",
client_id: "app_dnR5f6uPDWhrvZkiL9ex7Wjj",
approval_prompt: "auto",
scope: "payments.read payments.write profiles.read organizations.read",
redirect_uri: "https://example.com"
]
authorization_url = Mollie.Connect.authorization_url(client, params)
assert authorization_url ==
"https://api.mollie.com/oauth2/authorize?approval_prompt=auto&client_id=app_dnR5f6uPDWhrvZkiL9ex7Wjj&redirect_uri=https%3A%2F%2Fexample.com&response_type=code&scope=payments.read+payments.write+profiles.read+organizations.read&state=a63f5cfcdaa209e2302be84da28008e8"
end
end
| 39.560976 | 279 | 0.736745 |
9ea68229eaa860a6cd0e367d3fd82d073acf2b64 | 944 | exs | Elixir | test/oli_web/controllers/static_page_controller_test.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | null | null | null | test/oli_web/controllers/static_page_controller_test.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | null | null | null | test/oli_web/controllers/static_page_controller_test.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | null | null | null | defmodule OliWeb.StaticPageControllerTest do
use OliWeb.ConnCase
test "GET /", %{conn: conn} do
conn = get(conn, "/")
assert html_response(conn, 200) =~ "Welcome to"
assert html_response(conn, 200) =~ "Learner/Educator Sign In"
assert html_response(conn, 200) =~ "Authoring Sign In"
end
describe "set_session" do
test "stores the message id correctly when the session value is not set", %{conn: conn} do
conn = post(conn, Routes.static_page_path(conn, :set_session), dismissed_message: "1")
assert get_session(conn, :dismissed_messages) == [1]
end
test "stores the message id correctly when the session value is not empty", %{conn: conn} do
conn = Plug.Test.init_test_session(conn, %{dismissed_messages: [2]})
conn = post(conn, Routes.static_page_path(conn, :set_session), dismissed_message: "1")
assert get_session(conn, :dismissed_messages) == [1, 2]
end
end
end
| 33.714286 | 96 | 0.688559 |
9ea6c96b01647c146629b73d1bbd0e1c28292a0b | 181 | exs | Elixir | test/controllers/page_controller_test.exs | avval-alumni/alumni_book | 17b27da849919312a332aaa3b39ce5c65032f2b4 | [
"MIT"
] | null | null | null | test/controllers/page_controller_test.exs | avval-alumni/alumni_book | 17b27da849919312a332aaa3b39ce5c65032f2b4 | [
"MIT"
] | null | null | null | test/controllers/page_controller_test.exs | avval-alumni/alumni_book | 17b27da849919312a332aaa3b39ce5c65032f2b4 | [
"MIT"
] | null | null | null | defmodule AlumniBook.PageControllerTest do
use AlumniBookWeb.ConnCase
test "GET /" do
conn = get(build_conn(), "/")
assert html_response(conn, 200) =~ "body"
end
end
| 20.111111 | 45 | 0.690608 |
9ea6db93c9d22fb77279651f8d7b5b2cb1c83275 | 9,744 | ex | Elixir | apps/broker/lib/collector/validator/transaction/validator.ex | iotaledger/chronicle | 73566e5613268e4b0c5951265ae4760cedb4051f | [
"Apache-2.0"
] | 19 | 2019-09-17T18:14:36.000Z | 2021-12-06T07:29:27.000Z | apps/broker/lib/collector/validator/transaction/validator.ex | iotaledger/chronicle | 73566e5613268e4b0c5951265ae4760cedb4051f | [
"Apache-2.0"
] | 5 | 2019-09-30T04:57:14.000Z | 2020-11-10T15:41:03.000Z | apps/broker/lib/collector/validator/transaction/validator.ex | iotaledger/chronicle | 73566e5613268e4b0c5951265ae4760cedb4051f | [
"Apache-2.0"
] | 2 | 2019-09-17T19:03:16.000Z | 2021-03-01T01:04:31.000Z | defmodule Broker.Collector.TransactionValidator do
@max_demand Application.get_env(:broker, :__MAX_DEMAND__) || 64
@min_demand Application.get_env(:broker, :__MIN_DEMAND__) || 0
use GenStage
require Logger
@spec start_link(Keyword.t) :: tuple
def start_link(args) do
name = name_by_topic?(args)
GenStage.start_link(__MODULE__, [name: name] ++ args, name: name)
end
@spec init(Keyword.t) :: tuple
def init(args) do
# put name
Process.put(:name, args[:name])
partitions = args[:transaction_partitions]
dispatch_fn = fn(event) -> {event, :erlang.phash2(elem(event,0), partitions)} end
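    # e.g. an event {hash, trytes, nil} is routed to partition
    # :erlang.phash2(hash, partitions), so the same tx hash always lands
    # on the same collector partition (illustrative)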
opts = [
# this option makes the Validator act as a PartitionDispatcher toward the tx collectors
dispatcher: {GenStage.PartitionDispatcher, partitions: partitions,
hash: dispatch_fn},
# this option subscribes the Validator to its producer (the distributor)
subscribe_to: [{dis_name_by_topic?(args), max_demand: @max_demand, min_demand: @min_demand}]
]
{:producer_consumer, %{producer: nil}, opts}
end
@spec handle_subscribe(atom, tuple | list, tuple, map) :: tuple
def handle_subscribe(:producer, _, from, state) do
# we store the producer's (distributor's) from-reference so we can send ask requests later
state = %{state | producer: from}
Logger.info("Validator: #{Process.get(:name)} got subscribed_to Distributor")
{:automatic, state}
end
@spec handle_subscribe(atom, tuple | list, tuple, map) :: tuple
def handle_subscribe(:consumer, _, _from, state) do
Logger.info("Validator: #{Process.get(:name)} got subscribed_to Transaction Collector")
# we keep it automatic to dispatch events on the fly
{:automatic, state}
end
@doc """
Handles events from the producer (distributor):
- validates the events (txs)
- asks for max_demand
- sends the valid ones as events to the tx collector(s)
"""
@spec handle_events(list, tuple, map) :: tuple
def handle_events(events, _from, state) do
# process the events and return list of booleans
events_status = process_events(events)
# keep only the valid events
events = extract_valid(events_status,events)
# pass events to tx_collector(s)
{:noreply, events, state}
end
def handle_info(:ask, state) do
GenStage.ask(state[:producer], @max_demand)
{:noreply,[],state}
end
def child_spec(args) do
%{
id: name_by_topic?(args),
start: {__MODULE__, :start_link, [args]},
type: :worker,
restart: :permanent,
shutdown: 500
}
end
# private functions to process the events
defp process_events(events,hashes\\<<>>,
p0\\<<>>,p1\\<<>>,p2\\<<>>,p3\\<<>>,p4\\<<>>,p5\\<<>>,
p6\\<<>>,p7\\<<>>,p8\\<<>>,p9\\<<>>,p10\\<<>>,p11\\<<>>,p12\\<<>>,
p13\\<<>>,p14\\<<>>,p15\\<<>>,p16\\<<>>,p17\\<<>>,p18\\<<>>,p19\\<<>>,
p20\\<<>>,p21\\<<>>,p22\\<<>>,p23\\<<>>,p24\\<<>>,p25\\<<>>,p26\\<<>>,
p27\\<<>>,p28\\<<>>,p29\\<<>>,p30\\<<>>,p31\\<<>>,p32\\<<>>)
defp process_events([{hash, <<c0::81-bytes,c1::81-bytes,
c2::81-bytes,c3::81-bytes,c4::81-bytes,c5::81-bytes,c6::81-bytes,c7::81-bytes,
c8::81-bytes,c9::81-bytes,c10::81-bytes,c11::81-bytes,c12::81-bytes,
c13::81-bytes,c14::81-bytes,c15::81-bytes,c16::81-bytes,c17::81-bytes,
c18::81-bytes,c19::81-bytes,c20::81-bytes,c21::81-bytes,c22::81-bytes,
c23::81-bytes,c24::81-bytes,c25::81-bytes,c26::81-bytes,c27::81-bytes,
c28::81-bytes,c29::81-bytes,c30::81-bytes,c31::81-bytes,c32::81-bytes>>,_nil}
| rest],hashes,p0,p1,p2,p3,p4,p5,p6,p7,p8,p9,p10,p11,p12,p13,p14,p15,p16,p17,
p18,p19,p20,p21,p22,p23,p24,p25,p26,p27,p28,p29,p30,p31,p32) do
# keep processing events
process_events(rest,hashes<>hash,p0<>c0,p1<>c1,p2<>c2,p3<>c3,p4<>c4,
p5<>c5,p6<>c6,p7<>c7,p8<>c8,p9<>c9,p10<>c10,p11<>c11,p12<>c12,p13<>c13,
p14<>c14,p15<>c15,p16<>c16,p17<>c17,p18<>c18,p19<>c19,p20<>c20,p21<>c21,
p22<>c22,p23<>c23,p24<>c24,p25<>c25,p26<>c26,p27<>c27,p28<>c28,p29<>c29,
p30<>c30,p31<>c31,p32<>c32)
end
defp process_events([], hashes, p0,p1,p2,p3,p4,p5,p6,p7,p8,p9,p10,
p11,p12,p13,p14,p15,p16,p17,p18,p19,p20,p21,p22,p23,p24,p25,p26,p27,
p28,p29,p30,p31,p32) do
# get the number of events (tx_count) by checking the byte size of the concatenated hashes:
tx_count = div(byte_size(hashes),81)
# now we have the complete chunks 0..32,
# so we call add_trytes and absorb for each part (p0..p32)
# ############## init curl_p ################
{:ok, pecurl} = Nifs.curl_p_init()
# ####### add_trytes and absorb #############
# absorb each completed 81-tryte part (p0..p32) in order
Enum.each(
  [p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14, p15, p16,
   p17, p18, p19, p20, p21, p22, p23, p24, p25, p26, p27, p28, p29, p30, p31, p32],
  fn part ->
    Nifs.add_trytes(pecurl, tx_count, part)
    Nifs.absorb(pecurl)
  end
)
############ squeeze #############
Nifs.squeeze(pecurl)
############ get trytes and compare hashes, should return [bool] #############
Nifs.get_status(pecurl, tx_count, hashes)
end
defp extract_valid(events_status, events, valid_events_acc \\ [])
defp extract_valid([event_status | rest_status],[event | rest_events], valid_events_acc) do
valid_events_acc =
if event_status do
[event | valid_events_acc]
else
valid_events_acc
end
extract_valid(rest_status,rest_events,valid_events_acc)
end
defp extract_valid([],[], valid_events_acc) do
valid_events_acc
end
# generate the validator name by topic name
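# e.g. name_by_topic?(topic: :tx_trytes, num: 0) #=> :tv0 (illustrative)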
defp name_by_topic?(args) do
case args[:topic] do
:tx_trytes ->
:"tv#{args[:num]}"
:sn_trytes ->
:"sv#{args[:num]}"
end
end
# generate the distributor name by topic name
defp dis_name_by_topic?(args) do
case args[:topic] do
:tx_trytes ->
:tx_distributor
:sn_trytes ->
:sn_distributor
end
end
end
| 34.431095 | 98 | 0.60663 |
9ea6fdb94afbc2e50aff166c3fb6f4a2a51258ce | 77 | exs | Elixir | config/dev.exs | OffgridElectric/nerves_hub_user_api | dd77a47fd9bc51c9a76732056bc0b3fc0019f770 | [
"Apache-2.0"
] | 2 | 2019-06-18T19:59:13.000Z | 2020-01-13T18:45:50.000Z | config/dev.exs | OffgridElectric/nerves_hub_user_api | dd77a47fd9bc51c9a76732056bc0b3fc0019f770 | [
"Apache-2.0"
] | 9 | 2019-03-08T19:06:33.000Z | 2022-03-16T21:35:04.000Z | config/dev.exs | OffgridElectric/nerves_hub_user_api | dd77a47fd9bc51c9a76732056bc0b3fc0019f770 | [
"Apache-2.0"
] | 3 | 2019-11-25T12:15:30.000Z | 2021-01-22T16:17:37.000Z | use Mix.Config
config :nerves_hub_user_api,
host: "0.0.0.0",
port: 4002
| 12.833333 | 28 | 0.688312 |
9ea728067a22e661093c5b4722e8b2ebe190257f | 3,084 | ex | Elixir | lib/periodic/test.ex | QuinnWilton/parent | 7c4c983a38c25a409e8fb61c57daf8a8c083a275 | [
"MIT"
] | null | null | null | lib/periodic/test.ex | QuinnWilton/parent | 7c4c983a38c25a409e8fb61c57daf8a8c083a275 | [
"MIT"
] | null | null | null | lib/periodic/test.ex | QuinnWilton/parent | 7c4c983a38c25a409e8fb61c57daf8a8c083a275 | [
"MIT"
] | null | null | null | defmodule Periodic.Test do
@moduledoc """
Helpers for testing a periodic job.
See the "Testing" section in `Periodic` documentation for details.
"""
public_telemetry_events = ~w/started finished skipped stopped_previous/a
@telemetry_events if Mix.env() != :test,
do: public_telemetry_events,
else: [:next_tick | public_telemetry_events]
@doc """
Sends a tick signal to the given scheduler.
This function returns after the tick signal has been sent, and the job has been started.
However, the function doesn't wait for the job to finish. If you want complete synchronism, use
`sync_tick/2`
"""
@spec tick(GenServer.server()) :: :ok
def tick(pid_or_name), do: :ok = GenServer.call(pid_or_name, {:tick, []})
@doc """
Sends a tick signal to the given scheduler and waits for the job to finish.
The function returns the job exit reason, or error if the job hasn't been started.
"""
@spec sync_tick(GenServer.server(), non_neg_integer | :infinity) ::
{:ok, job_exit_reason :: any} | {:error, :job_not_started}
def sync_tick(pid_or_name, timeout \\ :timer.seconds(5)) do
GenServer.call(pid_or_name, {:tick, [await_job?: true]}, timeout)
end
@doc "Subscribes to telemetry events of the given scheduler."
@spec observe(any) :: :ok
def observe(telemetry_id),
do: Enum.each(@telemetry_events, &attach_telemetry_handler(telemetry_id, &1))
@doc "Waits for the given telemetry event."
defmacro assert_periodic_event(
telemetry_id,
event,
metadata \\ quote(do: _),
measurements \\ quote(do: _)
) do
quote do
assert_receive {
unquote(__MODULE__),
unquote(telemetry_id),
unquote(event),
unquote(metadata),
unquote(measurements)
},
100
end
end
@doc "Asserts that the given telemetry event won't be emitted."
defmacro refute_periodic_event(
telemetry_id,
event,
metadata \\ quote(do: _),
measurements \\ quote(do: _)
) do
quote do
refute_receive {
unquote(__MODULE__),
unquote(telemetry_id),
unquote(event),
unquote(metadata),
unquote(measurements)
},
100
end
end
defp attach_telemetry_handler(telemetry_id, event) do
handler_id = make_ref()
event_name = [Periodic, telemetry_id, event]
:telemetry.attach(handler_id, event_name, telemetry_handler(event_name), nil)
ExUnit.Callbacks.on_exit(fn -> :telemetry.detach(handler_id) end)
end
defp telemetry_handler(event_name) do
test_pid = self()
fn [Periodic, telemetry_id, event] = ^event_name, measurements, metadata, nil ->
send(test_pid, {__MODULE__, telemetry_id, event, metadata, measurements})
end
end
end
| 33.16129 | 97 | 0.605707 |
9ea75f3c314658893d7e62557b7de67415a9e885 | 109 | exs | Elixir | test/cx_leaderboard_test.exs | crossfield/cx_leaderboard | 600a29a767562424ea72ee4e6f7e5fb1a728e03a | [
"Apache-2.0"
] | 12 | 2018-04-14T16:59:56.000Z | 2018-09-23T06:45:03.000Z | test/cx_leaderboard_test.exs | crossfield/cx_leaderboard | 600a29a767562424ea72ee4e6f7e5fb1a728e03a | [
"Apache-2.0"
] | 1 | 2019-02-22T07:36:41.000Z | 2019-02-22T07:36:41.000Z | test/cx_leaderboard_test.exs | crossfield/cx_leaderboard | 600a29a767562424ea72ee4e6f7e5fb1a728e03a | [
"Apache-2.0"
] | 3 | 2019-02-22T07:32:57.000Z | 2021-07-19T20:04:22.000Z | defmodule CxLeaderboardTest do
use ExUnit.Case
alias CxLeaderboard.Leaderboard
doctest Leaderboard
end
| 18.166667 | 33 | 0.834862 |
9ea7686de3778222d1bff38623f6801251d3fd2f | 655 | exs | Elixir | priv/repo/migrations/20200106204106_create_cells_table.exs | sntpiraquara/mapa_celulas | 5e0b1206748bd5169cefb75b006a5489117bfda3 | [
"MIT"
] | null | null | null | priv/repo/migrations/20200106204106_create_cells_table.exs | sntpiraquara/mapa_celulas | 5e0b1206748bd5169cefb75b006a5489117bfda3 | [
"MIT"
] | null | null | null | priv/repo/migrations/20200106204106_create_cells_table.exs | sntpiraquara/mapa_celulas | 5e0b1206748bd5169cefb75b006a5489117bfda3 | [
"MIT"
] | null | null | null | defmodule MapaCelulas.Repo.Migrations.CreateCellsTable do
use Ecto.Migration
def up do
create table(:cells) do
add :uuid, :uuid
add :title, :string
add :lat, :float
add :lng, :float
add :day_of_week, :integer
add :start_time, :string
# Address Stuff
add :address_line, :string
add :address_line2, :string
add :address_number, :integer
add :address_district, :string
add :address_state, :string
add :address_country, :string
add :address_city, :string
timestamps()
end
end
def down do
drop table("cells")
end
end
| 19.848485 | 57 | 0.60458 |
9ea7745c92b9e732ceecd577b53b459be7c05bcd | 1,284 | ex | Elixir | restserver_supervised/lib/restserver.ex | arquitecturas-concurrentes/iasc-otp-elixir-2019c2 | c8c6c88db978785f439596e0b5f582473b54a35f | [
"BSD-3-Clause"
] | null | null | null | restserver_supervised/lib/restserver.ex | arquitecturas-concurrentes/iasc-otp-elixir-2019c2 | c8c6c88db978785f439596e0b5f582473b54a35f | [
"BSD-3-Clause"
] | null | null | null | restserver_supervised/lib/restserver.ex | arquitecturas-concurrentes/iasc-otp-elixir-2019c2 | c8c6c88db978785f439596e0b5f582473b54a35f | [
"BSD-3-Clause"
] | null | null | null | defmodule RESTServer do
use GenServer
## Client API
def start_link([], name) do
start_link(name)
end
def start_link(name) do
GenServer.start_link(__MODULE__, :ok, [name: name])
end
# helper functions, use the callbacks
def get(server, url) do
GenServer.call(server, {:get, url})
end
def post(server, url, body) do
GenServer.cast(server, {:post, url, body})
end
def put(server, url, body) do
GenServer.cast(server, {:put, url, body})
end
def delete(server, url) do
GenServer.cast(server, {:delete, url})
end
def break(server) do
Process.exit(server, :shutdown)
end
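# Example session, as a sketch (:rest_server is an arbitrary registered name):
#
#     {:ok, _pid} = RESTServer.start_link(:rest_server)
#     RESTServer.post(:rest_server, "/users/1", %{name: "Ann"})
#     {:ok, %{name: "Ann"}} = RESTServer.get(:rest_server, "/users/1")
#     {:ok, :not_found} = RESTServer.get(:rest_server, "/missing")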
## Server Callbacks
def init(:ok) do
{:ok, %{}}
end
def handle_call({:get, url}, _from, state) do
if Map.has_key?(state, url) do
{:reply, {:ok, Map.get(state, url)}, state}
else
{:reply, {:ok, :not_found}, state}
end
end
def handle_cast({:post, url, body}, state) do
{:noreply, Map.put(state, url, body)}
end
def handle_cast({:put, url, body}, state) do
{:noreply, Map.put(state, url, body)}
end
def handle_cast({:delete, url}, state) do
{:noreply, Map.delete(state, url)}
end
def handle_info(msg, state) do
IO.puts("Message not understood: #{inspect(msg)}")
{:noreply, state}
end
end
| 19.753846 | 55 | 0.625389 |
9ea7965bd78996d251ae33515c37b25268f48c37 | 1,255 | exs | Elixir | mix.exs | maartenvanvliet/libcluster_ec2 | 10e21ab66c2e980b530961a964ebad3f610479da | [
"MIT"
] | null | null | null | mix.exs | maartenvanvliet/libcluster_ec2 | 10e21ab66c2e980b530961a964ebad3f610479da | [
"MIT"
] | null | null | null | mix.exs | maartenvanvliet/libcluster_ec2 | 10e21ab66c2e980b530961a964ebad3f610479da | [
"MIT"
] | null | null | null | defmodule ClusterEC2.Mixfile do
use Mix.Project
def project do
[
app: :libcluster_ec2,
version: "0.6.0",
elixir: "~> 1.4",
name: "libcluster_ec2",
source_url: "https://github.com/kyleaa/libcluster_ec2",
homepage_url: "https://github.com/kyleaa/libcluster_ec2",
description: description(),
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
deps: deps(),
package: package()
]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
# Specify extra applications you'll use from Erlang/Elixir
[extra_applications: [:logger]]
end
defp deps do
[
{:libcluster, "~> 2.0 or ~> 3.0"},
{:ex_aws, "~> 2.0"},
{:ex_aws_ec2, "~> 2.0"},
{:sweet_xml, "~> 0.6"},
{:hackney, "~> 1.8"},
{:tesla, "~> 1.0"},
{:ex_doc, ">= 0.0.0", only: :dev}
]
end
defp description do
"""
EC2 clustering strategy for libcluster
"""
end
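# Consumers wire this strategy into a libcluster topology. A sketch; the
# strategy module and tag name below are assumptions for illustration:
#
#     config :libcluster,
#       topologies: [
#         ec2: [
#           strategy: ClusterEC2.Strategy.Tags,
#           config: [ec2_tagname: "cluster_name"]
#         ]
#       ]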
def package do
[
maintainers: ["Kyle Anderson"],
licenses: ["MIT License"],
links: %{
"GitHub" => "https://github.com/kyleaa/libcluster_ec2.git"
}
]
end
end
| 22.410714 | 66 | 0.564143 |
9ea7a1a686130298de1312aabc3746df370d68de | 212 | ex | Elixir | lib/spidey/application.ex | Manzanit0/spidey | 9d5b1de5bc2f8fa8f925c449aa1ffd5613edf99b | [
"MIT"
] | 4 | 2020-09-14T09:21:01.000Z | 2020-09-19T07:31:40.000Z | lib/spidey/application.ex | Manzanit0/spidey | 9d5b1de5bc2f8fa8f925c449aa1ffd5613edf99b | [
"MIT"
] | 1 | 2021-03-10T23:19:36.000Z | 2021-04-04T10:59:00.000Z | lib/spidey/application.ex | Manzanit0/spidey | 9d5b1de5bc2f8fa8f925c449aa1ffd5613edf99b | [
"MIT"
] | null | null | null | defmodule Spidey.Application do
use Application
def start(_type, _args) do
children = []
opts = [strategy: :one_for_one, name: Spidey.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 19.272727 | 60 | 0.712264 |
9ea7ca1b7965d5d46413db832a65ffa31e81de1c | 1,232 | exs | Elixir | test/lib/new_relix/collector_test.exs | wfgilman/NewRelix | 76dc43a1fad38c30f96ab9d653e3c4eb4ae56b30 | [
"MIT"
] | null | null | null | test/lib/new_relix/collector_test.exs | wfgilman/NewRelix | 76dc43a1fad38c30f96ab9d653e3c4eb4ae56b30 | [
"MIT"
] | null | null | null | test/lib/new_relix/collector_test.exs | wfgilman/NewRelix | 76dc43a1fad38c30f96ab9d653e3c4eb4ae56b30 | [
"MIT"
] | null | null | null | defmodule NewRelix.CollectorTest do
@moduledoc false
use ExUnit.Case
alias NewRelix.Collector
setup_all do
key = "Database/ETL"
value = 1_800
{:ok, key: key, value: value}
end
describe "collector" do
test "record_value/1 accepts instrumentation", %{key: key, value: value} do
assert :ok = Collector.record_value(key, value)
end
test "handle_cast/2 stores instrumentation in state", %{key: k, value: v} do
{:noreply, state} = Collector.handle_cast({:record_value, k, v},
%Collector{})
assert state.data == %{k => [v]}
end
test "poll/0 returns state with data", %{key: k, value: v} do
Collector.poll() # Purge
:ok = Collector.record_value(k, v)
state = Collector.poll()
assert state.data == %{k => [v]}
end
test "record_value/1 updates key with additional values",
%{key: k, value: v} do
Collector.poll() # Purge
:ok = Collector.record_value(k, v)
:ok = Collector.record_value(k, 2_000)
state = Collector.poll()
assert state.data == %{k => [2_000, 1_800]}
end
end
end
| 26.212766 | 80 | 0.560065 |
9ea7ef58b51cd7323b01bf899d2bf012c3cc90ae | 1,713 | exs | Elixir | mix.exs | cdale77/bart_scrape | 8696b303f1111a29cfdbea80f15823da3ea5747d | [
"MIT"
] | null | null | null | mix.exs | cdale77/bart_scrape | 8696b303f1111a29cfdbea80f15823da3ea5747d | [
"MIT"
] | null | null | null | mix.exs | cdale77/bart_scrape | 8696b303f1111a29cfdbea80f15823da3ea5747d | [
"MIT"
] | null | null | null | defmodule BartScrape.Mixfile do
use Mix.Project
def project do
[
app: :bart_scrape,
version: "0.0.1",
elixir: "~> 1.5",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix, :gettext] ++ Mix.compilers,
start_permanent: Mix.env == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {BartScrape, []},
extra_applications: [:logger]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.3.0"},
{:phoenix_pubsub, "~> 1.0"},
{:phoenix_ecto, "~> 3.2"},
{:postgrex, ">= 0.13.0"},
{:gettext, "~> 0.13"},
{:cowboy, "~> 1.0"},
{:poison, "~> 3.1"},
{:httpoison, "~> 0.13"},
{:mock, "~> 0.3.0", only: :test},
{:tzdata, "~> 0.1.8", override: true},
{:timex, "~> 3.1"},
{:distillery, "~> 1.5", runtime: false}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
"test": ["ecto.create --quiet", "ecto.migrate", "test"]
]
end
end
| 26.353846 | 79 | 0.562172 |
9ea7f38e78ac1d3e2e43ffd32e91804bb25ef555 | 1,339 | exs | Elixir | mix.exs | smeevil/alods | 284224bc63c668e1e273251c67614aa2e40b37eb | [
"WTFPL"
] | 1 | 2017-10-03T12:30:53.000Z | 2017-10-03T12:30:53.000Z | mix.exs | smeevil/alods | 284224bc63c668e1e273251c67614aa2e40b37eb | [
"WTFPL"
] | null | null | null | mix.exs | smeevil/alods | 284224bc63c668e1e273251c67614aa2e40b37eb | [
"WTFPL"
] | null | null | null | defmodule Alods.Mixfile do
use Mix.Project
def project do
[
app: :alods,
version: "0.1.0",
elixir: "~> 1.5",
start_permanent: Mix.env == :prod,
deps: deps(),
dialyzer: [
plt_add_deps: true,
ignore_warnings: ".dialyzer_ignore",
flags: [
:error_handling,
:race_conditions,
:unknown,
:unmatched_returns,
],
],
test_coverage: [
tool: ExCoveralls
],
preferred_cli_env: [
"coveralls": :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test,
"vcr": :test,
"vcr.delete": :test,
"vcr.check": :test,
"vcr.show": :test
],
]
end
def application do
[
extra_applications: [:logger],
mod: {Alods.Supervisor, []}
]
end
defp deps do
[
{:cortex, ">= 0.0.0", only: [:dev, :test]},
{:credo, ">= 0.0.0", only: [:dev]},
{:dialyxir, ">= 0.0.0", only: [:dev]},
{:ecto, ">= 0.0.0"},
{:ex2ms, ">= 0.0.0"},
{:excoveralls, ">= 0.0.0", only: [:test]},
{:ex_doc, ">= 0.0.0", only: [:dev]},
{:gen_stage, ">= 0.0.0"},
{:httpoison, ">= 0.0.0"},
{:poison, ">= 0.0.0"},
{:exvcr, ">= 0.0.0", only: [:test]},
]
end
end
| 22.316667 | 49 | 0.45407 |
9ea7fc5660e4dfa1d5f7bf473a6fe03e43ed2f67 | 186 | exs | Elixir | apps/re/priv/repo/migrations/20171014132520_add_lat_lng_to_address.exs | ruby2elixir/emcasa-backend | 70d7f4f233555417941ffa6ada84cf8740c21dd2 | [
"MIT"
] | 4 | 2019-11-01T16:29:31.000Z | 2020-10-10T21:20:12.000Z | apps/re/priv/repo/migrations/20171014132520_add_lat_lng_to_address.exs | caspg/backend | 34df9dc14ab8ed75de4578fefa2e087580c7e867 | [
"MIT"
] | null | null | null | apps/re/priv/repo/migrations/20171014132520_add_lat_lng_to_address.exs | caspg/backend | 34df9dc14ab8ed75de4578fefa2e087580c7e867 | [
"MIT"
] | 5 | 2019-11-04T21:25:45.000Z | 2020-02-13T23:49:36.000Z | defmodule Re.Repo.Migrations.AddLatLngToAddress do
use Ecto.Migration
def change do
alter table(:addresses) do
add :lat, :string
add :lng, :string
end
end
end
| 16.909091 | 50 | 0.677419 |
9ea80b947e698e27bd46e2128ad1124311c2aa3c | 525 | ex | Elixir | Microsoft.Azure.Management.Compute/lib/microsoft/azure/management/compute/model/sub_resource_read_only.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Compute/lib/microsoft/azure/management/compute/model/sub_resource_read_only.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Compute/lib/microsoft/azure/management/compute/model/sub_resource_read_only.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Microsoft.Azure.Management.Compute.Model.SubResourceReadOnly do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:"id"
]
@type t :: %__MODULE__{
:"id" => String.t
}
end
defimpl Poison.Decoder, for: Microsoft.Azure.Management.Compute.Model.SubResourceReadOnly do
def decode(value, _options) do
value
end
end
| 20.192308 | 92 | 0.710476 |
9ea831a799bbde0f1370f651f61dfd4676e37460 | 166 | exs | Elixir | config/test.exs | Nehonar/cipher | 68f584f8eda1ee741c24bbed71587aae9336c397 | [
"MIT"
] | 65 | 2015-02-09T21:36:49.000Z | 2021-12-19T12:46:25.000Z | config/test.exs | Nehonar/cipher | 68f584f8eda1ee741c24bbed71587aae9336c397 | [
"MIT"
] | 20 | 2016-04-22T18:15:02.000Z | 2021-06-04T15:25:08.000Z | config/test.exs | Nehonar/cipher | 68f584f8eda1ee741c24bbed71587aae9336c397 | [
"MIT"
] | 30 | 2015-05-28T11:48:09.000Z | 2021-12-15T17:34:45.000Z | use Mix.Config
config :cipher, keyphrase: "testiekeyphraseforcipher",
ivphrase: "testieivphraseforcipher",
magic_token: "magictoken"
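# With these phrases set, Cipher's documented API should round-trip, e.g.
# (a sketch): "secret" |> Cipher.encrypt() |> Cipher.decrypt()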
| 27.666667 | 54 | 0.656627 |
9ea8558157fab239a5d75eb26dab99cc5efa1c99 | 497 | ex | Elixir | lib/watch_faces_web/views/error_view.ex | DaniruKun/watchfaces-ex | 699e345596000ec3e50141e44217b155400261d6 | [
"MIT"
] | null | null | null | lib/watch_faces_web/views/error_view.ex | DaniruKun/watchfaces-ex | 699e345596000ec3e50141e44217b155400261d6 | [
"MIT"
] | null | null | null | lib/watch_faces_web/views/error_view.ex | DaniruKun/watchfaces-ex | 699e345596000ec3e50141e44217b155400261d6 | [
"MIT"
] | null | null | null | defmodule WatchFacesWeb.ErrorView do
use WatchFacesWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
def template_not_found(template, _assigns) do
Phoenix.Controller.status_message_from_template(template)
end
end
| 29.235294 | 61 | 0.738431 |
9ea88f3d49a065761489711681b9d3e4361b54fb | 1,224 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/connection.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/connection.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/dialogflow/lib/google_api/dialogflow/v2/connection.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Connection do
@moduledoc """
Handle Tesla connections for GoogleApi.Dialogflow.V2.
"""
@type t :: Tesla.Env.client()
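# Usage sketch: `GoogleApi.Gax.Connection` injects client constructors such as
# `new/1`, which takes an OAuth2 token (the token below is a placeholder):
#
#     conn = GoogleApi.Dialogflow.V2.Connection.new("oauth2-access-token")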
use GoogleApi.Gax.Connection,
scopes: [
# View and manage your data across Google Cloud Platform services
"https://www.googleapis.com/auth/cloud-platform",
# View, manage and query your Dialogflow agents
"https://www.googleapis.com/auth/dialogflow"
],
otp_app: :google_api_dialogflow,
base_url: "https://dialogflow.googleapis.com/"
end
| 34 | 74 | 0.735294 |
9ea8a3032de241276e077c625d5e6fe0b815c63c | 1,109 | ex | Elixir | lib/accent/schemas/comment.ex | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 806 | 2018-04-07T20:40:33.000Z | 2022-03-30T01:39:57.000Z | lib/accent/schemas/comment.ex | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 194 | 2018-04-07T13:49:37.000Z | 2022-03-30T19:58:45.000Z | lib/accent/schemas/comment.ex | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 89 | 2018-04-09T13:55:49.000Z | 2022-03-24T07:09:31.000Z | defmodule Accent.Comment do
use Accent.Schema
import Ecto.Query, only: [where: 2]
schema "comments" do
field(:text, :string)
belongs_to(:translation, Accent.Translation)
belongs_to(:user, Accent.User)
timestamps()
end
@required_fields ~w(text user_id translation_id)a
def create_changeset(model, params) do
model
|> cast(params, @required_fields ++ [])
|> validate_required(@required_fields)
|> assoc_constraint(:user)
|> assoc_constraint(:translation)
|> prepare_changes(fn changeset ->
Accent.Translation
|> where(id: ^changeset.changes[:translation_id])
|> changeset.repo.update_all(inc: [comments_count: 1])
changeset
end)
end
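# The prepare_changes hook above maintains a counter cache: inserting a comment
# increments `comments_count` on the parent translation within the same
# transaction. A usage sketch (the Repo alias and attrs are assumptions):
#
#     %Accent.Comment{}
#     |> Accent.Comment.create_changeset(%{text: "Hi", user_id: user.id, translation_id: translation.id})
#     |> Repo.insert()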
def update_changeset(model, params) do
model
|> cast(params, ~w(text)a)
|> validate_required(~w(text)a)
end
def delete_changeset(model) do
model
|> change()
|> prepare_changes(fn changeset ->
Accent.Translation
|> where(id: ^model.translation_id)
|> changeset.repo.update_all(inc: [comments_count: -1])
changeset
end)
end
end
| 22.18 | 61 | 0.660956 |
9ea8b869f52b15deaa4c8dd23c727189888c420c | 9,525 | ex | Elixir | lib/exq/redis/job_stat.ex | instinctscience/exq | 6dac201d0b59a16f30ec4d6d24514689ef7dd9b0 | [
"Apache-2.0"
] | null | null | null | lib/exq/redis/job_stat.ex | instinctscience/exq | 6dac201d0b59a16f30ec4d6d24514689ef7dd9b0 | [
"Apache-2.0"
] | null | null | null | lib/exq/redis/job_stat.ex | instinctscience/exq | 6dac201d0b59a16f30ec4d6d24514689ef7dd9b0 | [
"Apache-2.0"
] | null | null | null | defmodule Exq.Redis.JobStat do
@moduledoc """
The JobStat module encapsulates storing system-wide stats on top of Redis
It aims to be compatible with the Sidekiq stats format.
"""
require Logger
alias Exq.Support.{Binary, Process, Job, Time, Node}
alias Exq.Redis.{Connection, JobQueue}
def record_processed_commands(namespace, _job, current_date \\ DateTime.utc_now()) do
{time, date} = Time.format_current_date(current_date)
[
["INCR", JobQueue.full_key(namespace, "stat:processed")],
["INCR", JobQueue.full_key(namespace, "stat:processed_rt:#{time}")],
["EXPIRE", JobQueue.full_key(namespace, "stat:processed_rt:#{time}"), 120],
["INCR", JobQueue.full_key(namespace, "stat:processed:#{date}")]
]
end
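# For a namespace of "exq" (assuming JobQueue.full_key/2 joins parts with ":"),
# this yields commands such as ["INCR", "exq:stat:processed"], plus a
# per-second realtime counter that expires after 120 seconds and a per-date
# counter.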
def record_processed(redis, namespace, job, current_date \\ DateTime.utc_now()) do
instr = record_processed_commands(namespace, job, current_date)
{:ok, [count, _, _, _]} = Connection.qp(redis, instr)
{:ok, count}
end
def record_failure_commands(namespace, _error, _job, current_date \\ DateTime.utc_now()) do
{time, date} = Time.format_current_date(current_date)
[
["INCR", JobQueue.full_key(namespace, "stat:failed")],
["INCR", JobQueue.full_key(namespace, "stat:failed_rt:#{time}")],
["EXPIRE", JobQueue.full_key(namespace, "stat:failed_rt:#{time}"), 120],
["INCR", JobQueue.full_key(namespace, "stat:failed:#{date}")]
]
end
def record_failure(redis, namespace, error, job, current_date \\ DateTime.utc_now()) do
instr = record_failure_commands(namespace, error, job, current_date)
{:ok, [count, _, _, _]} = Connection.qp(redis, instr)
{:ok, count}
end
def add_process_commands(namespace, process_info, serialized_process \\ nil) do
serialized = serialized_process || Exq.Support.Process.encode(process_info)
[["HSET", workers_key(namespace, process_info.host), process_info.pid, serialized]]
end
def add_process(redis, namespace, process_info, serialized_process \\ nil) do
instr = add_process_commands(namespace, process_info, serialized_process)
Connection.qp!(redis, instr)
:ok
end
def remove_process_commands(namespace, process_info) do
[["HDEL", workers_key(namespace, process_info.host), process_info.pid]]
end
def remove_process(redis, namespace, process_info) do
instr = remove_process_commands(namespace, process_info)
Connection.qp!(redis, instr)
:ok
end
def cleanup_processes(redis, namespace, host) do
Connection.del!(redis, workers_key(namespace, host))
:ok
end
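# Atomically (MULTI/EXEC) registers the node, refreshes its heartbeat with a
# 60-second TTL so dead nodes age out, and pops one pending signal queued for
# this node, returning it to the caller.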
def node_ping(redis, namespace, node) do
key = node_info_key(namespace, node.identity)
case Connection.qp(
redis,
[
["MULTI"],
["SADD", nodes_key(namespace), node.identity],
[
"HMSET",
key,
"info",
Node.encode(node),
"busy",
node.busy,
"beat",
Time.unix_seconds(),
"quiet",
node.quiet
],
["EXPIRE", key, 60],
["RPOP", "#{key}-signals"],
["EXEC"]
]
) do
{:ok, ["OK", "QUEUED", "QUEUED", "QUEUED", "QUEUED", [_, "OK", 1, signal]]} ->
signal
error ->
Logger.error("Failed to send node stats. Unexpected error from redis: #{inspect(error)}")
nil
end
end
def node_ids(redis, namespace) do
Connection.smembers!(redis, nodes_key(namespace))
end
def nodes(redis, namespace) do
commands =
node_ids(redis, namespace)
|> Enum.map(fn node_id -> ["HGET", node_info_key(namespace, node_id), "info"] end)
if Enum.empty?(commands) do
[]
else
Connection.qp!(redis, commands)
|> Enum.flat_map(fn result ->
if result && result != "" do
[Node.decode(result)]
else
[]
end
end)
end
end
def prune_dead_nodes(redis, namespace) do
node_ids = node_ids(redis, namespace)
commands =
node_ids
|> Enum.map(fn node_id -> ["HEXISTS", node_info_key(namespace, node_id), "info"] end)
if Enum.empty?(commands) do
[]
else
dead_node_ids =
Connection.qp!(redis, commands)
|> Enum.zip(node_ids)
|> Enum.flat_map(fn {exists, node_id} ->
if exists == 0 do
[node_id]
else
[]
end
end)
if !Enum.empty?(dead_node_ids) do
commands = [
["SREM", nodes_key(namespace)] ++ dead_node_ids,
["DEL"] ++ Enum.map(node_ids, &workers_key(namespace, &1))
]
Connection.qp(redis, commands)
end
end
end
def busy(redis, namespace) do
commands =
node_ids(redis, namespace)
|> Enum.map(fn node_id -> ["HGET", node_info_key(namespace, node_id), "busy"] end)
if Enum.empty?(commands) do
0
else
Connection.qp!(redis, commands)
|> Enum.reduce(0, fn count, sum -> sum + decode_integer(count) end)
end
end
def processes(redis, namespace) do
commands =
node_ids(redis, namespace)
|> Enum.map(fn node_id -> ["HVALS", workers_key(namespace, node_id)] end)
if Enum.empty?(commands) do
[]
else
Connection.qp!(redis, commands)
|> List.flatten()
|> Enum.map(&Process.decode/1)
end
end
def find_failed(redis, namespace, jid) do
redis
|> Connection.zrange!(JobQueue.full_key(namespace, "dead"), 0, -1)
|> JobQueue.search_jobs(jid)
end
def find_failed(redis, namespace, score, jid, options) do
find_by_score_and_jid(redis, JobQueue.full_key(namespace, "dead"), score, jid, options)
end
def find_retry(redis, namespace, score, jid, options) do
find_by_score_and_jid(redis, JobQueue.full_key(namespace, "retry"), score, jid, options)
end
def find_scheduled(redis, namespace, score, jid, options) do
find_by_score_and_jid(redis, JobQueue.full_key(namespace, "schedule"), score, jid, options)
end
def remove_queue(redis, namespace, queue) do
Connection.qp(redis, [
["SREM", JobQueue.full_key(namespace, "queues"), queue],
["DEL", JobQueue.queue_key(namespace, queue)]
])
end
def remove_failed(redis, namespace, jid) do
{:ok, failure} = find_failed(redis, namespace, jid)
Connection.qp(redis, [
["DECR", JobQueue.full_key(namespace, "stat:failed")],
["ZREM", JobQueue.full_key(namespace, "dead"), Job.encode(failure)]
])
end
def clear_failed(redis, namespace) do
Connection.qp(redis, [
["SET", JobQueue.full_key(namespace, "stat:failed"), 0],
["DEL", JobQueue.full_key(namespace, "dead")]
])
end
def clear_processes(redis, namespace) do
commands =
node_ids(redis, namespace)
|> Enum.map(fn node_id -> ["DEL", workers_key(namespace, node_id)] end)
if Enum.empty?(commands) do
0
else
Connection.qp!(redis, commands)
end
end
def realtime_stats(redis, namespace) do
failure_keys = realtime_stats_scanner(redis, JobQueue.full_key(namespace, "stat:failed_rt:*"))
success_keys =
realtime_stats_scanner(redis, JobQueue.full_key(namespace, "stat:processed_rt:*"))
formatter = realtime_stats_formatter(redis, namespace)
failures = formatter.(failure_keys, "stat:failed_rt:")
successes = formatter.(success_keys, "stat:processed_rt:")
{:ok, failures, successes}
end
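# Each returned list pairs a realtime key suffix (the timestamp portion of the
# key, as produced by Time.format_current_date/1) with its raw counter value as
# returned by Redis GET.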
defp realtime_stats_scanner(redis, key_pattern) do
{:ok, [[cursor, result]]} =
Connection.qp(redis, [["SCAN", 0, "MATCH", key_pattern, "COUNT", 1_000]])
realtime_stats_scan_keys(redis, key_pattern, cursor, result)
end
defp realtime_stats_scan_keys(_redis, _key_pattern, "0", accumulator) do
accumulator
end
defp realtime_stats_scan_keys(redis, key_pattern, cursor, accumulator) do
{:ok, [[new_cursor, result]]} =
Connection.qp(redis, [["SCAN", cursor, "MATCH", key_pattern, "COUNT", 1_000]])
realtime_stats_scan_keys(redis, key_pattern, new_cursor, accumulator ++ result)
end
defp realtime_stats_formatter(redis, namespace) do
fn keys, ns ->
if Enum.empty?(keys) do
[]
else
{:ok, counts} = Connection.qp(redis, Enum.map(keys, &["GET", &1]))
Enum.map(keys, &Binary.take_prefix(&1, JobQueue.full_key(namespace, ns)))
|> Enum.zip(counts)
end
end
end
def get_count(redis, namespace, key) do
Connection.get!(redis, JobQueue.full_key(namespace, "stat:#{key}"))
|> decode_integer()
end
def get_counts(redis, namespace, keys) do
{:ok, results} =
Connection.q(redis, ["MGET" | Enum.map(keys, &JobQueue.full_key(namespace, "stat:#{&1}"))])
Enum.map(results, &decode_integer/1)
end
def decode_integer(:undefined), do: 0
def decode_integer(nil), do: 0
def decode_integer(count) when is_integer(count), do: count
def decode_integer(count) when is_binary(count) do
{count, _} = Integer.parse(count)
count
end
defp find_by_score_and_jid(redis, zset, score, jid, options) do
redis
|> Connection.zrangebyscore!(zset, score, score)
|> JobQueue.search_jobs(jid, !Keyword.get(options, :raw, false))
end
defp workers_key(namespace, node_id) do
JobQueue.full_key(namespace, "#{node_id}:workers")
end
defp nodes_key(namespace) do
"#{namespace}:processes"
end
defp node_info_key(namespace, node_id) do
"#{namespace}:#{node_id}"
end
end
| 29.398148 | 98 | 0.637165 |
9ea8df581295f60085a5695512727e7b8e655ddf | 1,049 | ex | Elixir | lib/docdog_web/controllers/project_invite_controller.ex | wunsh/docdog-engine | a3044d21a6ac00098a295249358a367059453b8d | [
"MIT"
] | 40 | 2017-04-19T15:35:42.000Z | 2020-02-14T11:07:44.000Z | lib/docdog_web/controllers/project_invite_controller.ex | wunsh/docdog-engine | a3044d21a6ac00098a295249358a367059453b8d | [
"MIT"
] | 43 | 2017-01-17T23:03:47.000Z | 2018-08-16T01:25:24.000Z | lib/docdog_web/controllers/project_invite_controller.ex | wunsh/docdog-engine | a3044d21a6ac00098a295249358a367059453b8d | [
"MIT"
] | 6 | 2018-02-15T09:43:15.000Z | 2018-04-10T21:08:36.000Z | defmodule DocdogWeb.ProjectInviteController do
use DocdogWeb, :controller
plug(DocdogWeb.AuthorizationRequiredPlug)
alias Docdog.Editor
alias Docdog.Editor.Project
def show(conn, %{"invite_code" => invite_code}) do
project = Editor.get_project_by_invite_code!(invite_code)
render(conn, "show.html", project: project)
end
def create(conn, %{"invite_code" => invite_code}) do
user = conn.assigns.current_user
project = Editor.get_project_by_invite_code!(invite_code)
with :ok <- Bodyguard.permit(Editor, :project_accept_invite, user, project: project),
{:ok, _project} <- Editor.add_member_to_project(project, user) do
conn
|> put_flash(:info, "You successfully became a project member.")
|> redirect(to: project_document_path(conn, :index, project.id))
else
{:error, %Ecto.Changeset{} = _} ->
conn
|> put_flash(:error, "You've got error on accepting invite.")
|> render("show.html", project: project)
error ->
error
end
end
end
| 29.971429 | 89 | 0.679695 |
9ea8f9eab940e5de96a64e1e8af4858610ab6b44 | 146 | ex | Elixir | lib/credentials.ex | littlelines/frex | e3d9005b782fa0c0aaff0c0368f154c45fb7a302 | [
"Apache-2.0"
] | 2 | 2017-02-10T16:46:21.000Z | 2020-05-04T11:50:50.000Z | lib/credentials.ex | littlelines/frex | e3d9005b782fa0c0aaff0c0368f154c45fb7a302 | [
"Apache-2.0"
] | null | null | null | lib/credentials.ex | littlelines/frex | e3d9005b782fa0c0aaff0c0368f154c45fb7a302 | [
"Apache-2.0"
] | 1 | 2019-11-27T15:43:55.000Z | 2019-11-27T15:43:55.000Z | defmodule Frex.Credentials do
@moduledoc """
A struct for storing OAuth credentials.
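For example: `%Frex.Credentials{oauth_token: "token", oauth_secret: "secret"}`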
"""
defstruct oauth_token: "", oauth_secret: ""
end
| 18.25 | 45 | 0.705479 |
9ea90a0eef013c0ed7970d3d7790c562ff2db1f3 | 1,368 | ex | Elixir | lib/google_api/you_tube/v3/model/subscription_subscriber_snippet.ex | jesteracer/ytb | 67e3cab899e4f69e586383f7be2c3855c6beea49 | [
"Apache-2.0"
] | null | null | null | lib/google_api/you_tube/v3/model/subscription_subscriber_snippet.ex | jesteracer/ytb | 67e3cab899e4f69e586383f7be2c3855c6beea49 | [
"Apache-2.0"
] | null | null | null | lib/google_api/you_tube/v3/model/subscription_subscriber_snippet.ex | jesteracer/ytb | 67e3cab899e4f69e586383f7be2c3855c6beea49 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.SubscriptionSubscriberSnippet do
@moduledoc """
Basic details about a subscription's subscriber including title, description, channel ID and thumbnails.
"""
@derive [Poison.Encoder]
defstruct [
:"channelId",
:"description",
:"thumbnails",
:"title"
]
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.SubscriptionSubscriberSnippet do
import GoogleApi.YouTube.V3.Deserializer
def decode(value, options) do
value
|> deserialize(:"thumbnails", :struct, GoogleApi.YouTube.V3.Model.ThumbnailDetails, options)
end
end
| 32.571429 | 110 | 0.751462 |
9ea91b4b11301f4b74a297e43d6ec4580ef47bc7 | 1,283 | ex | Elixir | lib/sunshines_battleacademy/web/views/error_helpers.ex | djdduty/sunshines_battleacademy | bcc085a3a6a519fc1591d00a39783523644f87a0 | [
"Apache-2.0"
] | null | null | null | lib/sunshines_battleacademy/web/views/error_helpers.ex | djdduty/sunshines_battleacademy | bcc085a3a6a519fc1591d00a39783523644f87a0 | [
"Apache-2.0"
] | null | null | null | lib/sunshines_battleacademy/web/views/error_helpers.ex | djdduty/sunshines_battleacademy | bcc085a3a6a519fc1591d00a39783523644f87a0 | [
"Apache-2.0"
] | null | null | null | defmodule SunshinesBattleacademy.Web.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
if error = form.errors[field] do
content_tag :span, translate_error(error), class: "help-block"
end
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# Because error messages were defined within Ecto, we must
# call the Gettext module passing our Gettext backend. We
# also use the "errors" domain as translations are placed
# in the errors.po file.
# Ecto will pass the :count keyword if the error message is
# meant to be pluralized.
# On your own code and templates, depending on whether you
# need the message to be pluralized or not, this could be
# written simply as:
#
# dngettext "errors", "1 file", "%{count} files", count
# dgettext "errors", "is invalid"
#
if count = opts[:count] do
Gettext.dngettext(SunshinesBattleacademy.Web.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(SunshinesBattleacademy.Web.Gettext, "errors", msg, opts)
end
end
end
| 31.292683 | 92 | 0.679657 |
9ea9579f85e5166094aa1c22edfa51077cd9f4e2 | 15,755 | ex | Elixir | deps/ecto/lib/ecto/adapters/sql.ex | scouten/crash_esqlite_case | 986f0b0721399c7ed520f6b9df133980906e3f51 | [
"MIT"
] | null | null | null | deps/ecto/lib/ecto/adapters/sql.ex | scouten/crash_esqlite_case | 986f0b0721399c7ed520f6b9df133980906e3f51 | [
"MIT"
] | null | null | null | deps/ecto/lib/ecto/adapters/sql.ex | scouten/crash_esqlite_case | 986f0b0721399c7ed520f6b9df133980906e3f51 | [
"MIT"
] | null | null | null | defmodule Ecto.Adapters.SQL do
@moduledoc """
Behaviour and implementation for SQL adapters.
The implementation for SQL adapter provides a
pooled based implementation of SQL and also expose
a query function to developers.
Developers that use `Ecto.Adapters.SQL` should implement
a connection module with specifics on how to connect
to the database and also how to translate the queries
to SQL. See `Ecto.Adapters.SQL.Connection` for more info.
"""
@doc false
defmacro __using__(adapter) do
quote do
@behaviour Ecto.Adapter
@behaviour Ecto.Adapter.Migration
@behaviour Ecto.Adapter.Transaction
@conn __MODULE__.Connection
@adapter unquote(adapter)
## Worker
@doc false
defmacro __before_compile__(_env) do
:ok
end
@doc false
def start_link(repo, opts) do
{:ok, _} = Application.ensure_all_started(@adapter)
Ecto.Adapters.SQL.start_link(@conn, @adapter, repo, opts)
end
## Types
@doc false
def autogenerate(:id), do: nil
def autogenerate(:embed_id), do: Ecto.UUID.autogenerate()
def autogenerate(:binary_id), do: Ecto.UUID.autogenerate()
@doc false
def loaders({:embed, _} = type, _), do: [&Ecto.Adapters.SQL.load_embed(type, &1)]
def loaders(:binary_id, type), do: [Ecto.UUID, type]
def loaders(_, type), do: [type]
@doc false
def dumpers({:embed, _} = type, _), do: [&Ecto.Adapters.SQL.dump_embed(type, &1)]
def dumpers(:binary_id, type), do: [type, Ecto.UUID]
def dumpers(_, type), do: [type]
## Query
@doc false
def prepare(:all, query), do: {:cache, @conn.all(query)}
def prepare(:update_all, query), do: {:cache, @conn.update_all(query)}
def prepare(:delete_all, query), do: {:cache, @conn.delete_all(query)}
@doc false
def execute(repo, meta, prepared, params, preprocess, opts) do
Ecto.Adapters.SQL.execute(repo, meta, prepared, params, preprocess, opts)
end
@doc false
def insert_all(repo, %{source: {prefix, source}}, header, rows, returning, opts) do
Ecto.Adapters.SQL.insert_all(repo, @conn, prefix, source, header, rows, returning, opts)
end
@doc false
def insert(repo, %{source: {prefix, source}}, params, returning, opts) do
{fields, values} = :lists.unzip(params)
sql = @conn.insert(prefix, source, fields, [fields], returning)
Ecto.Adapters.SQL.struct(repo, @conn, sql, values, returning, opts)
end
@doc false
def update(repo, %{source: {prefix, source}}, fields, filter, returning, opts) do
{fields, values1} = :lists.unzip(fields)
{filter, values2} = :lists.unzip(filter)
sql = @conn.update(prefix, source, fields, filter, returning)
Ecto.Adapters.SQL.struct(repo, @conn, sql, values1 ++ values2, returning, opts)
end
@doc false
def delete(repo, %{source: {prefix, source}}, filter, opts) do
{filter, values} = :lists.unzip(filter)
sql = @conn.delete(prefix, source, filter, [])
Ecto.Adapters.SQL.struct(repo, @conn, sql, values, [], opts)
end
## Transaction
@doc false
def transaction(repo, opts, fun) do
Ecto.Adapters.SQL.transaction(repo, opts, fun)
end
@doc false
def rollback(repo, value) do
Ecto.Adapters.SQL.rollback(repo, value)
end
## Migration
@doc false
def execute_ddl(repo, definition, opts) do
sqls = @conn.execute_ddl(definition)
for sql <- List.wrap(sqls) do
Ecto.Adapters.SQL.query!(repo, sql, [], opts)
end
:ok
end
defoverridable [prepare: 2, execute: 6, insert: 5, update: 6, delete: 4, insert_all: 6,
execute_ddl: 3, loaders: 2, dumpers: 2, autogenerate: 1]
end
end
alias Ecto.LogProxy
alias Ecto.LogQuery
alias Ecto.Adapters.SQL.Sandbox
@doc """
Converts the given query to SQL according to its kind and the
adapter in the given repository.
## Examples
The examples below are meant for reference. Each adapter will
return a different result:
Ecto.Adapters.SQL.to_sql(:all, repo, Post)
{"SELECT p.id, p.title, p.inserted_at, p.created_at FROM posts as p", []}
Ecto.Adapters.SQL.to_sql(:update_all, repo,
from(p in Post, update: [set: [title: ^"hello"]]))
{"UPDATE posts AS p SET title = $1", ["hello"]}
"""
@spec to_sql(:all | :update_all | :delete_all, Ecto.Repo.t, Ecto.Queryable.t) ::
{String.t, [term]}
def to_sql(kind, repo, queryable) do
adapter = repo.__adapter__
{_meta, prepared, params} =
Ecto.Queryable.to_query(queryable)
|> Ecto.Query.Planner.query(kind, repo, adapter)
{prepared, params}
end
@doc """
Same as `query/4` but raises on invalid queries.
"""
@spec query!(Ecto.Repo.t, String.t, [term], Keyword.t) ::
%{rows: nil | [tuple], num_rows: non_neg_integer} | no_return
def query!(repo, sql, params, opts \\ []) do
query!(repo, sql, params, fn x -> x end, opts)
end
defp query!(repo, sql, params, mapper, opts) do
case query(repo, sql, params, mapper, opts) do
{:ok, result} -> result
{:error, err} -> raise err
end
end
@doc """
Runs custom SQL query on given repo.
In case of success, it must return an `:ok` tuple containing
a map with at least two keys:
* `:num_rows` - the number of rows affected
* `:rows` - the result set as a list. `nil` may be returned
instead of the list if the command does not yield any row
as result (but still yields the number of affected rows,
like a `delete` command without returning would)
## Options
* `:timeout` - The time in milliseconds to wait for a query to finish,
`:infinity` will wait indefinitely. (default: 15_000)
* `:pool_timeout` - The time in milliseconds to wait for a call to the pool
to finish, `:infinity` will wait indefinitely. (default: 5_000)
* `:log` - When false, does not log the query
## Examples
iex> Ecto.Adapters.SQL.query(MyRepo, "SELECT $1::integer + $2", [40, 2])
{:ok, %{rows: [{42}], num_rows: 1}}
"""
@spec query(Ecto.Repo.t, String.t, [term], Keyword.t) ::
{:ok, %{rows: nil | [tuple], num_rows: non_neg_integer}} | {:error, Exception.t}
def query(repo, sql, params, opts \\ []) do
query(repo, sql, params, fn x -> x end, opts)
end
defp query(repo, sql, params, mapper, opts) do
{pool, default_opts} = repo.__pool__
conn = get_conn(pool) || pool
connection = Module.concat(repo.__adapter__, Connection)
query = connection.query(sql)
opts =
opts ++ default_opts
|> Keyword.put(:logger, &repo.log/1)
|> Keyword.put(:encode_mapper, &connection.encode_mapper/1)
|> Keyword.put(:decode_mapper, mapper)
do_query(conn, query, params, opts)
end
defp do_query(%DBConnection{proxy_mod: proxy} = conn, query, params, opts) do
do_query(proxy, conn, query, params, opts)
end
defp do_query(pool, query, params, opts) do
proxy = Keyword.get(opts, :proxy)
do_query(proxy, pool, query, params, opts)
end
defp do_query(LogProxy, conn, query, params, opts) do
log_query = %LogQuery{query: query, params: params}
DBConnection.query(conn, log_query, params, opts)
end
defp do_query(_, conn, query, params, opts) do
DBConnection.query(conn, query, params, opts)
end
@doc ~S"""
Starts a transaction for test.
This function works by starting a transaction and storing the connection
back in the pool with an open transaction. On every test, we restart
the test transaction, rolling back to the appropriate savepoint.
**IMPORTANT:** Test transactions only work if the connection pool is
`Ecto.Adapters.SQL.Sandbox`
## Example
The first step is to configure your database to use the
`Ecto.Adapters.SQL.Sandbox` pool. You set those options in your
`config/config.exs`:
config :my_app, Repo,
pool: Ecto.Adapters.SQL.Sandbox
Since you don't want those options in your production database, we
typically recommend creating a `config/test.exs` and adding the
following to the bottom of your `config/config.exs` file:
import_config "#{Mix.env}.exs"
Now with the test database properly configured, you can write
transactional tests:
# At the end of your test_helper.exs
# From now, all tests happen inside a transaction
Ecto.Adapters.SQL.begin_test_transaction(TestRepo)
defmodule PostTest do
# Tests that use the shared repository cannot be async
use ExUnit.Case
setup do
# Go back to a clean slate at the beginning of every test
Ecto.Adapters.SQL.restart_test_transaction(TestRepo)
:ok
end
test "create comment" do
assert %Post{} = TestRepo.insert!(%Post{})
end
end
In some cases, you may want to start the test transaction only
for specific tests and then roll it back. You can do it as:
defmodule PostTest do
# Tests that use the shared repository cannot be async
use ExUnit.Case
setup_all do
# Wrap this case in a transaction
Ecto.Adapters.SQL.begin_test_transaction(TestRepo)
# Roll it back once we are done
on_exit fn ->
Ecto.Adapters.SQL.rollback_test_transaction(TestRepo)
end
:ok
end
setup do
# Go back to a clean slate at the beginning of every test
Ecto.Adapters.SQL.restart_test_transaction(TestRepo)
:ok
end
test "create comment" do
assert %Post{} = TestRepo.insert!(%Post{})
end
end
"""
@spec begin_test_transaction(Ecto.Repo.t, Keyword.t) :: :ok
def begin_test_transaction(repo, opts \\ []) do
test_transaction(:begin, repo, opts)
end
@doc """
Restarts a test transaction, see `begin_test_transaction/2`.
"""
@spec restart_test_transaction(Ecto.Repo.t, Keyword.t) :: :ok
def restart_test_transaction(repo, opts \\ []) do
test_transaction(:restart, repo, opts)
end
@spec rollback_test_transaction(Ecto.Repo.t, Keyword.t) :: :ok
def rollback_test_transaction(repo, opts \\ []) do
test_transaction(:rollback, repo, opts)
end
defp test_transaction(req, repo, opts) do
{pool, defaults} = repo.__pool__
opts = opts ++ defaults
case Keyword.fetch!(opts, :pool) do
Sandbox ->
query = %Sandbox.Query{request: req}
DBConnection.execute!(pool, query, [], opts)
pool_mod ->
raise """
cannot #{req} test transaction with pool #{inspect pool_mod}.
In order to use test transactions with Ecto SQL, you need to
configure your repository to use #{inspect Sandbox}:
pool: #{inspect Sandbox}
"""
end
end
## Worker
@doc false
def start_link(connection, adapter, repo, opts) do
unless Code.ensure_loaded?(connection) do
raise """
could not find #{inspect connection}.
Please verify you have added #{inspect adapter} as a dependency:
{#{inspect adapter}, ">= 0.0.0"}
And remember to recompile Ecto afterwards by cleaning the current build:
mix deps.clean ecto
"""
end
{mod, opts} = connection.connection(opts)
opts =
if function_exported?(repo, :after_connect, 1) do
IO.puts :stderr, "warning: #{inspect repo}.after_connect/1 is deprecated. If you want to " <>
"perform some action after connecting, please set after_connect: {module, fun, args} " <>
"in your repository configuration"
Keyword.put(opts, :after_connect, {repo, :after_connect, []})
else
opts
end
DBConnection.start_link(mod, opts)
end
## Types
@doc false
def load_embed(type, value) do
Ecto.Type.load(type, value, fn
{:embed, _} = type, value -> load_embed(type, value)
type, value -> Ecto.Type.cast(type, value)
end)
end
@doc false
def dump_embed(type, value) do
Ecto.Type.dump(type, value, fn
{:embed, _} = type, value -> dump_embed(type, value)
_type, value -> {:ok, value}
end)
end
## Query
@doc false
def insert_all(repo, conn, prefix, source, header, rows, returning, opts) do
{rows, params} = unzip_inserts(header, rows)
sql = conn.insert(prefix, source, header, rows, returning)
%{rows: rows, num_rows: num} = query!(repo, sql, Enum.reverse(params), nil, opts)
{num, rows}
end
defp unzip_inserts(header, rows) do
Enum.map_reduce rows, [], fn fields, params ->
Enum.map_reduce header, params, fn key, acc ->
case :lists.keyfind(key, 1, fields) do
{^key, value} -> {key, [value|acc]}
false -> {nil, acc}
end
end
end
end
@doc false
def execute(repo, _meta, prepared, params, nil, opts) do
%{rows: rows, num_rows: num} = query!(repo, prepared, params, nil, opts)
{num, rows}
end
def execute(repo, %{select: %{fields: fields}}, prepared, params, preprocess, opts) do
mapper = &process_row(&1, preprocess, fields)
%{rows: rows, num_rows: num} = query!(repo, prepared, params, mapper, opts)
{num, rows}
end
@doc false
def struct(repo, conn, sql, values, returning, opts) do
case query(repo, sql, values, fn x -> x end, opts) do
{:ok, %{rows: nil, num_rows: 1}} ->
{:ok, []}
{:ok, %{rows: [values], num_rows: 1}} ->
{:ok, Enum.zip(returning, values)}
{:ok, %{num_rows: 0}} ->
{:error, :stale}
{:error, err} ->
case conn.to_constraints(err) do
[] -> raise err
constraints -> {:invalid, constraints}
end
end
end
defp process_row(row, preprocess, fields) do
Enum.map_reduce(fields, row, fn
{:&, _, [_, fields]} = field, acc ->
case split_and_not_nil(acc, length(fields), true, []) do
{nil, rest} -> {nil, rest}
{val, rest} -> {preprocess.(field, val, nil), rest}
end
field, [h|t] ->
{preprocess.(field, h, nil), t}
end) |> elem(0)
end
defp split_and_not_nil(rest, 0, true, _acc), do: {nil, rest}
defp split_and_not_nil(rest, 0, false, acc), do: {:lists.reverse(acc), rest}
defp split_and_not_nil([nil|t], count, all_nil?, acc) do
split_and_not_nil(t, count - 1, all_nil?, [nil|acc])
end
defp split_and_not_nil([h|t], count, _all_nil?, acc) do
split_and_not_nil(t, count - 1, false, [h|acc])
end
## Transactions
@doc false
def transaction(repo, opts, fun) do
{pool, default_opts} = repo.__pool__
opts = opts ++ default_opts
case get_conn(pool) do
nil ->
do_transaction(pool, opts, fun)
conn ->
DBConnection.transaction(conn, fn(_) -> fun.() end, opts)
end
end
defp do_transaction(pool, opts, fun) do
run = fn(conn) ->
try do
put_conn(pool, conn)
fun.()
after
delete_conn(pool)
end
end
DBConnection.transaction(pool, run, opts)
end
@doc false
def rollback(repo, value) do
{pool, _} = repo.__pool__
case get_conn(pool) do
nil -> raise "cannot call rollback outside of transaction"
conn -> DBConnection.rollback(conn, value)
end
end
## Connection helpers
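# The connection checked out by `transaction/3` is tracked in the process
# dictionary, keyed by pool, so queries issued inside the transaction reuse it
# instead of checking out a new connection.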
defp put_conn(pool, conn) do
_ = Process.put(key(pool), conn)
:ok
end
defp get_conn(pool) do
Process.get(key(pool))
end
defp delete_conn(pool) do
_ = Process.delete(key(pool))
:ok
end
defp key(pool), do: {__MODULE__, pool}
end
| 30.066794 | 111 | 0.626976 |
9ea96895b664d0260b93072f486cf07c26d58c55 | 10,487 | exs | Elixir | integration_test/cases/preload.exs | ashneyderman/ecto | 16f27f64c5ca2480568fad10e40c26522ffbf793 | [
"Apache-2.0"
] | null | null | null | integration_test/cases/preload.exs | ashneyderman/ecto | 16f27f64c5ca2480568fad10e40c26522ffbf793 | [
"Apache-2.0"
] | null | null | null | integration_test/cases/preload.exs | ashneyderman/ecto | 16f27f64c5ca2480568fad10e40c26522ffbf793 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Integration.PreloadTest do
use Ecto.Integration.Case
require Ecto.Integration.TestRepo, as: TestRepo
import Ecto.Query
alias Ecto.Integration.Post
alias Ecto.Integration.Comment
alias Ecto.Integration.Permalink
alias Ecto.Integration.User
alias Ecto.Integration.Custom
test "preload empty" do
assert TestRepo.preload([], :anything_goes) == []
end
test "preload has_many" do
p1 = TestRepo.insert!(%Post{title: "1"})
p2 = TestRepo.insert!(%Post{title: "2"})
p3 = TestRepo.insert!(%Post{title: "3"})
%Comment{id: cid1} = TestRepo.insert!(%Comment{text: "1", post_id: p1.id})
%Comment{id: cid2} = TestRepo.insert!(%Comment{text: "2", post_id: p1.id})
%Comment{id: cid3} = TestRepo.insert!(%Comment{text: "3", post_id: p2.id})
%Comment{id: cid4} = TestRepo.insert!(%Comment{text: "4", post_id: p2.id})
assert %Ecto.Association.NotLoaded{} = p1.comments
assert [p3, p1, p2] = TestRepo.preload([p3, p1, p2], :comments)
assert [%Comment{id: ^cid1}, %Comment{id: ^cid2}] = p1.comments
assert [%Comment{id: ^cid3}, %Comment{id: ^cid4}] = p2.comments
assert [] = p3.comments
end
test "preload has_one" do
p1 = TestRepo.insert!(%Post{title: "1"})
p2 = TestRepo.insert!(%Post{title: "2"})
p3 = TestRepo.insert!(%Post{title: "3"})
%Permalink{id: pid1} = TestRepo.insert!(%Permalink{url: "1", post_id: p1.id})
%Permalink{} = TestRepo.insert!(%Permalink{url: "2", post_id: nil})
%Permalink{id: pid3} = TestRepo.insert!(%Permalink{url: "3", post_id: p3.id})
assert %Ecto.Association.NotLoaded{} = p1.permalink
assert %Ecto.Association.NotLoaded{} = p2.permalink
assert [p3, p1, p2] = TestRepo.preload([p3, p1, p2], :permalink)
assert %Permalink{id: ^pid1} = p1.permalink
assert nil = p2.permalink
assert %Permalink{id: ^pid3} = p3.permalink
end
test "preload belongs_to" do
%Post{id: pid1} = TestRepo.insert!(%Post{title: "1"})
TestRepo.insert!(%Post{title: "2"})
%Post{id: pid3} = TestRepo.insert!(%Post{title: "3"})
pl1 = TestRepo.insert!(%Permalink{url: "1", post_id: pid1})
pl2 = TestRepo.insert!(%Permalink{url: "2", post_id: nil})
pl3 = TestRepo.insert!(%Permalink{url: "3", post_id: pid3})
assert %Ecto.Association.NotLoaded{} = pl1.post
assert [pl3, pl1, pl2] = TestRepo.preload([pl3, pl1, pl2], :post)
assert %Post{id: ^pid1} = pl1.post
assert nil = pl2.post
assert %Post{id: ^pid3} = pl3.post
end
test "preload has_many through" do
%Post{id: pid1} = p1 = TestRepo.insert!(%Post{})
%Post{id: pid2} = p2 = TestRepo.insert!(%Post{})
%User{id: uid1} = TestRepo.insert!(%User{})
%User{id: uid2} = TestRepo.insert!(%User{})
%Comment{} = TestRepo.insert!(%Comment{post_id: pid1, author_id: uid1})
%Comment{} = TestRepo.insert!(%Comment{post_id: pid1, author_id: uid1})
%Comment{} = TestRepo.insert!(%Comment{post_id: pid1, author_id: uid2})
%Comment{} = TestRepo.insert!(%Comment{post_id: pid2, author_id: uid2})
p1 = TestRepo.preload(p1, :comments_authors)
# Through was preloaded
[u1, u2] = p1.comments_authors
assert u1.id == uid1
assert u2.id == uid2
# But we also preloaded everything along the way
assert [c1, c2, c3] = p1.comments
assert c1.author.id == uid1
assert c2.author.id == uid1
assert c3.author.id == uid2
[p1, p2] = TestRepo.preload([p1, p2], :comments_authors)
# Through was preloaded
[u1, u2] = p1.comments_authors
assert u1.id == uid1
assert u2.id == uid2
[u2] = p2.comments_authors
assert u2.id == uid2
# But we also preloaded everything along the way
assert [c1, c2, c3] = p1.comments
assert c1.author.id == uid1
assert c2.author.id == uid1
assert c3.author.id == uid2
assert [c4] = p2.comments
assert c4.author.id == uid2
end
test "preload has_one through" do
%Post{id: pid1} = TestRepo.insert!(%Post{})
%Post{id: pid2} = TestRepo.insert!(%Post{})
%Permalink{id: lid1} = TestRepo.insert!(%Permalink{post_id: pid1})
%Permalink{id: lid2} = TestRepo.insert!(%Permalink{post_id: pid2})
%Comment{} = c1 = TestRepo.insert!(%Comment{post_id: pid1})
%Comment{} = c2 = TestRepo.insert!(%Comment{post_id: pid1})
%Comment{} = c3 = TestRepo.insert!(%Comment{post_id: pid2})
[c1, c2, c3] = TestRepo.preload([c1, c2, c3], :post_permalink)
# Through was preloaded
assert c1.post.id == pid1
assert c1.post.permalink.id == lid1
assert c1.post_permalink.id == lid1
assert c2.post.id == pid1
assert c2.post.permalink.id == lid1
assert c2.post_permalink.id == lid1
assert c3.post.id == pid2
assert c3.post.permalink.id == lid2
assert c3.post_permalink.id == lid2
end
test "preload has_many through-through" do
%Post{id: pid1} = TestRepo.insert!(%Post{})
%Post{id: pid2} = TestRepo.insert!(%Post{})
%Permalink{} = l1 = TestRepo.insert!(%Permalink{post_id: pid1})
%Permalink{} = l2 = TestRepo.insert!(%Permalink{post_id: pid2})
%User{id: uid1} = TestRepo.insert!(%User{})
%User{id: uid2} = TestRepo.insert!(%User{})
%Comment{} = TestRepo.insert!(%Comment{post_id: pid1, author_id: uid1})
%Comment{} = TestRepo.insert!(%Comment{post_id: pid1, author_id: uid1})
%Comment{} = TestRepo.insert!(%Comment{post_id: pid1, author_id: uid2})
%Comment{} = TestRepo.insert!(%Comment{post_id: pid2, author_id: uid2})
[l1, l2] = TestRepo.preload([l1, l2], :post_comments_authors)
# Through was preloaded
[u1, u2] = l1.post_comments_authors
assert u1.id == uid1
assert u2.id == uid2
[u2] = l2.post_comments_authors
assert u2.id == uid2
# But we also preloaded everything along the way
assert l1.post.id == pid1
assert l1.post.comments != []
assert l2.post.id == pid2
assert l2.post.comments != []
end
test "preload has_many through nested" do
%Post{id: pid1} = p1 = TestRepo.insert!(%Post{})
%Post{id: pid2} = p2 = TestRepo.insert!(%Post{})
%User{id: uid1} = TestRepo.insert!(%User{})
%User{id: uid2} = TestRepo.insert!(%User{})
%Comment{} = c1 = TestRepo.insert!(%Comment{post_id: pid1, author_id: uid1})
%Comment{} = c2 = TestRepo.insert!(%Comment{post_id: pid1, author_id: uid1})
%Comment{} = c3 = TestRepo.insert!(%Comment{post_id: pid1, author_id: uid2})
%Comment{} = c4 = TestRepo.insert!(%Comment{post_id: pid2, author_id: uid2})
[p1, p2] = TestRepo.preload([p1, p2], [:permalink, comments_authors: :comments])
# Through was preloaded
[u1, u2] = p1.comments_authors
assert u1.id == uid1
assert u2.id == uid2
assert u1.comments == [c1, c2]
[u2] = p2.comments_authors
assert u2.id == uid2
assert u2.comments == [c3, c4]
end
test "preload belongs_to with shared assocs" do
%Post{id: pid1} = TestRepo.insert!(%Post{title: "1"})
%Post{id: pid2} = TestRepo.insert!(%Post{title: "2"})
c1 = TestRepo.insert!(%Comment{text: "1", post_id: pid1})
c2 = TestRepo.insert!(%Comment{text: "2", post_id: pid1})
c3 = TestRepo.insert!(%Comment{text: "3", post_id: pid2})
assert [c3, c1, c2] = TestRepo.preload([c3, c1, c2], :post)
assert %Post{id: ^pid1} = c1.post
assert %Post{id: ^pid1} = c2.post
assert %Post{id: ^pid2} = c3.post
end
test "preload nested" do
p1 = TestRepo.insert!(%Post{title: "1"})
p2 = TestRepo.insert!(%Post{title: "2"})
TestRepo.insert!(%Comment{text: "1", post_id: p1.id})
TestRepo.insert!(%Comment{text: "2", post_id: p1.id})
TestRepo.insert!(%Comment{text: "3", post_id: p2.id})
TestRepo.insert!(%Comment{text: "4", post_id: p2.id})
assert [p2, p1] = TestRepo.preload([p2, p1], [comments: :post])
assert [c1, c2] = p1.comments
assert [c3, c4] = p2.comments
assert p1.id == c1.post.id
assert p1.id == c2.post.id
assert p2.id == c3.post.id
assert p2.id == c4.post.id
end
test "preload has_many with no associated entries" do
p = TestRepo.insert!(%Post{title: "1"})
p = TestRepo.preload(p, :comments)
assert p.title == "1"
assert p.comments == []
end
test "preload has_one with no associated entries" do
p = TestRepo.insert!(%Post{title: "1"})
p = TestRepo.preload(p, :permalink)
assert p.title == "1"
assert p.permalink == nil
end
test "preload belongs_to with no associated entry" do
c = TestRepo.insert!(%Comment{text: "1"})
c = TestRepo.preload(c, :post)
assert c.text == "1"
assert c.post == nil
end
test "preload with binary_id" do
c = TestRepo.insert!(%Custom{})
u = TestRepo.insert!(%User{custom_id: c.bid})
u = TestRepo.preload(u, :custom)
assert u.custom.bid == c.bid
end
test "preload skips already loaded" do
p1 = TestRepo.insert!(%Post{title: "1"})
p2 = TestRepo.insert!(%Post{title: "2"})
%Comment{id: _} = TestRepo.insert!(%Comment{text: "1", post_id: p1.id})
%Comment{id: cid2} = TestRepo.insert!(%Comment{text: "2", post_id: p2.id})
assert %Ecto.Association.NotLoaded{} = p1.comments
p1 = %{p1 | comments: []}
assert [p1, p2] = TestRepo.preload([p1, p2], :comments)
assert [] = p1.comments
assert [%Comment{id: ^cid2}] = p2.comments
end
test "preload keyword query" do
p1 = TestRepo.insert!(%Post{title: "1"})
p2 = TestRepo.insert!(%Post{title: "2"})
TestRepo.insert!(%Post{title: "3"})
%Comment{id: cid1} = TestRepo.insert!(%Comment{text: "1", post_id: p1.id})
%Comment{id: cid2} = TestRepo.insert!(%Comment{text: "2", post_id: p1.id})
%Comment{id: cid3} = TestRepo.insert!(%Comment{text: "3", post_id: p2.id})
%Comment{id: cid4} = TestRepo.insert!(%Comment{text: "4", post_id: p2.id})
# Regular query
query = from(p in Post, preload: [:comments], select: p)
assert [p1, p2, p3] = TestRepo.all(query)
assert [%Comment{id: ^cid1}, %Comment{id: ^cid2}] = p1.comments
assert [%Comment{id: ^cid3}, %Comment{id: ^cid4}] = p2.comments
assert [] = p3.comments
# Now let's use an interpolated preload too
comments = [:comments]
query = from(p in Post, preload: ^comments, select: {0, [p], 1, 2})
posts = TestRepo.all(query)
[p1, p2, p3] = Enum.map(posts, fn {0, [p], 1, 2} -> p end)
assert [%Comment{id: ^cid1}, %Comment{id: ^cid2}] = p1.comments
assert [%Comment{id: ^cid3}, %Comment{id: ^cid4}] = p2.comments
assert [] = p3.comments
end
end
| 34.048701 | 84 | 0.631925 |
9ea96f05c46e41ce2a4b46865ee49be4dca0280a | 19,732 | exs | Elixir | test/teslamate/vehicles/vehicle/driving_test.exs | qianbin/teslamate | ef673e88824c24c934b9fc7b155ad8766c1377bb | [
"MIT"
] | 1 | 2021-10-31T13:22:49.000Z | 2021-10-31T13:22:49.000Z | test/teslamate/vehicles/vehicle/driving_test.exs | qianbin/teslamate | ef673e88824c24c934b9fc7b155ad8766c1377bb | [
"MIT"
] | null | null | null | test/teslamate/vehicles/vehicle/driving_test.exs | qianbin/teslamate | ef673e88824c24c934b9fc7b155ad8766c1377bb | [
"MIT"
] | null | null | null | defmodule TeslaMate.Vehicles.Vehicle.DrivingTest do
use TeslaMate.VehicleCase, async: true
alias TeslaMate.Log.{Drive, Car}
test "logs a full drive", %{test: name} do
now = DateTime.utc_now()
now_ts = DateTime.to_unix(now, :millisecond)
events = [
{:ok, online_event()},
{:ok, drive_event(now_ts + 1, "D", 60)},
{:ok, drive_event(now_ts + 2, "N", 30)},
{:ok, drive_event(now_ts + 3, "R", -5)},
{:ok, online_event(drive_state: %{timestamp: now_ts + 4, latitude: 0.2, longitude: 0.2})}
]
:ok = start_vehicle(name, events)
start_date = DateTime.from_unix!(now_ts + 1, :millisecond)
assert_receive {:start_state, car, :online, date: ^start_date}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online, since: s0}}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving, since: s1}}}
assert DateTime.diff(s0, s1, :nanosecond) < 0
assert_receive {:start_drive, ^car}
assert_receive {:insert_position, drive, %{longitude: 0.1, speed: 97}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving, since: ^s1}}}
assert_receive {:insert_position, ^drive, %{longitude: 0.1, speed: 48}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving, since: ^s1}}}
assert_receive {:insert_position, ^drive, %{longitude: 0.1, speed: -8}}
assert_receive {:insert_position, ^drive, %{longitude: 0.2}}
assert_receive {:close_drive, ^drive, lookup_address: true}
start_date = DateTime.from_unix!(now_ts + 4, :millisecond)
assert_receive {:start_state, ^car, :online, date: ^start_date}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online, since: s2}}}
assert DateTime.diff(s1, s2, :nanosecond) < 0
refute_receive _
end
@tag :capture_log
test "handles a connection loss when driving", %{test: name} do
now = DateTime.utc_now()
now_ts = DateTime.to_unix(now, :millisecond)
events = [
{:ok, online_event()},
{:ok, online_event(drive_state: %{timestamp: now_ts, latitude: 0.0, longitude: 0.0})},
{:ok, drive_event(now_ts + 1, "D", 50)},
{:error, :vehicle_unavailable},
{:ok, %TeslaApi.Vehicle{state: "offline"}},
{:error, :vehicle_unavailable},
{:ok, %TeslaApi.Vehicle{state: "unknown"}},
{:ok, drive_event(now_ts + 2, "D", 55)},
{:ok, drive_event(now_ts + 3, "D", 40)},
{:ok, online_event(drive_state: %{timestamp: now_ts + 4, latitude: 0.2, longitude: 0.2})}
]
:ok = start_vehicle(name, events)
start_date = DateTime.from_unix!(now_ts, :millisecond)
assert_receive {:start_state, car, :online, date: ^start_date}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving}}}
assert_receive {:start_drive, ^car}
assert_receive {:insert_position, drive, %{longitude: 0.1, speed: 80}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving}}}
assert_receive {:insert_position, ^drive, %{longitude: 0.1, speed: 89}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving}}}
assert_receive {:insert_position, ^drive, %{longitude: 0.1, speed: 64}}
assert_receive {:insert_position, ^drive, %{longitude: 0.2}}
assert_receive {:close_drive, ^drive, lookup_address: true}
start_date = DateTime.from_unix!(now_ts + 4, :millisecond)
assert_receive {:start_state, ^car, :online, date: ^start_date}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
refute_receive _
end
test "transitions directly into driving state", %{test: name} do
now = DateTime.utc_now()
now_ts = DateTime.to_unix(now, :millisecond)
events = [
{:ok, online_event()},
{:ok, drive_event(now_ts, "N", 0)}
]
:ok = start_vehicle(name, events)
start_date = DateTime.from_unix!(now_ts, :millisecond)
assert_receive {:start_state, car, :online, date: ^start_date}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving}}}
assert_receive {:start_drive, ^car}
assert_receive {:insert_position, drive, %{longitude: 0.1, speed: 0}}
assert_receive {:insert_position, ^drive, %{longitude: 0.1, speed: 0}}
assert_receive {:insert_position, ^drive, %{longitude: 0.1, speed: 0}}
# ...
refute_received _
end
test "shift state P does not trigger driving state", %{test: name} do
now = DateTime.utc_now()
now_ts = DateTime.to_unix(now, :millisecond)
events = [
{:ok, online_event()},
{:ok, drive_event(now_ts, "P", 0)}
]
:ok = start_vehicle(name, events)
date = DateTime.from_unix!(now_ts, :millisecond)
assert_receive {:start_state, car, :online, date: ^date}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
refute_receive _
end
test "shift_state P ends the drive", %{test: name} do
now = DateTime.utc_now()
now_ts = DateTime.to_unix(now, :millisecond)
events = [
{:ok, online_event()},
{:ok, drive_event(now_ts, "D", 5)},
{:ok, drive_event(now_ts + 1, "D", 15)},
{:ok, drive_event(now_ts + 2, "P", 0)}
]
:ok = start_vehicle(name, events)
start_date = DateTime.from_unix!(now_ts, :millisecond)
assert_receive {:start_state, car, :online, date: ^start_date}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving}}}
assert_receive {:start_drive, ^car}
assert_receive {:insert_position, %Drive{id: 111} = drive, %{longitude: 0.1, speed: 8}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving}}}
assert_receive {:insert_position, ^drive, %{longitude: 0.1, speed: 24}}
assert_receive {:insert_position, ^drive, %{longitude: 0.1, speed: 0}}
assert_receive {:close_drive, ^drive, lookup_address: true}
start_date = DateTime.from_unix!(now_ts + 2, :millisecond)
assert_receive {:start_state, ^car, :online, date: ^start_date}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
refute_receive _
end
describe "when offline" do
defp drive_event(ts, pos, speed, lvl, range, added) do
{:ok,
online_event(
drive_state: %{
timestamp: ts,
latitude: pos,
longitude: pos,
shift_state: "D",
speed: speed
},
charge_state: %{
battery_level: lvl,
ideal_battery_range: range,
timestamp: ts,
charge_energy_added: added
}
)}
end
@tag :capture_log
test "interprets a significant offline period while driving with SOC gains as charge session",
%{test: name} do
now = DateTime.utc_now()
now_ts = DateTime.to_unix(now, :millisecond)
events =
[
{:ok, online_event()},
drive_event(now_ts, 0.1, 30, 20, 200, 0),
drive_event(now_ts + 1, 0.1, 30, 20, 200, 0)
] ++
List.duplicate({:ok, %TeslaApi.Vehicle{state: "offline"}}, 20) ++
[
drive_event(now_ts + :timer.minutes(5), 0.2, 20, 80, 300, 45),
{:ok,
online_event(
drive_state: %{
timestamp: now_ts + :timer.minutes(5) + 1,
latitude: 0.3,
longitude: 0.3
}
)}
]
:ok = start_vehicle(name, events)
date = DateTime.from_unix!(now_ts, :millisecond)
assert_receive {:start_state, car, :online, date: ^date}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving}}}
assert_receive {:start_drive, ^car}
assert_receive {:insert_position, drive, %{longitude: 0.1, speed: 48}}
assert_receive {:insert_position, ^drive, %{longitude: 0.1, speed: 48}}
refute_receive _, 50
# Logs previous drive because of timeout
assert_receive {:close_drive, ^drive, lookup_address: true}, 300
# Logs a charge session based on the available data
start_date =
now
|> DateTime.add(1, :millisecond)
|> DateTime.truncate(:millisecond)
end_date =
now
|> DateTime.add(5 * 60, :second)
|> DateTime.truncate(:millisecond)
assert_receive {:start_charging_process, ^car, %{latitude: 0.1, longitude: 0.1},
lookup_address: true}
assert_receive {:insert_charge, charging_id,
%{date: ^start_date, charge_energy_added: 0, charger_power: 0}}
assert_receive {:insert_charge, ^charging_id,
%{date: ^end_date, charge_energy_added: 45, charger_power: 0}}
assert_receive {:complete_charging_process, ^charging_id}
d1 = DateTime.from_unix!(now_ts + :timer.minutes(5), :millisecond)
assert_receive {:start_state, ^car, :online, date: ^d1}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
assert_receive {:start_drive, ^car}
assert_receive {:insert_position, drive, %{longitude: 0.2, speed: 32}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving}}}
assert_receive {:insert_position, ^drive, %{longitude: 0.3}}
assert_receive {:close_drive, ^drive, lookup_address: true}
d2 = DateTime.from_unix!(now_ts + :timer.minutes(5) + 1, :millisecond)
assert_receive {:start_state, ^car, :online, date: ^d2}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
refute_receive _
end
@tag :capture_log
test "times out a drive when being offline for to long",
%{test: name} do
now_ts = DateTime.utc_now() |> DateTime.to_unix(:millisecond)
events = [
{:ok, online_event()},
drive_event(now_ts, 0.1, 30, 20, 200, nil),
drive_event(now_ts, 0.1, 30, 20, 200, nil),
{:ok, %TeslaApi.Vehicle{state: "offline"}}
]
:ok = start_vehicle(name, events)
date = DateTime.from_unix!(now_ts, :millisecond)
assert_receive {:start_state, car, :online, date: ^date}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving}}}
assert_receive {:start_drive, ^car}
assert_receive {:insert_position, drive, %{longitude: 0.1, speed: 48}}
assert_receive {:insert_position, ^drive, %{longitude: 0.1, speed: 48}}
# Timeout
assert_receive {:close_drive, ^drive, lookup_address: true}, 1200
refute_receive _
end
test "times out a drive when rececing sleep event", %{test: name} do
now_ts = DateTime.utc_now() |> DateTime.to_unix(:millisecond)
events = [
{:ok, online_event()},
drive_event(now_ts, 0.1, 30, 20, 200, nil),
drive_event(now_ts + 1, 0.1, 30, 20, 200, nil),
{:ok, %TeslaApi.Vehicle{state: "asleep"}}
]
:ok = start_vehicle(name, events)
date = DateTime.from_unix!(now_ts, :millisecond)
assert_receive {:start_state, car, :online, date: ^date}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving}}}
assert_receive {:start_drive, ^car}
assert_receive {:insert_position, drive, %{longitude: 0.1, speed: 48}}
assert_receive {:insert_position, ^drive, %{longitude: 0.1, speed: 48}}
# Timeout
assert_receive {:close_drive, ^drive, lookup_address: true}, 1200
assert_receive {:start_state, car, :asleep, []}
assert_receive {:"$websockex_cast", :disconnect}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :asleep}}}
refute_receive _
end
@tag :capture_log
test "logs a drive after a significant offline period while driving",
%{test: name} do
now = DateTime.utc_now()
now_ts = DateTime.to_unix(now, :millisecond)
events =
[
{:ok, online_event()},
drive_event(now_ts, 0.1, 30, 20, 200, nil),
drive_event(now_ts + 1, 0.1, 30, 20, 200, nil)
] ++
List.duplicate({:ok, %TeslaApi.Vehicle{state: "offline"}}, 20) ++
[
drive_event(now_ts + :timer.minutes(15), 0.2, 20, 19, 190, nil),
{:ok,
online_event(
drive_state: %{
timestamp: now_ts + :timer.minutes(15) + 1,
latitude: 0.3,
longitude: 0.3
}
)}
]
:ok = start_vehicle(name, events)
d0 = DateTime.from_unix!(now_ts, :millisecond)
assert_receive {:start_state, car, :online, date: ^d0}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving}}}
assert_receive {:start_drive, ^car}
assert_receive {:insert_position, drive, %{longitude: 0.1, speed: 48}}
assert_receive {:insert_position, ^drive, %{longitude: 0.1, speed: 48}}
refute_receive _, 100
# Logs previous drive
assert_receive {:close_drive, ^drive, lookup_address: true}, 250
d1 = DateTime.from_unix!(now_ts + :timer.minutes(15), :millisecond)
assert_receive {:start_state, ^car, :online, date: ^d1}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
assert_receive {:start_drive, ^car}
assert_receive {:insert_position, drive, %{longitude: 0.2, speed: 32}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving}}}
assert_receive {:insert_position, drive, %{longitude: 0.3}}
assert_receive {:close_drive, ^drive, lookup_address: true}
d2 = DateTime.from_unix!(now_ts + :timer.minutes(15) + 1, :millisecond)
assert_receive {:start_state, ^car, :online, date: ^d2}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
refute_receive _
end
@tag :capture_log
test "continues a drive after a short offline period while driving",
%{test: name} do
now = DateTime.utc_now()
now_ts = DateTime.to_unix(now, :millisecond)
events =
[
{:ok, online_event()},
drive_event(now_ts, 0.1, 30, 20, 200, nil),
drive_event(now_ts + 1, 0.1, 30, 20, 200, nil)
] ++
List.duplicate({:ok, %TeslaApi.Vehicle{state: "offline"}}, 16) ++
[
drive_event(now_ts + :timer.minutes(4), 0.2, 20, 19, 190, nil),
{:ok,
online_event(
drive_state: %{
timestamp: now_ts + :timer.minutes(4) + 1,
latitude: 0.3,
longitude: 0.3
}
)}
]
:ok = start_vehicle(name, events)
d0 = DateTime.from_unix!(now_ts, :millisecond)
assert_receive {:start_state, car, :online, date: ^d0}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving}}}
assert_receive {:start_drive, ^car}
assert_receive {:insert_position, drive, %{longitude: 0.1, speed: 48}}
assert_receive {:insert_position, ^drive, %{longitude: 0.1, speed: 48}}
refute_receive _, 50
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving}}}
assert_receive {:insert_position, drive, %{longitude: 0.2, speed: 32}}
assert_receive {:insert_position, drive, %{longitude: 0.3}}
assert_receive {:close_drive, ^drive, lookup_address: true}
d1 = DateTime.from_unix!(now_ts + :timer.minutes(4) + 1, :millisecond)
assert_receive {:start_state, ^car, :online, date: ^d1}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
refute_receive _
end
end
describe "geofencing" do
alias TeslaMate.Locations.GeoFence
test "changes geofence when enterling or leaving", %{test: name} do
ts = DateTime.utc_now() |> DateTime.to_unix(:millisecond)
drive_event = fn s, lat, lng ->
online_event(drive_state: %{timestamp: ts, shift_state: s, latitude: lat, longitude: lng})
end
events = [
{:ok, online_event()},
{:ok, drive_event.("D", 90, 45)},
{:ok, drive_event.("D", 90, 45.1)},
{:ok, drive_event.("D", 90, 45.2)},
{:ok, drive_event.("D", 90, 45.1)},
{:ok, drive_event.("P", 90, 45)}
]
:ok = start_vehicle(name, events)
assert_receive {:start_state, car, :online, date: _}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{longitude: 45}}
assert_receive {:pubsub,
{:broadcast, _, _,
%Summary{state: :online, geofence: %GeoFence{name: "South Pole"}}}}
assert_receive {:start_drive, ^car}
assert_receive {:insert_position, drive, %{longitude: 45}}
assert_receive {:pubsub,
{:broadcast, _, _,
%Summary{state: :driving, geofence: %GeoFence{name: "South Pole"}}}}
assert_receive {:insert_position, ^drive, %{longitude: 45.1}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving, geofence: nil}}}
assert_receive {:insert_position, ^drive, %{longitude: 45.2}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving, geofence: nil}}}
assert_receive {:insert_position, ^drive, %{longitude: 45.1}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :driving, geofence: nil}}}
assert_receive {:insert_position, ^drive, %{longitude: 45}}
assert_receive {:close_drive, ^drive, lookup_address: true}
assert_receive {:start_state, ^car, :online, date: _}
assert_receive {:insert_position, ^car, %{longitude: 45}}
assert_receive {:pubsub,
{:broadcast, _, _,
%Summary{state: :online, geofence: %GeoFence{name: "South Pole"}}}}
refute_receive _
end
end
end
| 38.166344 | 98 | 0.61342 |
9ea985395d329c4374bce587cadbba8afd5d77d2 | 2,815 | ex | Elixir | clients/big_query/lib/google_api/big_query/v2/model/training_run.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | null | null | null | clients/big_query/lib/google_api/big_query/v2/model/training_run.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | null | null | null | clients/big_query/lib/google_api/big_query/v2/model/training_run.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigQuery.V2.Model.TrainingRun do
@moduledoc """
Information about a single training query run for the model.
## Attributes
* `dataSplitResult` (*type:* `GoogleApi.BigQuery.V2.Model.DataSplitResult.t`, *default:* `nil`) - Data split result of the training run. Only set when the input data is actually split.
* `evaluationMetrics` (*type:* `GoogleApi.BigQuery.V2.Model.EvaluationMetrics.t`, *default:* `nil`) - The evaluation metrics over training/eval data that were computed at the end of training.
* `results` (*type:* `list(GoogleApi.BigQuery.V2.Model.IterationResult.t)`, *default:* `nil`) - Output of each iteration run, results.size() <= max_iterations.
* `startTime` (*type:* `DateTime.t`, *default:* `nil`) - The start time of this training run.
* `trainingOptions` (*type:* `GoogleApi.BigQuery.V2.Model.TrainingOptions.t`, *default:* `nil`) - Options that were used for this training run, includes user specified and default options that were used.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:dataSplitResult => GoogleApi.BigQuery.V2.Model.DataSplitResult.t(),
:evaluationMetrics => GoogleApi.BigQuery.V2.Model.EvaluationMetrics.t(),
:results => list(GoogleApi.BigQuery.V2.Model.IterationResult.t()),
:startTime => DateTime.t(),
:trainingOptions => GoogleApi.BigQuery.V2.Model.TrainingOptions.t()
}
field(:dataSplitResult, as: GoogleApi.BigQuery.V2.Model.DataSplitResult)
field(:evaluationMetrics, as: GoogleApi.BigQuery.V2.Model.EvaluationMetrics)
field(:results, as: GoogleApi.BigQuery.V2.Model.IterationResult, type: :list)
field(:startTime, as: DateTime)
field(:trainingOptions, as: GoogleApi.BigQuery.V2.Model.TrainingOptions)
end
defimpl Poison.Decoder, for: GoogleApi.BigQuery.V2.Model.TrainingRun do
def decode(value, options) do
GoogleApi.BigQuery.V2.Model.TrainingRun.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.BigQuery.V2.Model.TrainingRun do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 47.711864 | 207 | 0.736767 |
9ea9918578d5fbb45d539a92908a549b26662076 | 351 | exs | Elixir | priv/repo/seeds.exs | OrigamiApp/server | efbf185a33694b47fc94376c8ddc4b30f8e3d620 | ["Apache-2.0"] | null | null | null | priv/repo/seeds.exs | OrigamiApp/server | efbf185a33694b47fc94376c8ddc4b30f8e3d620 | ["Apache-2.0"] | null | null | null | priv/repo/seeds.exs | OrigamiApp/server | efbf185a33694b47fc94376c8ddc4b30f8e3d620 | ["Apache-2.0"] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Origami.Repo.insert!(%Origami.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
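#
# A minimal idempotent sketch (`SomeSchema` and its `:name` field are
# hypothetical; adjust to the schemas this app actually defines):
#
#     Origami.Repo.insert!(
#       %Origami.SomeSchema{name: "example"},
#       on_conflict: :nothing
#     )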
| 29.25 | 61 | 0.706553 |
9ea9c3e7ab8cca6f7f3a1ad12a4ddc8d97cff073 | 840 | ex | Elixir | Microsoft.Azure.Management.Containers/lib/microsoft/azure/management/containers/model/webhook_properties_update_parameters.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | ["Apache-2.0"] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Containers/lib/microsoft/azure/management/containers/model/webhook_properties_update_parameters.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | ["Apache-2.0"] | null | null | null | Microsoft.Azure.Management.Containers/lib/microsoft/azure/management/containers/model/webhook_properties_update_parameters.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | ["Apache-2.0"] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Microsoft.Azure.Management.Containers.Model.WebhookPropertiesUpdateParameters do
@moduledoc """
The parameters for updating the properties of a webhook.
"""
@derive [Poison.Encoder]
defstruct [
:"serviceUri",
:"customHeaders",
:"status",
:"scope",
:"actions"
]
@type t :: %__MODULE__{
:"serviceUri" => String.t,
:"customHeaders" => %{optional(String.t) => String.t},
:"status" => String.t,
:"scope" => String.t,
:"actions" => [String.t]
}
end
defimpl Poison.Decoder, for: Microsoft.Azure.Management.Containers.Model.WebhookPropertiesUpdateParameters do
def decode(value, _options) do
value
end
end
| 24.705882 | 109 | 0.684524 |
9eaa1e34d9eb33ac2d6e83bb04ddc63851a8aeb8 | 1,122 | exs | Elixir | lib/makeup/lexers/elixir_lexer/variables.ex.exs | davydog187/makeup_elixir | 462b0edd8b7ec878202587c6506ae1fe029aaca9 | ["BSD-2-Clause"] | null | null | null | lib/makeup/lexers/elixir_lexer/variables.ex.exs | davydog187/makeup_elixir | 462b0edd8b7ec878202587c6506ae1fe029aaca9 | ["BSD-2-Clause"] | null | null | null | lib/makeup/lexers/elixir_lexer/variables.ex.exs | davydog187/makeup_elixir | 462b0edd8b7ec878202587c6506ae1fe029aaca9 | ["BSD-2-Clause"] | null | null | null | defmodule Makeup.Lexers.ElixirLexer.Variables do
@moduledoc false
# parsec:Makeup.Lexers.ElixirLexer.Variables
# This module is generated at "dev time" so that the lexer
# doesn't have to depend on the (excellent) `unicode_set` library,
# which takes several minutes to compile.
import NimbleParsec
variable_start_unicode_syntax =
"[[:L:][:Nl:][:Other_ID_Start:]-[:Pattern_Syntax:]-[:Pattern_White_Space:]-[:Lu:]-[:Lt:][_]]"
variable_continue_unicode_syntax =
"[[:ID_Start:][:Mn:][:Mc:][:Nd:][:Pc:][:Other_ID_Continue:]-[:Pattern_Syntax:]-[:Pattern_White_Space:]]"
# TODO: Why do we need to flatten these lists? A bug in `unicode_set`?
variable_start_chars = Unicode.Set.to_utf8_char(variable_start_unicode_syntax) |> List.flatten()
variable_continue_chars = Unicode.Set.to_utf8_char(variable_continue_unicode_syntax) |> List.flatten()
defcombinator :variable_start_chars, label(utf8_char(variable_start_chars), "variable start")
defcombinator :variable_continue_chars, label(utf8_char(variable_continue_chars), "variable continue")
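# A downstream usage sketch (hypothetical call site; NimbleParsec's parsec
# combinator accepts a {module, combinator} tuple once this module is compiled):
#
#     parsec({Makeup.Lexers.ElixirLexer.Variables, :variable_start_chars})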
# parsec:Makeup.Lexers.ElixirLexer.Variables
end
| 46.75 | 108 | 0.759358 |
9eaa289537fde3266abefe64c9700d9754a761ea | 1,972 | ex | Elixir | apps/rent_bot/lib/rent_bot/subscribers/subscribers.ex | Shiva-1927/Chatbot | cfc4adcb70a5481ecbf65b9dc932029bbdcf5eae | ["MIT"] | 94 | 2018-02-19T11:53:45.000Z | 2021-11-15T12:20:24.000Z | apps/rent_bot/lib/rent_bot/subscribers/subscribers.ex | Shiva-1927/Chatbot | cfc4adcb70a5481ecbf65b9dc932029bbdcf5eae | ["MIT"] | 2 | 2021-03-10T03:48:31.000Z | 2021-05-10T23:32:27.000Z | apps/rent_bot/lib/rent_bot/subscribers/subscribers.ex | dih78/rent-bot | 35ee3f32ca227979922aee37678e98e6e46491c8 | ["MIT"] | 28 | 2018-02-19T11:53:48.000Z | 2021-01-02T21:55:36.000Z | defmodule RentBot.Subscribers do
@moduledoc """
The Subscribers context.
"""
import Ecto.Query, warn: false
alias RentBot.Repo
alias RentBot.Subscribers.Subscriber
@doc """
Returns the list of subscribers.
## Examples
iex> list_subscribers()
[%Subscriber{}, ...]
"""
def list_subscribers do
Repo.all(Subscriber)
end
@doc """
Gets a single subscriber.
Raises `Ecto.NoResultsError` if the Subscriber does not exist.
## Examples
iex> get_subscriber!(123)
%Subscriber{}
iex> get_subscriber!(456)
** (Ecto.NoResultsError)
"""
def get_subscriber!(id), do: Repo.get!(Subscriber, id)
@doc """
Creates a subscriber.
## Examples
iex> create_subscriber(%{field: value})
{:ok, %Subscriber{}}
iex> create_subscriber(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_subscriber(attrs \\ %{}) do
%Subscriber{}
|> Subscriber.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a subscriber.
## Examples
iex> update_subscriber(subscriber, %{field: new_value})
{:ok, %Subscriber{}}
iex> update_subscriber(subscriber, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_subscriber(%Subscriber{} = subscriber, attrs) do
subscriber
|> Subscriber.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Subscriber.
## Examples
iex> delete_subscriber(subscriber)
{:ok, %Subscriber{}}
iex> delete_subscriber(subscriber)
{:error, %Ecto.Changeset{}}
"""
def delete_subscriber(%Subscriber{} = subscriber) do
Repo.delete(subscriber)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking subscriber changes.
## Examples
iex> change_subscriber(subscriber)
%Ecto.Changeset{source: %Subscriber{}}
"""
def change_subscriber(%Subscriber{} = subscriber) do
Subscriber.changeset(subscriber, %{})
end
end
| 18.780952 | 65 | 0.633874 |
9eaa318c16325cbd83029ff4accf53c836e22ba5 | 816 | exs | Elixir | config/prod.secret.exs | joaohf/epad | ff3e37c3fc3b13363f5fb728c6d4a0a3d0ef3fce | [
"MIT"
] | 2 | 2020-09-07T19:45:28.000Z | 2021-05-04T05:31:41.000Z | config/prod.secret.exs | joaohf/epad | ff3e37c3fc3b13363f5fb728c6d4a0a3d0ef3fce | [
"MIT"
] | null | null | null | config/prod.secret.exs | joaohf/epad | ff3e37c3fc3b13363f5fb728c6d4a0a3d0ef3fce | [
"MIT"
] | null | null | null | # In this file, we load production configuration and secrets
# from environment variables. You can also hardcode secrets,
# although such is generally not recommended and you have to
# remember to add this file to your .gitignore.
use Mix.Config
secret_key_base = "l/e8Q8Ev6H8Q5WzQR0jU+amNX85j6EYdWYiNvH9JryYaUaa1i2VxG217Qq8RZNaU"
config :epad, EpadWeb.Endpoint,
http: [
port: String.to_integer(System.get_env("PORT") || "4000"),
transport_options: [socket_opts: [:inet6]]
],
secret_key_base: secret_key_base
# ## Using releases (Elixir v1.9+)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start each relevant endpoint:
#
config :epad, EpadWeb.Endpoint, server: true
#
# Then you can assemble a release by calling `mix release`.
# See `mix help release` for more information.
| 32.64 | 84 | 0.756127 |
9eaa846343768da4d013c6129c409542294afac1 | 800 | ex | Elixir | lib/projare/endpoint.ex | tuvistavie/projare | e776b2d326fed97e0dbf62530674fe688ff73ab8 | [
"MIT"
] | 3 | 2016-03-06T12:23:01.000Z | 2017-03-21T18:22:07.000Z | lib/projare/endpoint.ex | tuvistavie/projare | e776b2d326fed97e0dbf62530674fe688ff73ab8 | [
"MIT"
] | null | null | null | lib/projare/endpoint.ex | tuvistavie/projare | e776b2d326fed97e0dbf62530674fe688ff73ab8 | [
"MIT"
] | null | null | null | defmodule Projare.Endpoint do
use Phoenix.Endpoint, otp_app: :projare
plug Plug.Static,
at: "/", from: :projare, gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt components)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Logger
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Poison
plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session,
store: :cookie,
key: "_projare_sprint_key",
signing_salt: "3f73Owud"
plug Projare.Router
end
| 23.529412 | 69 | 0.71125 |
9eaa929e18d9ecb714e3440c2b3a42fd5599d720 | 1,018 | exs | Elixir | mix.exs | HeyHomie/keycloak_api | ba873e093eb2e68410abf4b7228ef30a7191addc | [
"MIT"
] | null | null | null | mix.exs | HeyHomie/keycloak_api | ba873e093eb2e68410abf4b7228ef30a7191addc | [
"MIT"
] | null | null | null | mix.exs | HeyHomie/keycloak_api | ba873e093eb2e68410abf4b7228ef30a7191addc | [
"MIT"
] | null | null | null | defmodule KeycloakAPI.MixProject do
use Mix.Project
def project do
[
app: :keycloak_api,
version: "0.1.0",
elixir: "~> 1.12",
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps(),
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
coveralls: :test,
"coveralls.detail": :test,
"coveralls.html": :test,
"coveralls.xml": :test
],
elixirc_options: [warnings_as_errors: true]
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:hackney, "~> 1.17"},
{:jason, ">= 1.0.0"},
{:bypass, "~> 2.1", only: :test},
{:credo, "~> 1.5", only: [:dev, :test], runtime: false},
{:excoveralls, "~> 0.10", only: :test}
]
end
defp aliases do
[
test: ["test --warnings-as-errors"]
]
end
end
| 21.659574 | 62 | 0.545187 |
9eaa9f980a512cc8b559c6a1757ccf7d89673191 | 2,237 | ex | Elixir | clients/content/lib/google_api/content/v2/model/account_address.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | null | null | null | clients/content/lib/google_api/content/v2/model/account_address.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/content/lib/google_api/content/v2/model/account_address.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Model.AccountAddress do
@moduledoc """
## Attributes
* `country` (*type:* `String.t`, *default:* `nil`) - CLDR country code (e.g. "US"). This value cannot be set for a sub-account of an MCA. All MCA sub-accounts inherit the country of their parent MCA.
* `locality` (*type:* `String.t`, *default:* `nil`) - City, town or commune. May also include dependent localities or sublocalities (e.g. neighborhoods or suburbs).
* `postalCode` (*type:* `String.t`, *default:* `nil`) - Postal code or ZIP (e.g. "94043").
* `region` (*type:* `String.t`, *default:* `nil`) - Top-level administrative subdivision of the country. For example, a state like California ("CA") or a province like Quebec ("QC").
* `streetAddress` (*type:* `String.t`, *default:* `nil`) - Street-level part of the address.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:country => String.t(),
:locality => String.t(),
:postalCode => String.t(),
:region => String.t(),
:streetAddress => String.t()
}
field(:country)
field(:locality)
field(:postalCode)
field(:region)
field(:streetAddress)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.AccountAddress do
def decode(value, options) do
GoogleApi.Content.V2.Model.AccountAddress.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.AccountAddress do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.915254 | 203 | 0.693339 |
9eaaf10cd11d26a6b7db237b8dad133682dd699f | 682 | exs | Elixir | config/config.exs | bitwalker/aws-dist-test | 94d87e82b617da02d541f7b2744d20747d8ef21f | ["Apache-2.0"] | 4 | 2019-03-13T16:38:32.000Z | 2020-01-11T20:05:25.000Z | config/config.exs | bitwalker/aws-dist-test | 94d87e82b617da02d541f7b2744d20747d8ef21f | ["Apache-2.0"] | 1 | 2019-03-14T17:41:55.000Z | 2019-03-14T17:41:55.000Z | config/config.exs | bitwalker/aws-dist-test | 94d87e82b617da02d541f7b2744d20747d8ef21f | ["Apache-2.0"] | 1 | 2019-03-14T14:53:29.000Z | 2019-03-14T14:53:29.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# By default, the umbrella project as well as each child
# application will require this configuration file, ensuring
# they all use the same configuration. While one could
# configure all applications here, we prefer to delegate
# back to each application for organization purposes.
#import_config "../apps/*/config/config.exs"
# Sample configuration (overrides the imported configuration above):
#
# config :logger, :console,
# level: :info,
# format: "$date $time [$level] $metadata$message\n",
# metadata: [:user_id]
| 37.888889 | 68 | 0.740469 |
9eab0dd41eb5b556b5da9e9971df38aea423db90 | 950 | ex | Elixir | lib/game/format/bugs.ex | jgsmith/ex_venture | 546adaa8fe80d45a72fde6de8d8d6906902c12d4 | [
"MIT"
] | 2 | 2019-05-14T11:36:44.000Z | 2020-07-01T08:54:04.000Z | lib/game/format/bugs.ex | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | null | null | null | lib/game/format/bugs.ex | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | 1 | 2021-01-29T14:12:40.000Z | 2021-01-29T14:12:40.000Z | defmodule Game.Format.Bugs do
@moduledoc """
Format functions for bugs
"""
import Game.Format.Context
alias Game.Format
alias Game.Format.Table
@doc """
Format a list of bugs
"""
@spec list_bugs([Bug.t()]) :: String.t()
def list_bugs(bugs) do
rows =
bugs
|> Enum.map(fn bug ->
[to_string(bug.id), bug.title, to_string(bug.is_completed)]
end)
rows = [["ID", "Title", "Is Fixed?"] | rows]
Table.format("Bugs", rows, [10, 30, 10])
end
@doc """
Format a single bug
"""
@spec show_bug(Bug.t()) :: String.t()
def show_bug(bug) do
context()
|> assign(:title, bug.title)
|> assign(:underline, Format.underline(bug.title))
|> assign(:is_completed, bug.is_completed)
|> assign(:body, bug.body)
|> Format.template(template("show"))
end
def template("show") do
"""
[title]
[underline]
Fixed: [is_completed]
[body]
"""
end
end
| 19 | 67 | 0.584211 |
9eab1e3576e39255a6c0edbffc82479a5d59b7e5 | 3,230 | ex | Elixir | clients/storage_transfer/lib/google_api/storage_transfer/v1/model/http_data.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/storage_transfer/lib/google_api/storage_transfer/v1/model/http_data.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/storage_transfer/lib/google_api/storage_transfer/v1/model/http_data.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.StorageTransfer.V1.Model.HttpData do
@moduledoc """
An HttpData resource specifies a list of objects on the web to be transferred over HTTP. The information of the objects to be transferred is contained in a file referenced by a URL. The first line in the file must be `"TsvHttpData-1.0"`, which specifies the format of the file. Subsequent lines specify the information of the list of objects, one object per list entry. Each entry has the following tab-delimited fields: * **HTTP URL** — The location of the object. * **Length** — The size of the object in bytes. * **MD5** — The base64-encoded MD5 hash of the object. For an example of a valid TSV file, see [Transferring data from URLs](https://cloud.google.com/storage-transfer/docs/create-url-list). When transferring data based on a URL list, keep the following in mind: * When an object located at `http(s)://hostname:port/` is transferred to a data sink, the name of the object at the data sink is `/`. * If the specified size of an object does not match the actual size of the object fetched, the object will not be transferred. * If the specified MD5 does not match the MD5 computed from the transferred bytes, the object transfer will fail. For more information, see [Generating MD5 hashes](https://cloud.google.com/storage-transfer/docs/create-url-list#md5) * Ensure that each URL you specify is publicly accessible. For example, in Cloud Storage you can [share an object publicly] (https://cloud.google.com/storage/docs/cloud-console#_sharingdata) and get a link to it. * Storage Transfer Service obeys `robots.txt` rules and requires the source HTTP server to support `Range` requests and to return a `Content-Length` header in each response. * ObjectConditions have no effect when filtering objects to transfer.
## Attributes
* `listUrl` (*type:* `String.t`, *default:* `nil`) - Required. The URL that points to the file that stores the object list entries. This file must allow public access. Currently, only URLs with HTTP and HTTPS schemes are supported.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:listUrl => String.t()
}
field(:listUrl)
end
defimpl Poison.Decoder, for: GoogleApi.StorageTransfer.V1.Model.HttpData do
def decode(value, options) do
GoogleApi.StorageTransfer.V1.Model.HttpData.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.StorageTransfer.V1.Model.HttpData do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 68.723404 | 1,727 | 0.758514 |
9eab25d07def99a4b2ce202076631110d29335d1 | 287 | ex | Elixir | test/support/factories/okr_reflection.ex | sb8244/okr_app_pub | 933872107bd13390a0a5ea119d7997d4cb5ea7db | [
"MIT"
] | 12 | 2019-05-10T21:48:06.000Z | 2021-11-07T14:04:30.000Z | test/support/factories/okr_reflection.ex | sb8244/okr_app_pub | 933872107bd13390a0a5ea119d7997d4cb5ea7db | [
"MIT"
] | 2 | 2019-05-14T19:07:10.000Z | 2019-05-20T21:06:27.000Z | test/support/factories/okr_reflection.ex | sb8244/okr_app_pub | 933872107bd13390a0a5ea119d7997d4cb5ea7db | [
"MIT"
] | 3 | 2019-05-19T18:24:20.000Z | 2019-10-31T20:29:12.000Z | defmodule Test.Factories.OkrReflection do
def create!(override_params = %{"okr_id" => _}) do
params =
%{
"reflection" => "How I did"
}
|> Map.merge(override_params)
{:ok, struct} = OkrApp.Objectives.create_okr_reflection(params)
struct
end
end
| 22.076923 | 67 | 0.623693 |
9eab2ab79ae010bffb27f95749ac97fbd2bd2cd4 | 1,059 | ex | Elixir | test/support/conn_case.ex | nicohartto/headland-back | 413febe835dafc15b4dae731998ff42aa755496b | [
"MIT"
] | null | null | null | test/support/conn_case.ex | nicohartto/headland-back | 413febe835dafc15b4dae731998ff42aa755496b | [
"MIT"
] | null | null | null | test/support/conn_case.ex | nicohartto/headland-back | 413febe835dafc15b4dae731998ff42aa755496b | [
"MIT"
] | null | null | null | defmodule Headland.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
imports other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
alias Headland.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query, only: [from: 1, from: 2]
import Headland.Router.Helpers
# The default endpoint for testing
@endpoint Headland.Endpoint
end
end
setup tags do
unless tags[:async] do
Ecto.Adapters.SQL.restart_test_transaction(Headland.Repo, [])
end
{:ok, conn: Phoenix.ConnTest.conn()}
end
end
| 24.627907 | 67 | 0.704438 |
9eab4bb5791d98cef278eb936d93a8226b29f0f2 | 1,642 | ex | Elixir | lib/ex_oauth2_provider/access_grants/access_grants.ex | aidanranney/ex_oauth2_provider | b6ab9c37d39430cd220b2be9794502111c05585c | [
"MIT"
] | 161 | 2017-02-27T19:29:17.000Z | 2021-12-22T12:29:14.000Z | lib/ex_oauth2_provider/access_grants/access_grants.ex | lawalalao/ex_oauth2_provider | 093cc3b289a45e6aeac6205d40b632c0a0af8b3c | [
"MIT"
] | 54 | 2017-03-23T21:05:03.000Z | 2022-02-22T00:16:59.000Z | lib/ex_oauth2_provider/access_grants/access_grants.ex | lawalalao/ex_oauth2_provider | 093cc3b289a45e6aeac6205d40b632c0a0af8b3c | [
"MIT"
] | 49 | 2017-02-26T22:38:34.000Z | 2022-03-07T19:17:03.000Z | defmodule ExOauth2Provider.AccessGrants do
@moduledoc """
The boundary for the OauthAccessGrants system.
"""
alias ExOauth2Provider.Mixin.{Expirable, Revocable}
alias ExOauth2Provider.{Applications.Application, AccessGrants.AccessGrant, Config}
defdelegate revoke!(data, config \\ []), to: Revocable
defdelegate revoke(data, config \\ []), to: Revocable
@doc """
Gets a single access grant registered with an application.
## Examples
iex> get_active_grant_for(application, "jE9dk", otp_app: :my_app)
%OauthAccessGrant{}
iex> get_active_grant_for(application, "jE9dk", otp_app: :my_app)
nil
"""
@spec get_active_grant_for(Application.t(), binary(), keyword()) :: AccessGrant.t() | nil
def get_active_grant_for(application, token, config \\ []) do
config
|> Config.access_grant()
|> Config.repo(config).get_by(application_id: application.id, token: token)
|> Expirable.filter_expired()
|> Revocable.filter_revoked()
end
@doc """
Creates an access grant.
## Examples
iex> create_grant(resource_owner, application, attrs)
{:ok, %OauthAccessGrant{}}
iex> create_grant(resource_owner, application, attrs)
{:error, %Ecto.Changeset{}}
"""
@spec create_grant(Ecto.Schema.t(), Application.t(), map(), keyword()) :: {:ok, AccessGrant.t()} | {:error, term()}
def create_grant(resource_owner, application, attrs, config \\ []) do
config
|> Config.access_grant()
|> struct(resource_owner: resource_owner, application: application)
|> AccessGrant.changeset(attrs, config)
|> Config.repo(config).insert()
end
end
| 30.407407 | 117 | 0.686967 |
9eab56ce840c1d3d9c31c0486b3d6b640f964c54 | 15,076 | exs | Elixir | lib/elixir/test/elixir/kernel/quote_test.exs | goalves/elixir | 75726d5611413ee45cb5235b1698944b72efa244 | ["Apache-2.0"] | null | null | null | lib/elixir/test/elixir/kernel/quote_test.exs | goalves/elixir | 75726d5611413ee45cb5235b1698944b72efa244 | ["Apache-2.0"] | null | null | null | lib/elixir/test/elixir/kernel/quote_test.exs | goalves/elixir | 75726d5611413ee45cb5235b1698944b72efa244 | ["Apache-2.0"] | null | null | null | Code.require_file("../test_helper.exs", __DIR__)
defmodule Kernel.QuoteTest do
use ExUnit.Case, async: true
@some_fun &List.flatten/1
test "fun" do
assert is_function(@some_fun)
end
test "list" do
assert quote(do: [1, 2, 3]) == [1, 2, 3]
end
test "tuple" do
assert quote(do: {:a, 1}) == {:a, 1}
end
test "keep line" do
line = __ENV__.line + 2
assert quote(location: :keep, do: bar(1, 2, 3)) ==
{:bar, [keep: {Path.relative_to_cwd(__ENV__.file), line}], [1, 2, 3]}
end
test "fixed line" do
assert quote(line: 3, do: bar(1, 2, 3)) == {:bar, [line: 3], [1, 2, 3]}
assert quote(line: false, do: bar(1, 2, 3)) == {:bar, [], [1, 2, 3]}
assert quote(line: true, do: bar(1, 2, 3)) == {:bar, [line: __ENV__.line], [1, 2, 3]}
end
test "quote line var" do
line = __ENV__.line
assert quote(line: line, do: bar(1, 2, 3)) == {:bar, [line: line], [1, 2, 3]}
assert_raise ArgumentError, fn ->
line = "oops"
quote(line: line, do: bar(1, 2, 3))
end
assert_raise ArgumentError, fn ->
line = true
quote(line: line, do: bar(1, 2, 3))
end
end
test "quote context var" do
context = :dynamic
assert quote(context: context, do: bar) == {:bar, [], :dynamic}
assert_raise ArgumentError, fn ->
context = "oops"
quote(context: context, do: bar)
end
assert_raise ArgumentError, fn ->
context = nil
quote(context: context, do: bar)
end
end
test "operator precedence" do
assert {:+, _, [{:+, _, [1, _]}, 1]} = quote(do: 1 + Foo.l() + 1)
assert {:+, _, [1, {_, _, [{:+, _, [1]}]}]} = quote(do: 1 + Foo.l(+1))
end
test "generated" do
assert quote(generated: true, do: bar(1)) == {:bar, [generated: true], [1]}
end
test "unquote call" do
assert quote(do: foo(bar)[unquote(:baz)]) == quote(do: foo(bar)[:baz])
assert quote(do: unquote(:bar)()) == quote(do: bar())
assert (quote do
unquote(:bar)(1) do
2 + 3
end
end) ==
(quote do
bar 1 do
2 + 3
end
end)
assert quote(do: foo.unquote(:bar)) == quote(do: foo.bar)
assert quote(do: foo.unquote(:bar)()) == quote(do: foo.bar())
assert quote(do: foo.unquote(:bar)(1)) == quote(do: foo.bar(1))
assert (quote do
foo.unquote(:bar)(1) do
2 + 3
end
end) ==
(quote do
foo.bar 1 do
2 + 3
end
end)
assert quote(do: foo.unquote({:bar, [], nil})) == quote(do: foo.bar)
assert quote(do: foo.unquote({:bar, [], nil})()) == quote(do: foo.bar())
assert quote(do: foo.unquote({:bar, [], [1, 2]})) == quote(do: foo.bar(1, 2))
assert Code.eval_quoted(quote(do: Foo.unquote(Bar))) == {Elixir.Foo.Bar, []}
assert Code.eval_quoted(quote(do: Foo.unquote(quote(do: Bar)))) == {Elixir.Foo.Bar, []}
assert_raise ArgumentError, fn ->
quote(do: foo.unquote(1))
end
end
test "nested quote" do
assert {:quote, _, [[do: {:unquote, _, _}]]} = quote(do: quote(do: unquote(x)))
end
defmacrop nested_quote_in_macro do
x = 1
quote do
x = unquote(x)
quote do
unquote(x)
end
end
end
test "nested quote in macro" do
assert nested_quote_in_macro() == 1
end
defmodule Dyn do
for {k, v} <- [foo: 1, bar: 2, baz: 3] do
# Local call unquote
def unquote(k)(), do: unquote(v)
# Remote call unquote
def unquote(k)(arg), do: __MODULE__.unquote(k)() + arg
end
end
test "dynamic definition with unquote" do
assert Dyn.foo() == 1
assert Dyn.bar() == 2
assert Dyn.baz() == 3
assert Dyn.foo(1) == 2
assert Dyn.bar(2) == 4
assert Dyn.baz(3) == 6
end
test "splice on root" do
contents = [1, 2, 3]
assert quote(do: (unquote_splicing(contents))) ==
(quote do
1
2
3
end)
end
test "splice with tail" do
contents = [1, 2, 3]
assert quote(do: [unquote_splicing(contents) | [1, 2, 3]]) == [1, 2, 3, 1, 2, 3]
assert quote(do: [unquote_splicing(contents) | val]) == quote(do: [1, 2, 3 | val])
assert quote(do: [unquote_splicing(contents) | unquote([4])]) == quote(do: [1, 2, 3, 4])
end
test "splice on stab" do
{fun, []} = Code.eval_quoted(quote(do: fn unquote_splicing([1, 2, 3]) -> :ok end), [])
assert fun.(1, 2, 3) == :ok
{fun, []} = Code.eval_quoted(quote(do: fn 1, unquote_splicing([2, 3]) -> :ok end), [])
assert fun.(1, 2, 3) == :ok
end
test "splice on definition" do
defmodule Hello do
def world([unquote_splicing(["foo", "bar"]) | rest]) do
rest
end
end
assert Hello.world(["foo", "bar", "baz"]) == ["baz"]
end
test "splice on map" do
assert %{unquote_splicing(foo: :bar)} == %{foo: :bar}
assert %{unquote_splicing(foo: :bar), baz: :bat} == %{foo: :bar, baz: :bat}
assert %{unquote_splicing(foo: :bar), :baz => :bat} == %{foo: :bar, baz: :bat}
assert %{:baz => :bat, unquote_splicing(foo: :bar)} == %{foo: :bar, baz: :bat}
map = %{foo: :default}
assert %{map | unquote_splicing(foo: :bar)} == %{foo: :bar}
end
test "when" do
assert [{:->, _, [[{:when, _, [1, 2, 3, 4]}], 5]}] = quote(do: (1, 2, 3 when 4 -> 5))
assert [{:->, _, [[{:when, _, [1, 2, 3, 4]}], 5]}] = quote(do: (1, 2, 3 when 4 -> 5))
assert [{:->, _, [[{:when, _, [1, 2, 3, {:when, _, [4, 5]}]}], 6]}] =
quote(do: (1, 2, 3 when 4 when 5 -> 6))
end
test "stab" do
assert [{:->, _, [[], 1]}] =
(quote do
() -> 1
end)
assert [{:->, _, [[], 1]}] = quote(do: (() -> 1))
end
test "empty block" do
# Since ; is allowed by itself, it must also be allowed inside ()
# The exception to this rule is an empty (). While empty expressions
# are allowed, an empty () is ambiguous. We also can't use quote here,
# since the formatter will rewrite (;) to something else.
assert {:ok, {:__block__, [line: 1], []}} = Code.string_to_quoted("(;)")
end
test "bind quoted" do
args = [
{:=, [], [{:foo, [line: __ENV__.line + 4], Kernel.QuoteTest}, 3]},
{:foo, [], Kernel.QuoteTest}
]
quoted = quote(bind_quoted: [foo: 1 + 2], do: foo)
assert quoted == {:__block__, [], args}
end
test "literals" do
assert quote(do: []) == []
assert quote(do: nil) == nil
assert (quote do
[]
end) == []
assert (quote do
nil
end) == nil
end
defmacrop dynamic_opts do
[line: 3]
end
test "with dynamic opts" do
assert quote(dynamic_opts(), do: bar(1, 2, 3)) == {:bar, [line: 3], [1, 2, 3]}
end
test "unary with integer precedence" do
assert quote(do: +1.foo) == quote(do: +1.foo)
assert quote(do: (@1).foo) == quote(do: (@1).foo)
assert quote(do: &1.foo) == quote(do: &1.foo)
end
test "pipe precedence" do
assert {:|>, _, [{:|>, _, [{:foo, _, _}, {:bar, _, _}]}, {:baz, _, _}]} =
quote(do: foo |> bar |> baz)
assert {:|>, _, [{:|>, _, [{:foo, _, _}, {:bar, _, _}]}, {:baz, _, _}]} =
(quote do
foo do
end
|> bar
|> baz
end)
assert {:|>, _, [{:|>, _, [{:foo, _, _}, {:bar, _, _}]}, {:baz, _, _}]} =
(quote do
foo
|> bar do
end
|> baz
end)
assert {:|>, _, [{:|>, _, [{:foo, _, _}, {:bar, _, _}]}, {:baz, _, _}]} =
(quote do
foo
|> bar
|> baz do
end
end)
assert {:|>, _, [{:|>, _, [{:foo, _, _}, {:bar, _, _}]}, {:baz, _, _}]} =
(quote do
foo do
end
|> bar
|> baz do
end
end)
assert {:|>, _, [{:|>, _, [{:foo, _, _}, {:bar, _, _}]}, {:baz, _, _}]} =
(quote do
foo do
end
|> bar do
end
|> baz do
end
end)
end
end
# DO NOT MOVE THIS LINE
defmodule Kernel.QuoteTest.Errors do
def line, do: __ENV__.line + 4
defmacro defraise do
quote location: :keep do
def will_raise(_a, _b), do: raise("oops")
end
end
defmacro will_raise do
quote(location: :keep, do: raise("oops"))
end
end
defmodule Kernel.QuoteTest.ErrorsTest do
use ExUnit.Case, async: true
import Kernel.QuoteTest.Errors
# Defines the add function
defraise()
@line line()
test "inside function error" do
try do
will_raise(:a, :b)
rescue
RuntimeError ->
mod = Kernel.QuoteTest.ErrorsTest
file = __ENV__.file |> Path.relative_to_cwd() |> String.to_charlist()
assert [{^mod, :will_raise, 2, [file: ^file, line: @line]} | _] = __STACKTRACE__
else
_ -> flunk("expected failure")
end
end
@line __ENV__.line + 3
test "outside function error" do
try do
will_raise()
rescue
RuntimeError ->
mod = Kernel.QuoteTest.ErrorsTest
file = __ENV__.file |> Path.relative_to_cwd() |> String.to_charlist()
assert [{^mod, _, _, [file: ^file, line: @line]} | _] = __STACKTRACE__
else
_ -> flunk("expected failure")
end
end
end
defmodule Kernel.QuoteTest.VarHygiene do
defmacro no_interference do
quote(do: a = 1)
end
defmacro write_interference do
quote(do: var!(a) = 1)
end
defmacro read_interference do
quote(do: 10 = var!(a))
end
defmacro cross_module_interference do
quote(do: var!(a, Kernel.QuoteTest.VarHygieneTest) = 1)
end
end
defmodule Kernel.QuoteTest.VarHygieneTest do
use ExUnit.Case, async: true
import Kernel.QuoteTest.VarHygiene
defmacrop cross_module_no_interference do
quote(do: a = 10)
end
defmacrop read_cross_module do
quote(do: var!(a, __MODULE__))
end
defmacrop nested(var, do: block) do
quote do
var = unquote(var)
unquote(block)
var
end
end
defmacrop hat do
quote do
var = 1
^var = 1
var
end
end
test "no interference" do
a = 10
no_interference()
assert a == 10
end
test "cross module interference" do
cross_module_no_interference()
cross_module_interference()
assert read_cross_module() == 1
end
test "write interference" do
write_interference()
assert a == 1
end
test "read interference" do
a = 10
read_interference()
end
test "hat" do
assert hat() == 1
end
test "nested macro" do
assert (nested 1 do
nested 2 do
_ = :ok
end
end) == 1
end
test "nested quoted" do
defmodule NestedQuote do
defmacro __using__(_) do
quote unquote: false do
arg = quote(do: arg)
def test(arg) do
unquote(arg)
end
end
end
end
defmodule UseNestedQuote do
use NestedQuote
end
assert UseNestedQuote.test("foo") == "foo"
end
test "nested bind quoted" do
defmodule NestedBindQuoted do
defmacrop macro(arg) do
quote bind_quoted: [arg: arg] do
quote bind_quoted: [arg: arg], do: String.duplicate(arg, 2)
end
end
defmacro __using__(_) do
quote do
def test do
unquote(macro("foo"))
end
end
end
end
defmodule UseNestedBindQuoted do
use NestedBindQuoted
end
assert UseNestedBindQuoted.test() == "foofoo"
end
end
defmodule Kernel.QuoteTest.AliasHygiene do
alias Dict, as: SuperDict
defmacro dict do
quote(do: Dict.Bar)
end
defmacro super_dict do
quote(do: SuperDict.Bar)
end
end
defmodule Kernel.QuoteTest.AliasHygieneTest do
use ExUnit.Case, async: true
alias Dict, as: SuperDict
test "annotate aliases" do
assert {:__aliases__, [alias: false], [:Foo, :Bar]} = quote(do: Foo.Bar)
assert {:__aliases__, [alias: false], [:Dict, :Bar]} = quote(do: Dict.Bar)
assert {:__aliases__, [alias: Dict.Bar], [:SuperDict, :Bar]} = quote(do: SuperDict.Bar)
end
test "expand aliases" do
assert Code.eval_quoted(quote(do: SuperDict.Bar)) == {Elixir.Dict.Bar, []}
assert Code.eval_quoted(quote(do: alias!(SuperDict.Bar))) == {Elixir.SuperDict.Bar, []}
end
test "expand aliases without macro" do
alias HashDict, as: SuperDict
assert SuperDict.Bar == Elixir.HashDict.Bar
end
test "expand aliases with macro does not expand source alias" do
alias HashDict, as: Dict, warn: false
require Kernel.QuoteTest.AliasHygiene
assert Kernel.QuoteTest.AliasHygiene.dict() == Elixir.Dict.Bar
end
test "expand aliases with macro has higher preference" do
alias HashDict, as: SuperDict, warn: false
require Kernel.QuoteTest.AliasHygiene
assert Kernel.QuoteTest.AliasHygiene.super_dict() == Elixir.Dict.Bar
end
end
defmodule Kernel.QuoteTest.ImportsHygieneTest do
use ExUnit.Case, async: true
# We are redefining |> and using it inside the quote
# and only inside the quote. This code should still compile.
defmacro x |> f do
quote do
unquote(x) |> unquote(f)
end
end
defmacrop get_list_length do
quote do
length('hello')
end
end
defmacrop get_list_length_with_partial do
quote do
(&length(&1)).('hello')
end
end
defmacrop get_list_length_with_function do
quote do
(&length/1).('hello')
end
end
test "expand imports" do
import Kernel, except: [length: 1]
assert get_list_length() == 5
assert get_list_length_with_partial() == 5
assert get_list_length_with_function() == 5
end
defmacrop get_string_length do
import Kernel, except: [length: 1]
quote do
length("hello")
end
end
test "lazy expand imports" do
import Kernel, except: [length: 1]
import String, only: [length: 1]
assert get_string_length() == 5
end
test "lazy expand imports no conflicts" do
import Kernel, except: [length: 1]
import String, only: [length: 1]
assert get_list_length() == 5
assert get_list_length_with_partial() == 5
assert get_list_length_with_function() == 5
end
defmacrop with_length do
quote do
import Kernel, except: [length: 1]
import String, only: [length: 1]
length('hello')
end
end
test "explicitly overridden imports" do
assert with_length() == 5
end
defmodule BinaryUtils do
defmacro int32 do
quote do
integer - size(32)
end
end
end
test "checks the context also for variables to zero-arity functions" do
import BinaryUtils
{:int32, meta, __MODULE__} = quote(do: int32)
assert meta[:import] == BinaryUtils
end
end
| 24.276973 | 92 | 0.553197 |
9eab6ad70a6570970dad97474560c54bf2f35b4d | 7,121 | ex | Elixir | carnage/agents/reminder_agent.ex | JediLuke/flamelex | b38d1171b8f93375d8dc59f1710442860b6c8580 | ["Apache-2.0"] | 10 | 2021-03-02T20:05:13.000Z | 2022-03-14T21:10:39.000Z | carnage/agents/reminder_agent.ex | JediLuke/flamelex | b38d1171b8f93375d8dc59f1710442860b6c8580 | ["Apache-2.0"] | 2 | 2021-12-14T18:29:44.000Z | 2021-12-23T20:38:27.000Z | carnage/agents/reminder_agent.ex | JediLuke/flamelex | b38d1171b8f93375d8dc59f1710442860b6c8580 | ["Apache-2.0"] | 2 | 2021-12-05T20:41:26.000Z | 2021-12-26T01:46:42.000Z | defmodule Flamelex.Agent.Reminders do
@moduledoc """
This agent runs & checks for reminders.
"""
use GenServer
require Logger
alias Flamelex.Structs.TidBit
@default_reminder_time_in_minutes 15
def start_link([] = default_params) do
GenServer.start_link(__MODULE__, default_params)
end
# def snooze_reminder(reminder_uuid, time) do
# find_reminder(reminder_uuid)
# |> update_state()
# |> update_user_data()
# end
def pending_reminders() do
GenServer.call(__MODULE__, :pending_reminders)
end
# changes tag from "reminder" to "ackd_reminder"
def ack_reminder(r = %TidBit{}) do
GenServer.cast(__MODULE__, {:ack_reminder, r})
end
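# Usage sketch (illustrative, not part of the original API surface): init/1
# registers the process under __MODULE__, so once the agent is running a
# caller could do:
#
#   Flamelex.Agent.Reminders.pending_reminders()
#   |> Enum.each(&Flamelex.Agent.Reminders.ack_reminder/1)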
## GenServer callbacks
## -------------------------------------------------------------------
@impl true
def init(_params) do
IO.puts "Initializing #{__MODULE__}..."
Process.register(self(), __MODULE__)
{:ok, _initial_state = [], {:continue, :after_init}}
end
@impl true
def handle_continue(:after_init, state) do
send self(), :check_reminders
{:noreply, state}
end
@impl true
def handle_call(:pending_reminders, _from, state) do
{:reply, state, state}
end
@impl true
def handle_cast({:ack_reminder, %TidBit{uuid: ack_uuid} = r}, state) do
new_state = state |> Enum.reject(& &1.uuid == ack_uuid)
:ok = ack_reminder_in_user_data_file(r)
Logger.info "Reminder #{inspect r} has been acknowledged."
{:noreply, new_state}
end
@impl true
def handle_info(:check_reminders, state) do
# Logger.info("Checking reminders...")
state =
Utilities.Data.find(tags: "reminder") |> process_reminders(state)
Process.send_after(self(), :check_reminders, :timer.seconds(10))
{:noreply, state}
end
def handle_info({:reminder!, r}, state) do
Logger.warn "REMINDING YOU ABOUT! - #{inspect r}"
#TODO right now, schedule to remind me again (so I don't forget) - when it's acknowledged, this will stop
Process.send_after(self(), {:reminder!, r}, @default_reminder_time_in_minutes * (60 * 1000))
{:noreply, state}
end
defp process_reminders([], state), do: state
defp process_reminders([{_key, data} = r | rest], state) do
case reminder_already_pending?(r, state) do
true ->
# Logger.info "Reminder was already pending. #{inspect r}"
process_reminders(rest, state)
false ->
# we found a new reminder...
case data["remind_me_datetime"] do
nil ->
Logger.error "Found a reminder #{inspect r} that didn't have a reminder time."
state = set_up_reminder(r, state, @default_reminder_time_in_minutes)
process_reminders(rest, state)
_remind_me_datetime ->
Logger.info "Found a new reminder! Setting up a reminder... #{inspect r}"
state = set_up_reminder(r, state)
process_reminders(rest, state)
end
end
end
defp reminder_already_pending?(r, state) when is_list(state) do
state |> Enum.member?(r)
end
defp set_up_reminder({_key, _data} = r, state, time_in_minutes) do
Process.send_after(self(), {:reminder!, r}, time_in_minutes * (60 * 1000))
state ++ [r]
end
defp set_up_reminder({_key, data} = r, state) do
now_utc = DateTime.utc_now()
remind_me_datetime = data["remind_me_datetime"]
{:ok, remind_me_utc, 0} = remind_me_datetime |> DateTime.from_iso8601()
case DateTime.compare(remind_me_utc, now_utc) do
future when future in [:gt] ->
notify_delay_ms = DateTime.diff(remind_me_utc, DateTime.utc_now()) * 1000
Process.send_after(self(), {:reminder!, r}, notify_delay_ms)
state ++ [r]
past_or_present when past_or_present in [:lt, :eq] ->
Logger.warn "This reminder is in the past! #{inspect r}"
Process.send_after(self(), {:reminder!, r}, @default_reminder_time_in_minutes * (60 * 1000))
state ++ [r]
end
end
defp ack_reminder_in_user_data_file(r) do
ackd_reminder = r |> TidBit.ack_reminder()
Utilities.Data.replace_tidbit(r, ackd_reminder)
end
end
# this came out of GUI.Controller...
# @impl true
# def handle_info(:check_reminders, state) do
# # Logger.info("Checking reminders...")
# state =
# Utilities.Data.find(tags: "reminder") |> process_reminders(state)
# Process.send_after(self(), :check_reminders, :timer.seconds(10))
# {:noreply, state}
# end
# def handle_info({:reminder!, r}, state) do
# Logger.warn "REMINDING YOU ABOUT! - #{inspect r}"
# #TODO right now, schedule to remind me again (so I don't forget) - when it's acknowledged, this will stop
# Process.send_after(self(), {:reminder!, r}, @default_reminder_time_in_minutes * (60 * 1000))
# {:noreply, state}
# end
# defp process_reminders([], state), do: state
# defp process_reminders([{_key, data} = r | rest], state) do
# case reminder_already_pending?(r, state) do
# true ->
# # Logger.info "Reminder was already pending. #{inspect r}"
# process_reminders(rest, state)
# false ->
# # we found a new reminder...
# case data["remind_me_datetime"] do
# nil ->
# Logger.error "Found a reminder #{inspect r} that didn't have a reminder time."
# state = set_up_reminder(r, state, @default_reminder_time_in_minutes)
# process_reminders(rest, state)
# _remind_me_datetime ->
# Logger.info "Found a new reminder! Setting up a reminder... #{inspect r}"
# state = set_up_reminder(r, state)
# process_reminders(rest, state)
# end
# end
# end
# defp reminder_already_pending?(r, state) when is_list(state) do
# state |> Enum.member?(r)
# end
# defp set_up_reminder({_key, _data} = r, state, time_in_minutes) do
# Process.send_after(self(), {:reminder!, r}, time_in_minutes * (60 * 1000))
# state ++ [r]
# end
# defp set_up_reminder({_key, data} = r, state) do
# now_utc = DateTime.utc_now()
# remind_me_datetime = data["remind_me_datetime"]
# {:ok, remind_me_utc, 0} = remind_me_datetime |> DateTime.from_iso8601()
# case DateTime.compare(remind_me_utc, now_utc) do
# future when future in [:gt] ->
# notify_delay_ms = DateTime.diff(remind_me_utc, DateTime.utc_now()) * 1000
# Process.send_after(self(), {:reminder!, r}, notify_delay_ms)
# state ++ [r]
# past_or_present when past_or_present in [:lt, :eq] ->
# Logger.warn "This reminder is in the past! #{inspect r}"
# Process.send_after(self(), {:reminder!, r}, @default_reminder_time_in_minutes * (60 * 1000))
# state ++ [r]
# end
# end
# defp ack_reminder_in_user_data_file(r) do
# ackd_reminder = r |> TidBit.ack_reminder()
# Utilities.Data.replace_tidbit(r, ackd_reminder)
# end
# add a new buffer to the state's buffer_list
# defp add_buffer(state, buf, f) do
# buf_frame = {buf: buf, frame: f}
# state
# |> Map.update!(:buffers, fn buf_list -> buf_list ++ [buf_frame] end)
# end
| 32.967593 | 111 | 0.640781 |
9eab7e4adee71253b56529c1c7aad0b181ec626a | 990 | ex | Elixir | lib/snapshot.ex | BoringButGreat/grafana | 1d2381268e8b3abc54fd7ec046ac95d9f2b7e362 | [
"BSD-3-Clause"
] | 15 | 2016-05-05T01:30:27.000Z | 2021-02-19T12:50:00.000Z | lib/snapshot.ex | BoringButGreat/grafana | 1d2381268e8b3abc54fd7ec046ac95d9f2b7e362 | [
"BSD-3-Clause"
] | 3 | 2017-05-10T11:22:11.000Z | 2017-05-10T13:52:07.000Z | lib/snapshot.ex | BoringButGreat/grafana | 1d2381268e8b3abc54fd7ec046ac95d9f2b7e362 | [
"BSD-3-Clause"
] | 2 | 2016-10-25T13:18:38.000Z | 2017-05-10T11:23:43.000Z | defmodule Grafana.Snapshot do
use Grafana.API
@path "/api/snapshots"
@doc """
Create a new snapshot as specified by json.
iex> {:ok, result} = Grafana.Snapshot.new(%{"dashboard" => %{}, "expires" => 3600})
...> Map.keys(result)
["deleteKey","deleteUrl","key","url"]
"""
def new(json), do: api_post @path, json
@doc """
Get snapshot with given key.
iex> {:ok, result} = Grafana.Snapshot.get("my_key")
...> Map.keys(result)
["dashboard","meta"]
"""
def get(key), do: api_get "#{@path}/#{key}"
@doc """
Delete snapshot with given DeleteKey. The DeleteKey is returned when the
snapshot is created.
iex> {:ok, result} = Grafana.Snapshot.delete("delete_key")
...> Map.keys(result)
["message"]
"""
def delete(deletekey), do: api_get "#{@path}-delete/#{deletekey}"
@doc """
Build snapshot URL from given key.
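For example, assuming `:api_host` is configured as "https://grafana.example.com":
iex> Grafana.Snapshot.url_from_key("my_key")
"https://grafana.example.com/dashboard/snapshot/my_key"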
"""
def url_from_key(key), do: "#{Application.get_env(:grafana, :api_host)}/dashboard/snapshot/#{key}"
end
| 25.384615 | 100 | 0.621212 |
9eab9ca37ef405bdaace38c7526122e333508d91 | 645 | ex | Elixir | lib/altstatus_web/views/submission_view.ex | AltCampus/altstatus_backend | 70bf7a9d337e570f54002c3a7df264e88372adfa | [
"MIT"
] | 1 | 2020-01-20T18:17:59.000Z | 2020-01-20T18:17:59.000Z | lib/altstatus_web/views/submission_view.ex | AltCampus/altstatus_backend | 70bf7a9d337e570f54002c3a7df264e88372adfa | [
"MIT"
] | null | null | null | lib/altstatus_web/views/submission_view.ex | AltCampus/altstatus_backend | 70bf7a9d337e570f54002c3a7df264e88372adfa | [
"MIT"
] | 2 | 2018-09-09T08:05:24.000Z | 2018-09-09T08:35:18.000Z | defmodule AltstatusWeb.SubmissionView do
use AltstatusWeb, :view
alias AltstatusWeb.SubmissionView
def render("index.json", %{submissions: submissions}) do
%{data: render_many(submissions, SubmissionView, "submission.json")}
end
def render("show.json", %{submission: submission}) do
%{data: render_one(submission, SubmissionView, "submission.json")}
end
def render("submission.json", %{submission: submission}) do
%{id: submission.id,
twitter_url: submission.twitter_url,
reflection: submission.reflection,
medium_url: submission.medium_url,
timestamp: submission.inserted_at
}
end
end
| 29.318182 | 72 | 0.724031 |
9eaba5e5d61ba1b09fae69147b53e8f06154f8af | 8,838 | ex | Elixir | lib/hangman/dictionary_ingestion.ex | brpandey/elixir-hangman | 458502af766b42e492ebb9ca543fc8b855687b09 | [
"MIT"
] | 1 | 2016-12-19T00:10:34.000Z | 2016-12-19T00:10:34.000Z | lib/hangman/dictionary_ingestion.ex | brpandey/elixir-hangman | 458502af766b42e492ebb9ca543fc8b855687b09 | [
"MIT"
] | null | null | null | lib/hangman/dictionary_ingestion.ex | brpandey/elixir-hangman | 458502af766b42e492ebb9ca543fc8b855687b09 | [
"MIT"
] | null | null | null | defmodule Hangman.Dictionary.Ingestion do
@moduledoc """
Module handles the ingestion of hangman dictionary words
through the coordination of `Ingestion.First.Flow` and `Ingestion.Cache.Flow`
Saves ingestion state in intermediary cache partition files and finally in
ets dump file
Module transforms the dictionary file in three steps
a) a preprocessing step, which ingests the dictionary file and
generates ingestion cache partition files
b) ingestion of cache files into ETS
c) if both steps are done, we generate an ets table file which can be loaded
upon startup the next time through
For the first step, we run flow and store the results
in partitioned files. We pass a map of file_pids keyed by word length key, to
facilitate writing to the intermediate cache files
After the initial run with the dictionary file, we run flow on the cached
output files (the intermediate partition files), sparing us the initial flow's
intermediary processing -- windowing etc. -- and letting us generate and load the relevant data
(word list chunks, random word generation, tally generation) into ETS concurrently.
Lastly, once both the flows have finished we generate an ets file.
On subsequent runs, this bypasses extra flow processing as the ets file
is loaded to create the ets
"""
alias Hangman.{Dictionary, Ingestion}
# Standard name for hangman dictionary file
# To distinguish between types, we place file in different directory
# with directory name marking the difference e.g. "big"
@dictionary_file_name "words.txt"
# Cache Partition
@cache_dir "cache/"
@partition_file_prefix "words_key_"
@partition_file_suffix ".txt"
@ets_file_name "ets_table"
# Manifest file existance indicates initial flow pass has been completed
@manifest_file_name "manifest"
# Used in writing intermediate files and conversely parsing them
@line_delimiter " \n"
@key_value_delimiter ": "
def delimiter(:line), do: @line_delimiter
def delimiter(:kv), do: @key_value_delimiter
def print_table_info, do: Dictionary.ETS.info()
@doc """
Routine kicks off the ingestion process by
setting up the proper state and then running it
If we have pregenerated an ets table file previously
use that to load the ets and bypass flow processing
"""
@spec run(Keyword.t()) :: :ok
def run(args) do
case Dictionary.startup_params(args) do
# if ingestion is not enabled return :ok
{_dir, false} ->
:ok
{dir, true} ->
dictionary_path = dir <> @dictionary_file_name
cache_full_dir = dir <> @cache_dir
ets_file_path = cache_full_dir <> @ets_file_name
# Check to see if we've already written to an ets cache file
:ok =
case File.exists?(ets_file_path) do
true ->
args = {:ets, ets_file_path}
args |> setup
false ->
args = {:flow, dictionary_path, cache_full_dir, ets_file_path}
args |> setup |> process
end
:ok
end
end
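# Dispatch summary for run/1: a pre-built ETS dump short-circuits all flow
# work; otherwise setup/1 picks :full (no manifest yet) or :cache (partition
# files already written) and process/1 runs the matching pipeline.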
@doc """
Setup has two modes: a) :ets b) :flow
a) :ets
Checks if there is an ets cache file that has been pre-generated. If so, we can
avoid running the flow computations because the table is ready to load from the file.
b) :flow
Loads environment to run initial ingestion pass through the dictionary file.
If this is the first time through, we set up intermediary ingestion cache files
If not, we ensure the partition manifest file is present indicating the initial
run is complete and execute the cache ingestion run loading word lists and generating
random words into ETS as well as generating tally data
"""
@spec setup({:ets, binary} | {:flow, binary, binary, binary}) ::
{:full, binary, binary, map, binary} | {:cache, binary, binary} | :ok
def setup({:ets, ets_path}) when is_binary(ets_path) do
Dictionary.ETS.load(ets_path)
:ok
end
def setup({:flow, dictionary_path, cache_dir, ets_path})
when is_binary(dictionary_path) and is_binary(cache_dir) and is_binary(ets_path) do
# Check to see if dictionary path is valid, if not error
case File.exists?(dictionary_path) do
true -> :ok
false -> raise "Unable to find dictionary file"
end
# The presence of a partition manifest file indicates whether we have finished
# the partition steps, if not found we need to partition
# This allows us to run the main flow logic once and store the results of the
# flow in a set of files to be quickly loaded into ETS on second pass
# These generated partition files are cache files, so to speak, for Dictionary.Flow.Cache
# So, let's check whether the partition files have already been generated
# If so, forward to Dictionary.Flow.Cache.run
# If not, setup partition cache file state and setup Flow with writing to partition files
case File.exists?(cache_dir <> @manifest_file_name) do
false ->
# Manifest file doesn't exist -> we haven't partitioned into files yet
# Setup the cache state
# Remove the partition cache dir + files in case it exists, cleaning any prior state
# NOTE: SAFE TO USE RM_RF SINCE WE DON'T ASK FOR USER INPUT INVOLVING PATHS
# ALL COMPILE-TIME STATIC PATHS
_ = File.rm_rf!(cache_dir)
# Start clean with a new cache dir
:ok = File.mkdir!(cache_dir)
# Take a range of key values, and generate a map which contain k-v parts, where
# the key is the word length, and values are open file pids
# This map will be used when doing the partition each - file write in the
# context of the flow processing
partial_name = cache_dir <> @partition_file_prefix
key_file_map =
Dictionary.key_range()
|> Enum.reduce(%{}, fn key, acc ->
file_name = partial_name <> "#{key}" <> @partition_file_suffix
{:ok, pid} = File.open(file_name, [:append])
Map.put(acc, key, pid)
end)
{:full, dictionary_path, cache_dir, key_file_map, ets_path}
true ->
{:cache, cache_dir, ets_path}
end
end
@doc """
Process method supports two modes: new, cache, and full
`New` runs the initial ingestion concurrently chunking the original dictionary
file into key based partition files which contain the various
windowed data
`Cache` runs a flow against the cached partitioned files and
concurrently generates and loads all the relevent information into
memory
Full basically invokes new and cache
`Full` runs the full ingestion process by first
running the initial ingestion flow process followed by
a state cleanup, then running the ingestion cache flow process
"""
@spec process(
{:new, binary, binary, map}
| {:cache, binary, binary}
| {:full, binary, binary, map, binary}
) :: :ok
def process({:full, dictionary_path, cache_dir, %{} = key_file_map, ets_path}) do
process({:new, dictionary_path, cache_dir, key_file_map})
process({:cache, cache_dir, ets_path})
:ok
end
def process({:new, dictionary_path, cache_dir, %{} = key_file_map}) do
{:ok, key_file_map} = Ingestion.First.Flow.run(dictionary_path, key_file_map)
cleanup(cache_dir, key_file_map)
:ok
end
def process({:cache, cache_dir, ets_path}) do
# NOTE: The Dictionary Cache process will own the ETS table
# load the ETS table since we will be storing the results in here
Dictionary.ETS.new()
Ingestion.Cache.Flow.run(cache_dir, ets_path)
:ok
end
@doc "Put data into Dictionary.ETS"
@spec put(:words | :random | :counter, term) :: :ok | no_return
def put(:words, data), do: Dictionary.ETS.put(:words, data)
def put(:random, data), do: Dictionary.ETS.put(:random, data)
def put(:counter, data), do: Dictionary.ETS.put(:counter, data)
@doc "Dumps table data into file"
@spec dump(binary) :: :ok | no_return
def dump(path), do: Dictionary.ETS.dump(path)
@doc """
Cleans up open file handles left over from writing to the cached files.
Also generates a partition manifest file signifying the initial pass
has been completed
"""
@spec cleanup(binary, map) :: :ok
def cleanup(cache_dir, %{} = key_file_map) do
# Close partition files from file_map
key_file_map
|> Enum.each(fn {_key, pid} ->
:ok = File.close(pid)
end)
# Create manifest file to signal flow initial processing is finished
manifest_path = cache_dir <> @manifest_file_name
# 'Touch' manifest file
# Future could have checksums of each partitioned file, etc..
_ =
case File.exists?(manifest_path) do
true -> :ok
false -> :ok = File.touch(manifest_path)
end
:ok
end
end
| 33.604563 | 93 | 0.68873 |
9eabe26a989e704069c7e368293c38d6425df96d | 273 | exs | Elixir | config/test.exs | cscairns/agile_pulse | 01675fc60c5ebc88e6e6c3304f2a5aa683528f7c | [
"CC0-1.0"
] | null | null | null | config/test.exs | cscairns/agile_pulse | 01675fc60c5ebc88e6e6c3304f2a5aa683528f7c | [
"CC0-1.0"
] | null | null | null | config/test.exs | cscairns/agile_pulse | 01675fc60c5ebc88e6e6c3304f2a5aa683528f7c | [
"CC0-1.0"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :agile_pulse, AgilePulseWeb.Endpoint,
http: [port: 4001],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 24.818182 | 56 | 0.74359 |
9eabe30dec380a3650383fd66201bd7f0ef0efd4 | 879 | ex | Elixir | lib/wunderground/forecast/txt_forecast_day.ex | optikfluffel/wunderground | 67ebd8fbb83f2f0d1eb1a6fba1273afa3cec8233 | [
"Unlicense"
] | 2 | 2017-08-23T21:48:07.000Z | 2017-10-16T21:35:36.000Z | lib/wunderground/forecast/txt_forecast_day.ex | optikfluffel/wunderground | 67ebd8fbb83f2f0d1eb1a6fba1273afa3cec8233 | [
"Unlicense"
] | 8 | 2017-08-23T10:02:35.000Z | 2017-09-03T11:35:36.000Z | lib/wunderground/forecast/txt_forecast_day.ex | optikfluffel/wunderground | 67ebd8fbb83f2f0d1eb1a6fba1273afa3cec8233 | [
"Unlicense"
] | 1 | 2021-06-22T15:02:15.000Z | 2021-06-22T15:02:15.000Z | defmodule Wunderground.Forecast.TXTForecastDay do
@moduledoc """
Ensures correct JSON encoding.
"""
@derive [Poison.Encoder]
defstruct ~w(fcttext fcttext_metric icon icon_url period pop title)a
@typedoc """
The Wunderground.Forecast.TXTForecastDay struct.
## Example
%Wunderground.Forecast.TXTForecastDay{
fcttext: "Considerable cloudiness. High 73F. Winds WSW at 10 to 15 mph.",
fcttext_metric: "A mix of clouds and sun. High 23C. Winds WSW at 15 to 25 km/h.",
icon: "mostlycloudy",
icon_url: "http://icons.wxug.com/i/c/k/mostlycloudy.gif",
period: 0,
pop: "10",
title: "Thursday"
}
"""
@type t :: %__MODULE__{
fcttext: String.t,
fcttext_metric: String.t,
icon: String.t,
icon_url: String.t,
period: non_neg_integer,
pop: String.t,
title: String.t
}
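# Encoding sketch (illustrative): thanks to `@derive [Poison.Encoder]` above,
# something like
#
#   Poison.encode!(%Wunderground.Forecast.TXTForecastDay{title: "Thursday", pop: "10"})
#
# serializes the struct's fields directly, with unset fields emitted as null.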
end
| 24.416667 | 89 | 0.643914 |
9eac0cb319050309959e7fbe98d517c59cc84685 | 743 | ex | Elixir | test/support/channel_case.ex | EVE-Tools/static_data | 6080c43e9cddd36df33c3ed79db9ef4c8d74e7d0 | [
"BSD-3-Clause"
] | null | null | null | test/support/channel_case.ex | EVE-Tools/static_data | 6080c43e9cddd36df33c3ed79db9ef4c8d74e7d0 | [
"BSD-3-Clause"
] | null | null | null | test/support/channel_case.ex | EVE-Tools/static_data | 6080c43e9cddd36df33c3ed79db9ef4c8d74e7d0 | [
"BSD-3-Clause"
] | null | null | null | defmodule StaticData.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
# The default endpoint for testing
@endpoint StaticData.Endpoint
end
end
setup _tags do
:ok
end
end
| 21.852941 | 56 | 0.720054 |
9eac4db5c5b8730909e3bcbd6f99f78d6bb723af | 1,519 | ex | Elixir | clients/spanner/lib/google_api/spanner/v1/model/get_database_ddl_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/spanner/lib/google_api/spanner/v1/model/get_database_ddl_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/spanner/lib/google_api/spanner/v1/model/get_database_ddl_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Spanner.V1.Model.GetDatabaseDdlResponse do
@moduledoc """
The response for GetDatabaseDdl.
## Attributes
* `statements` (*type:* `list(String.t)`, *default:* `nil`) - A list of formatted DDL statements defining the schema of the database specified in the request.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:statements => list(String.t()) | nil
}
field(:statements, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Spanner.V1.Model.GetDatabaseDdlResponse do
def decode(value, options) do
GoogleApi.Spanner.V1.Model.GetDatabaseDdlResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Spanner.V1.Model.GetDatabaseDdlResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.319149 | 162 | 0.742594 |
9eacb7b15e7a08cc84b6b87672b12bb41b0aee2c | 955 | exs | Elixir | mix.exs | acac99/credo-module-function-order-rule | a60a8641e682dde1517bc38df37c91bb23359b2b | [
"MIT"
] | 1 | 2019-09-19T10:29:24.000Z | 2019-09-19T10:29:24.000Z | mix.exs | acac99/credo-module-function-ordering | a60a8641e682dde1517bc38df37c91bb23359b2b | [
"MIT"
] | null | null | null | mix.exs | acac99/credo-module-function-ordering | a60a8641e682dde1517bc38df37c91bb23359b2b | [
"MIT"
] | null | null | null | defmodule CredoModuleFunctionOrdering.MixProject do
use Mix.Project
def project do
[
app: :credo_module_function_ordering,
version: "0.1.0",
deps: deps(),
elixirc_paths: elixirc_paths(Mix.env()),
description: "Credo rule for ordering of different function types within a module",
package: [
maintainers: ["acac99"],
licenses: ["MIT"],
maintainers: ["[email protected]"],
links: %{"Github" => "https://github.com/acac99/credo-module-function-ordering"}
]
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[]
end
defp elixirc_paths(:test), do: ["lib", "test/helper"]
defp elixirc_paths(_), do: ["lib"]
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:credo, "~> 1.1.0", only: [:dev, :test], runtime: false},
{:ex_doc, "~> 0.21.2", only: :dev, runtime: false}
]
end
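# Consumer sketch (hypothetical version constraint): a project would add
#
#   {:credo_module_function_ordering, "~> 0.1.0", only: [:dev], runtime: false}
#
# to its own deps and typically enable the check in its .credo.exs checks list.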
end
| 26.527778 | 90 | 0.613613 |
9eacf0f75056f9d75c4643da94c999a42c82b5e1 | 4,621 | exs | Elixir | test/mix/tasks/ex_oauth2_provider.gen.migration_test.exs | heroinbob/ex_oauth2_provider | 80c21a53bba0955ab3b66f1bd32cc81db0f04f49 | [
"MIT"
] | null | null | null | test/mix/tasks/ex_oauth2_provider.gen.migration_test.exs | heroinbob/ex_oauth2_provider | 80c21a53bba0955ab3b66f1bd32cc81db0f04f49 | [
"MIT"
] | null | null | null | test/mix/tasks/ex_oauth2_provider.gen.migration_test.exs | heroinbob/ex_oauth2_provider | 80c21a53bba0955ab3b66f1bd32cc81db0f04f49 | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.ExOauth2Provider.Gen.MigrationTest do
use ExOauth2Provider.Mix.TestCase
alias Mix.Tasks.ExOauth2Provider.Gen.Migration
defmodule Repo do
def __adapter__, do: true
def config, do: [priv: "tmp/#{inspect(Migration)}", otp_app: :ex_oauth2_provider]
end
@tmp_path Path.join(["tmp", inspect(Migration)])
@migrations_path Path.join(@tmp_path, "migrations")
@options ~w(-r #{inspect(Repo)})
setup do
File.rm_rf!(@tmp_path)
File.mkdir_p!(@tmp_path)
:ok
end
test "generates migrations" do
File.cd!(@tmp_path, fn ->
Migration.run(@options)
assert [migration_file] = File.ls!(@migrations_path)
assert String.match?(migration_file, ~r/^\d{14}_create_oauth_tables\.exs$/)
file = @migrations_path |> Path.join(migration_file) |> File.read!()
assert file =~ "defmodule #{inspect(Repo)}.Migrations.CreateOauthTables do"
assert file =~ "use Ecto.Migration"
assert file =~ "def change do"
assert file =~ "add :owner_id, references(:users, on_delete: :nothing)"
assert file =~ "add :resource_owner_id, references(:users, on_delete: :nothing)"
refute file =~ "add :owner_id, references(:users, on_delete: :nothing, type: :binary_id)"
refute file =~
"add :resource_owner_id, references(:users, on_delete: :nothing, type: :binary_id)"
refute file =~ ":oauth_applications, primary_key: false"
refute file =~ ":oauth_access_grants, primary_key: false"
refute file =~ ":oauth_access_tokens, primary_key: false"
refute file =~ "add :id, :binary_id, primary_key: true"
refute file =~
"add :application_id, references(:oauth_applications, on_delete: :nothing, type: binary_id)"
refute file =~ ":oauth_device_grants"
# TODO: this could be improved by testing each table independently and
# completely.
assert file =~ "add :is_trusted, :boolean, null: false, default: false"
end)
end
test "generates migrations with binary id" do
File.cd!(@tmp_path, fn ->
Migration.run(@options ++ ~w(--binary-id))
assert [migration_file] = File.ls!(@migrations_path)
file = @migrations_path |> Path.join(migration_file) |> File.read!()
refute file =~ "add :owner_id, :integer, null: false"
refute file =~ "add :resource_owner_id, :integer"
assert file =~ "add :owner_id, references(:users, on_delete: :nothing, type: :binary_id)"
assert file =~
"add :resource_owner_id, references(:users, on_delete: :nothing, type: :binary_id)"
assert file =~ ":oauth_applications, primary_key: false"
assert file =~ ":oauth_access_grants, primary_key: false"
assert file =~ ":oauth_access_tokens, primary_key: false"
assert file =~ "add :id, :binary_id, primary_key: true"
assert file =~
"add :application_id, references(:oauth_applications, on_delete: :nothing, type: :binary_id)"
end)
end
test "it creates device_grants table when --device-code option is given" do
File.cd!(@tmp_path, fn ->
Migration.run(@options ++ ~w(--device-code))
assert [migration_file] = File.ls!(@migrations_path)
file = @migrations_path |> Path.join(migration_file) |> File.read!()
assert file =~ ":oauth_applications"
assert file =~ ":oauth_access_grants"
assert file =~ ":oauth_access_tokens"
create_table_content =
[
" create table(:oauth_device_grants) do",
" add :device_code, :string, null: false",
" add :expires_in, :integer, null: false",
" add :last_polled_at, :utc_datetime",
" add :scopes, :string",
" add :user_code, :string",
" add :application_id, references(:oauth_applications, on_delete: :nothing)",
" add :resource_owner_id, references(:users, on_delete: :nothing)",
"",
" timestamps()",
" end",
"",
" create unique_index(:oauth_device_grants, [:device_code])",
" create unique_index(:oauth_device_grants, [:user_code])"
]
|> Enum.join("\n")
assert file =~ create_table_content
end)
end
test "doesn't make duplicate migrations" do
File.cd!(@tmp_path, fn ->
Migration.run(@options)
assert_raise Mix.Error,
"migration can't be created, there is already a migration file with name CreateOauthTables.",
fn ->
Migration.run(@options)
end
end)
end
end
| 36.101563 | 112 | 0.627786 |
9eacf34a2acb1df18803b107db460157be22d400 | 11,372 | ex | Elixir | lib/mix/lib/mix/tasks/new.ex | nurugger07/elixir | c859e49199bb24190656e6d3acb6cf35fe70e8bb | [
"Apache-2.0"
] | 1 | 2019-06-11T20:22:20.000Z | 2019-06-11T20:22:20.000Z | lib/mix/lib/mix/tasks/new.ex | nurugger07/elixir | c859e49199bb24190656e6d3acb6cf35fe70e8bb | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/new.ex | nurugger07/elixir | c859e49199bb24190656e6d3acb6cf35fe70e8bb | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.New do
use Mix.Task
import Mix.Generator
import Mix.Utils, only: [camelize: 1, underscore: 1]
@shortdoc "Creates a new Elixir project"
@moduledoc """
Creates a new Elixir project.
It expects the path of the project as argument.
mix new PATH [--sup] [--module MODULE] [--app APP] [--umbrella]
A project at the given PATH will be created. The
application name and module name will be retrieved
from the path, unless `--module` or `--app` is given.
A `--sup` option can be given to generate an OTP application
skeleton including a supervision tree. Normally an app is
generated without a supervisor and without the app callback.
An `--umbrella` option can be given to generate an
umbrella project.
An `--app` option can be given in order to
name the OTP application for the project.
A `--module` option can be given in order
to name the modules in the generated code skeleton.
## Examples
mix new hello_world
Is equivalent to:
mix new hello_world --module HelloWorld
To generate an app with supervisor and application callback:
mix new hello_world --sup
"""
@spec run(OptionParser.argv) :: :ok
def run(argv) do
{opts, argv, _} = OptionParser.parse(argv, switches: [sup: :boolean, umbrella: :boolean])
case argv do
[] ->
Mix.raise "Expected PATH to be given, please use \"mix new PATH\""
[path|_] ->
app = opts[:app] || Path.basename(Path.expand(path))
check_application_name!(app, !!opts[:app])
mod = opts[:module] || camelize(app)
check_mod_name_validity!(mod)
check_mod_name_availability!(mod)
File.mkdir_p!(path)
File.cd! path, fn ->
if opts[:umbrella] do
do_generate_umbrella(app, mod, path, opts)
else
do_generate(app, mod, path, opts)
end
end
end
end
defp do_generate(app, mod, path, opts) do
assigns = [app: app, mod: mod, otp_app: otp_app(mod, !!opts[:sup]),
version: get_version(System.version)]
create_file "README.md", readme_template(assigns)
create_file ".gitignore", gitignore_text
if in_umbrella? do
create_file "mix.exs", mixfile_apps_template(assigns)
else
create_file "mix.exs", mixfile_template(assigns)
end
create_directory "config"
create_file "config/config.exs", config_template(assigns)
create_directory "lib"
if opts[:sup] do
create_file "lib/#{app}.ex", lib_sup_template(assigns)
else
create_file "lib/#{app}.ex", lib_template(assigns)
end
create_directory "test"
create_file "test/test_helper.exs", test_helper_template(assigns)
create_file "test/#{app}_test.exs", test_template(assigns)
Mix.shell.info """
Your Mix project was created successfully.
You can use "mix" to compile it, test it, and more:
cd #{path}
mix test
Run "mix help" for more commands.
"""
end
defp otp_app(_mod, false) do
" [applications: [:logger]]"
end
defp otp_app(mod, true) do
" [applications: [:logger],\n mod: {#{mod}, []}]"
end
defp do_generate_umbrella(_app, mod, path, _opts) do
assigns = [mod: mod]
create_file ".gitignore", gitignore_text
create_file "README.md", readme_template(assigns)
create_file "mix.exs", mixfile_umbrella_template(assigns)
create_directory "apps"
create_directory "config"
create_file "config/config.exs",
config_umbrella_template(assigns)
Mix.shell.info """
Your umbrella project was created successfully.
Inside your project, you will find an apps/ directory
where you can create and host many apps:
cd #{path}
cd apps
mix new my_app
Commands like "mix compile" and "mix test" when executed
in the umbrella project root will automatically run
for each application in the apps/ directory.
"""
end
defp check_application_name!(name, from_app_flag) do
unless name =~ ~r/^[a-z][\w_]*$/ do
Mix.raise "Application name must start with a letter and have only lowercase " <>
"letters, numbers and underscore, got: #{inspect name}" <>
(if !from_app_flag do
". The application name is inferred from the path, if you'd like to " <>
"explicitly name the application then use the \"--app APP\" option."
else
""
end)
end
end
defp check_mod_name_validity!(name) do
unless name =~ ~r/^[A-Z]\w*(\.[A-Z]\w*)*$/ do
Mix.raise "Module name must be a valid Elixir alias (for example: Foo.Bar), got: #{inspect name}"
end
end
defp check_mod_name_availability!(name) do
name = Module.concat(Elixir, name)
if Code.ensure_loaded?(name) do
Mix.raise "Module name #{inspect name} is already taken, please choose another name"
end
end
defp get_version(version) do
{:ok, version} = Version.parse(version)
"#{version.major}.#{version.minor}" <>
case version.pre do
[h|_] -> "-#{h}"
[] -> ""
end
end
defp in_umbrella? do
apps = Path.dirname(File.cwd!)
try do
Mix.Project.in_project(:umbrella_check, "../..", fn _ ->
path = Mix.Project.config[:apps_path]
path && Path.expand(path) == apps
end)
catch
_, _ -> false
end
end
embed_template :readme, """
# <%= @mod %>
**TODO: Add description**
## Installation
If [available in Hex](https://hex.pm/docs/publish), the package can be installed as:
1. Add <%= @app %> to your list of dependencies in `mix.exs`:
def deps do
[{:<%= @app %>, "~> 0.0.1"}]
end
2. Ensure <%= @app %> is started before your application:
def application do
[applications: [:<%= @app %>]]
end
"""
embed_text :gitignore, """
/_build
/cover
/deps
erl_crash.dump
*.ez
"""
embed_template :mixfile, """
defmodule <%= @mod %>.Mixfile do
use Mix.Project
def project do
[app: :<%= @app %>,
version: "0.0.1",
elixir: "~> <%= @version %>",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
<%= @otp_app %>
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[]
end
end
"""
embed_template :mixfile_apps, """
defmodule <%= @mod %>.Mixfile do
use Mix.Project
def project do
[app: :<%= @app %>,
version: "0.0.1",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> <%= @version %>",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
<%= @otp_app %>
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# To depend on another app inside the umbrella:
#
# {:myapp, in_umbrella: true}
#
# Type "mix help deps" for more examples and options
defp deps do
[]
end
end
"""
embed_template :mixfile_umbrella, """
defmodule <%= @mod %>.Mixfile do
use Mix.Project
def project do
[apps_path: "apps",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options.
#
# Dependencies listed here are available only for this project
# and cannot be accessed from applications inside the apps folder
defp deps do
[]
end
end
"""
embed_template :config, ~S"""
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :<%= @app %>, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:<%= @app %>, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
"""
embed_template :config_umbrella, ~S"""
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# The configuration defined here will only affect the dependencies
# in the apps directory when commands are executed from the umbrella
# project. For this reason, it is preferred to configure each child
# application directly and import its configuration, as done below.
import_config "../apps/*/config/config.exs"
# Sample configuration (overrides the imported configuration above):
#
# config :logger, :console,
# level: :info,
# format: "$date $time [$level] $metadata$message\n",
# metadata: [:user_id]
"""
embed_template :lib, """
defmodule <%= @mod %> do
end
"""
embed_template :lib_sup, """
defmodule <%= @mod %> do
use Application
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec, warn: false
children = [
# Define workers and child supervisors to be supervised
# worker(<%= @mod %>.Worker, [arg1, arg2, arg3]),
]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: <%= @mod %>.Supervisor]
Supervisor.start_link(children, opts)
end
end
"""
embed_template :test, """
defmodule <%= @mod %>Test do
use ExUnit.Case
doctest <%= @mod %>
test "the truth" do
assert 1 + 1 == 2
end
end
"""
embed_template :test_helper, """
ExUnit.start()
"""
end
| 26.757647 | 103 | 0.620559 |
9eacf963aecb9af3897913542aad803b82d05684 | 2,878 | ex | Elixir | lib/yummy_web/mutations/recipes_mutations.ex | MatthieuSegret/yummy-phoenix-graphql | f0b258293697b0b120ef8e8a3b3905043c998617 | [
"MIT"
] | 122 | 2017-11-24T11:28:17.000Z | 2022-02-25T17:05:20.000Z | lib/yummy_web/mutations/recipes_mutations.ex | MatthieuSegret/yummy-phoenix-graphql | f0b258293697b0b120ef8e8a3b3905043c998617 | [
"MIT"
] | 6 | 2018-01-11T22:07:44.000Z | 2021-11-21T15:41:42.000Z | lib/yummy_web/mutations/recipes_mutations.ex | MatthieuSegret/yummy-phoenix-graphql | f0b258293697b0b120ef8e8a3b3905043c998617 | [
"MIT"
] | 25 | 2018-04-01T02:43:21.000Z | 2022-02-15T03:22:54.000Z | defmodule YummyWeb.Mutations.RecipesMutations do
use Absinthe.Schema.Notation
import Ecto.Query, warn: false
import YummyWeb.Helpers.ValidationMessageHelpers
alias YummyWeb.Schema.Middleware
alias Yummy.Repo
alias Yummy.Recipes
alias Yummy.Recipes.Recipe
input_object :recipe_input do
field(:title, :string)
field(:content, :string)
field(:total_time, :string)
field(:level, :string)
field(:budget, :string)
field(:remove_image, :boolean, default_value: false)
field(:image, :upload)
end
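# Note: every resolver below follows the same contract ({:ok, resource} on
# success, {:ok, changeset_or_message} on failure), so validation errors
# surface inside the payload rather than as top-level GraphQL errors.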
object :recipes_mutations do
@desc "Create a recipe"
field :create_recipe, :recipe_payload do
arg(:input, :recipe_input)
middleware(Middleware.Authorize)
resolve(fn %{input: params}, %{context: context} ->
case context[:current_user] |> Recipes.create(params) do
{:ok, recipe} -> {:ok, recipe}
{:error, %Ecto.Changeset{} = changeset} -> {:ok, changeset}
end
end)
end
@desc "Update a Recipe and return Recipe"
field :update_recipe, :recipe_payload do
arg(:id, non_null(:id))
arg(:input, :recipe_input)
middleware(Middleware.Authorize)
resolve(fn %{input: params} = args, %{context: context} ->
recipe =
Recipe
|> preload(:author)
|> Repo.get!(args[:id])
with true <- Recipes.is_author(context[:current_user], recipe),
{:ok, recipe_updated} <- Recipes.update(recipe, params) do
if params[:remove_image] do
# return recipe without image
{:ok, Recipes.delete_image(recipe_updated)}
else
{:ok, recipe_updated}
end
else
{:error, %Ecto.Changeset{} = changeset} -> {:ok, changeset}
{:error, msg} -> {:ok, generic_message(msg)}
end
end)
end
@desc "Destroy a Recipe"
field :delete_recipe, :recipe_payload do
arg(:id, non_null(:id))
middleware(Middleware.Authorize)
resolve(fn args, %{context: context} ->
recipe =
Recipe
|> preload(:author)
|> Repo.get!(args[:id])
case Recipes.is_author(context[:current_user], recipe) do
true -> recipe |> Recipes.delete()
{:error, msg} -> {:ok, generic_message(msg)}
end
end)
end
@desc "Create a comment to recipe"
field :create_comment, :comment_payload do
arg(:body, :string)
arg(:recipe_id, non_null(:id))
middleware(Middleware.Authorize)
resolve(fn args, %{context: context} ->
recipe = Recipe |> Repo.get!(args[:recipe_id])
case context[:current_user] |> Recipes.create_comment(recipe, %{body: args[:body]}) do
{:ok, comment} -> {:ok, comment}
{:error, %Ecto.Changeset{} = changeset} -> {:ok, changeset}
end
end)
end
end
end
| 29.670103 | 94 | 0.601459 |
9ead4356a2870a2d4f62268efab7b20d389695fe | 1,353 | exs | Elixir | test/day06_test.exs | hvnsweeting/adventofcode2018 | 8e5a85ebb7b102361b844b0f92522c18148a672a | [
"BSD-3-Clause"
] | 1 | 2022-01-10T02:34:18.000Z | 2022-01-10T02:34:18.000Z | test/day06_test.exs | hvnsweeting/adventofcode2018 | 8e5a85ebb7b102361b844b0f92522c18148a672a | [
"BSD-3-Clause"
] | null | null | null | test/day06_test.exs | hvnsweeting/adventofcode2018 | 8e5a85ebb7b102361b844b0f92522c18148a672a | [
"BSD-3-Clause"
] | 1 | 2019-12-02T09:42:17.000Z | 2019-12-02T09:42:17.000Z | defmodule Day06Test do
use ExUnit.Case, async: true
doctest Day06
@example "1, 1
1, 6
8, 3
3, 4
5, 5
8, 9"
test "greets the world" do
assert Day06.hello() == :hello
end
test "smallest grid" do
coord =
@example
|> Day06.string_to_coordinate()
|> Day06.smallest_grid()
assert coord == {{1, 1}, {8, 9}}
end
test "Manhattan distance of {1,1} {3,4} is 5" do
[first, fourth] = Day06.string_to_coordinate("1, 1\n3, 4")
assert Day06.manhattan_distance(
first,
fourth
) == 5
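# Worked out: |1 - 3| + |1 - 4| = 2 + 3 = 5.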
end
test "boundary" do
assert @example |> Day06.string_to_coordinate() |> Day06.boundery_indexes() == [
true,
true,
true,
false,
false,
true
]
end
test "generate grid" do
assert @example
|> Day06.string_to_coordinate()
|> Day06.smallest_grid()
|> Day06.generate_grid_coords()
|> length == 72
end
test "solve_example" do
assert @example |> Day06.solve_part1() == 17
end
test "part 2 example" do
assert @example |> Day06.solve_part2(32) == 16
end
test "total distance of 4,3 to all points is 30" do
assert @example
|> Day06.string_to_coordinate()
|> Day06.total_distance({4, 3}) == 30
end
end
| 20.5 | 84 | 0.552106 |
9ead50a1f023a4f57dc8d6147626f4381b279cd3 | 454 | exs | Elixir | test/lib_leaf_web/views/error_view_test.exs | gimKondo/lib-leaf | 241609a82b3c76a64263b6a151f05c0d3c4f4dd6 | [
"MIT"
] | null | null | null | test/lib_leaf_web/views/error_view_test.exs | gimKondo/lib-leaf | 241609a82b3c76a64263b6a151f05c0d3c4f4dd6 | [
"MIT"
] | null | null | null | test/lib_leaf_web/views/error_view_test.exs | gimKondo/lib-leaf | 241609a82b3c76a64263b6a151f05c0d3c4f4dd6 | [
"MIT"
] | null | null | null | defmodule LibLeafWeb.ErrorViewTest do
use LibLeafWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.json" do
assert render(LibLeafWeb.ErrorView, "404.json", []) == %{errors: %{detail: "Not Found"}}
end
test "renders 500.json" do
assert render(LibLeafWeb.ErrorView, "500.json", []) ==
%{errors: %{detail: "Internal Server Error"}}
end
end
| 28.375 | 92 | 0.682819 |
9ead60f01d75d551289c15f48b947c69466830ff | 663 | exs | Elixir | test/controllers/module_controller_test.exs | lee-dohm/ship-designer | 641b6f44aa5efc676ee3f9251618eeafb2b89bad | [
"MIT"
] | 1 | 2020-01-26T18:06:25.000Z | 2020-01-26T18:06:25.000Z | test/controllers/module_controller_test.exs | lee-dohm/ship-designer | 641b6f44aa5efc676ee3f9251618eeafb2b89bad | [
"MIT"
] | null | null | null | test/controllers/module_controller_test.exs | lee-dohm/ship-designer | 641b6f44aa5efc676ee3f9251618eeafb2b89bad | [
"MIT"
] | null | null | null | defmodule ShipDesigner.ModuleControllerTest do
use ShipDesigner.ConnCase
alias ShipDesigner.Module
test "lists all entries on index", %{conn: conn} do
conn = get conn, module_path(conn, :index)
assert html_response(conn, 200) =~ "Modules"
end
test "shows chosen resource", %{conn: conn} do
module = Repo.insert! %Module{}
conn = get conn, module_path(conn, :show, module)
assert html_response(conn, 200) =~ "Category"
end
test "renders page not found when id is nonexistent", %{conn: conn} do
assert_error_sent 404, fn ->
get conn, module_path(conn, :show, "11111111-1111-1111-1111-111111111111")
end
end
end
| 28.826087 | 80 | 0.695324 |
9ead7ebee9f3a2d5adb2a59726e2aa29942a7d7b | 106 | exs | Elixir | source/elixir_clients/doorpi/apps/fw/test/fw_test.exs | rveshovda/pifog | 127c2de6ff2666ebc9987d8c2cfd5431ce5ff888 | [
"Apache-2.0"
] | 16 | 2016-09-14T16:57:33.000Z | 2017-11-11T16:31:14.000Z | apps/fw/test/fw_test.exs | royveshovda/buildstuff2016 | edb7c283a84a939383f6869eb623030d7316a91b | [
"MIT"
] | 5 | 2016-09-08T07:14:44.000Z | 2018-11-09T12:34:19.000Z | apps/fw/test/fw_test.exs | royveshovda/buildstuff2016 | edb7c283a84a939383f6869eb623030d7316a91b | [
"MIT"
] | 2 | 2017-12-09T02:07:22.000Z | 2021-01-06T15:30:03.000Z | defmodule FwTest do
use ExUnit.Case
doctest Fw
test "the truth" do
assert 1 + 1 == 2
end
end
| 11.777778 | 21 | 0.641509 |
9ead84618c31ac31d2fef3675c44023b08a1cc9c | 2,921 | ex | Elixir | apps/core/lib/core/policies/repository.ex | asamoal/plural | 5b336f27cb2d775560e35e5323192c42d62e72f5 | [
"Apache-2.0"
] | null | null | null | apps/core/lib/core/policies/repository.ex | asamoal/plural | 5b336f27cb2d775560e35e5323192c42d62e72f5 | [
"Apache-2.0"
] | null | null | null | apps/core/lib/core/policies/repository.ex | asamoal/plural | 5b336f27cb2d775560e35e5323192c42d62e72f5 | [
"Apache-2.0"
] | null | null | null | defmodule Core.Policies.Repository do
use Piazza.Policy
import Core.Policies.Utils
alias Core.Schema.{User, Installation, Repository, Integration, Artifact, DockerRepository, ApplyLock}
def can?(%User{} = user, %Integration{} = integ, policy) do
%{repository: repo} = Core.Repo.preload(integ, [:repository])
can?(user, repo, policy)
end
def can?(%User{} = user, %Artifact{} = art, policy) do
%{repository: repo} = Core.Repo.preload(art, [:repository])
can?(user, repo, policy)
end
def can?(%User{account_id: aid} = user, %Repository{} = repo, :support) do
case Core.Repo.preload(repo, [:publisher]) do
%{publisher: %{account_id: ^aid}} ->
check_rbac(user, :support, repository: repo.name)
_ -> {:error, :forbidden}
end
end
def can?(%User{account_id: aid}, %Repository{private: true} = repo, :access) do
case Core.Repo.preload(repo, [:publisher]) do
%{publisher: %{account_id: ^aid}} -> :continue
_ -> {:error, :forbidden}
end
end
def can?(%User{} = user, %DockerRepository{} = dkr, :edit) do
%{repository: repo} = Core.Repo.preload(dkr, [repository: [publisher: :account]])
can?(user, repo, :edit)
end
def can?(%User{id: id}, %ApplyLock{owner_id: id}, _), do: :pass
def can?(%User{} = user, %ApplyLock{owner_id: nil} = lock, :create) do
%{repository: repo} = Core.Repo.preload(lock, [repository: [publisher: :account]])
can?(user, repo, :edit)
end
def can?(_, %ApplyLock{inserted_at: ins, updated_at: upd}, :create) do
touched = upd || ins
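# Treat the lock as still in use if it was touched within the last five minutes.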
Timex.now()
|> Timex.shift(minutes: -5)
|> Timex.before?(touched)
|> case do
true -> {:error, "lock already in use"}
false -> :pass
end
end
def can?(%User{}, %Repository{}, :access), do: :continue
def can?(%User{account_id: aid, id: user_id}, %Repository{} = repo, :pull) do
case Core.Repo.preload(repo, [:publisher]) do
%{publisher: %{account_id: ^aid}} -> :continue
%{publisher: %{owner_id: ^user_id}} -> :continue
_ ->
if Core.Services.Repositories.get_installation(user_id, repo.id),
do: :continue, else: {:error, :forbidden}
end
end
def can?(%User{} = user, %Repository{} = repo, action) when action in [:create, :edit] do
case Core.Repo.preload(repo, [publisher: :account]) do
%{publisher: pub} -> Core.Policies.Publisher.can?(user, pub, :edit)
_ -> {:error, :forbidden}
end
end
def can?(%User{id: user_id}, %Installation{user_id: user_id}, action) when action in [:edit, :access],
do: :continue
def can?(%User{} = user, %Installation{} = inst, :create) do
%{repository: repo} = Core.Repo.preload(inst, [:repository])
check_rbac(user, :install, repository: repo.name)
end
def can?(user, %Ecto.Changeset{} = cs, action),
do: can?(user, apply_changes(cs), action)
def can?(_, _, _), do: {:error, :forbidden}
end
| 34.77381 | 104 | 0.625813 |
9eadc2d11ada3b972d81196ae04c7482e6152695 | 2,286 | exs | Elixir | test/ecto/migrator_repo_test.exs | nasrulgunawan/ecto_sql | ace2c9daf07190a3f7debfa2060cd3ddd251b6c7 | [
"Apache-2.0"
] | 384 | 2018-10-03T17:52:39.000Z | 2022-03-24T17:54:21.000Z | test/ecto/migrator_repo_test.exs | nasrulgunawan/ecto_sql | ace2c9daf07190a3f7debfa2060cd3ddd251b6c7 | [
"Apache-2.0"
] | 357 | 2018-10-06T13:47:33.000Z | 2022-03-29T08:18:02.000Z | test/ecto/migrator_repo_test.exs | nasrulgunawan/ecto_sql | ace2c9daf07190a3f7debfa2060cd3ddd251b6c7 | [
"Apache-2.0"
] | 251 | 2018-10-04T11:06:41.000Z | 2022-03-29T07:22:53.000Z | defmodule Ecto.MigratorRepoTest do
use ExUnit.Case
import Ecto.Migrator
import ExUnit.CaptureLog
defmodule Migration do
use Ecto.Migration
def up do
execute "up"
end
def down do
execute "down"
end
end
defmodule ChangeMigration do
use Ecto.Migration
def change do
create table(:posts) do
add :name, :string
end
create index(:posts, [:name])
end
end
defmodule MainRepo do
use Ecto.Repo, otp_app: :ecto_sql, adapter: EctoSQL.TestAdapter
end
defmodule MigrationRepo do
use Ecto.Repo, otp_app: :ecto_sql, adapter: EctoSQL.TestAdapter
end
Application.put_env(:ecto_sql, MainRepo, [migration_repo: MigrationRepo])
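# Equivalent static configuration (sketch) in config/config.exs:
#
#   config :ecto_sql, MainRepo, migration_repo: MigrationRepo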
setup do
{:ok, _} = start_supervised({MigrationsAgent, [{1, nil}, {2, nil}, {3, nil}]})
:ok
end
def put_test_adapter_config(config) do
Application.put_env(:ecto_sql, EctoSQL.TestAdapter, config)
on_exit fn ->
Application.delete_env(:ecto_sql, EctoSQL.TestAdapter)
end
end
setup_all do
{:ok, _pid} = MainRepo.start_link()
{:ok, _pid} = MigrationRepo.start_link()
:ok
end
describe "migration_repo option" do
test "upwards and downwards migrations" do
assert run(MainRepo, [{3, ChangeMigration}, {4, Migration}], :up, to: 4, log: false) == [4]
assert run(MainRepo, [{2, ChangeMigration}, {3, Migration}], :down, all: true, log: false) == [3, 2]
end
test "down invokes the repository adapter with down commands" do
assert down(MainRepo, 0, Migration, log: false) == :already_down
assert down(MainRepo, 2, Migration, log: false) == :ok
end
test "up invokes the repository adapter with up commands" do
assert up(MainRepo, 3, Migration, log: false) == :already_up
assert up(MainRepo, 4, Migration, log: false) == :ok
end
test "migrations run inside a transaction if the adapter supports ddl transactions when configuring a migration repo" do
capture_log fn ->
put_test_adapter_config(supports_ddl_transaction?: true, test_process: self())
up(MainRepo, 0, Migration)
assert_receive {:transaction, %{repo: MainRepo}, _}
assert_receive {:lock_for_migrations, %{repo: MigrationRepo}, _, _}
end
end
end
end
| 26.275862 | 124 | 0.667542 |
9eadfddbb033a74cf2a6e642450c05f27015d29f | 10,342 | exs | Elixir | test/codec/encoder_test.exs | nocursor/saucexages | 33e986a652306b2c54ad4891db7a27d78ed0d7cf | [
"MIT"
] | 7 | 2018-11-01T15:47:05.000Z | 2021-05-19T10:07:23.000Z | test/codec/encoder_test.exs | nocursor/saucexages | 33e986a652306b2c54ad4891db7a27d78ed0d7cf | [
"MIT"
] | null | null | null | test/codec/encoder_test.exs | nocursor/saucexages | 33e986a652306b2c54ad4891db7a27d78ed0d7cf | [
"MIT"
] | null | null | null | defmodule Saucexages.EncoderTest do
use ExUnit.Case, async: true
import Saucexages.Codec.Encoder
require Codepagex
alias Codepagex
alias Saucexages.SauceBlock
test "encode_string/2 encodes a string to the proper size with space padding" do
assert encode_string("12345", 5) == "12345"
#overrun
assert encode_string("12345", 4) == "1234"
#underrun
assert encode_string("12345", 6) == "12345 "
assert encode_string("", 5) == <<32, 32, 32, 32, 32>>
end
test "encode_string/2 handles unicode characters" do
#japanese cheese
assert encode_string("チーズ", 3) == <<32, 32, 32>>
assert encode_string("チーズ is chizu", 12) == <<32, 105, 115, 32, 99, 104, 105, 122, 117, 32, 32, 32>>
end
test "encode_cstring encodes a string to the proper size with 0 padding" do
assert encode_cstring("12345", 5) == "12345"
assert encode_cstring("12345", 4) == "1234"
assert encode_cstring("12345", 6) == <<"12345", 0>>
assert encode_cstring("", 5) == <<0, 0, 0, 0, 0>>
end
test "encode_cstring/2 handles unicode" do
assert encode_cstring("チーズ", 3) == <<0, 0, 0>>
assert encode_cstring("チーズ is chizu", 12) == <<32, 105, 115, 32, 99, 104, 105, 122, 117, 0, 0, 0>>
end
test "encode_date/1 encodes an elixir datetime as a SAUCE data" do
# single digit month
dt1 = %DateTime{
year: 2000,
month: 2,
day: 29,
zone_abbr: "AMT",
hour: 23,
minute: 0,
second: 7,
microsecond: {0, 0},
utc_offset: -14400,
std_offset: 0,
time_zone: "America/Manaus"
}
# single digit day
dt2 = %DateTime{
year: 2000,
month: 12,
day: 1,
zone_abbr: "AMT",
hour: 23,
minute: 0,
second: 7,
microsecond: {0, 0},
utc_offset: -14400,
std_offset: 0,
time_zone: "America/Manaus"
}
# back when the world was good
dt3 = %DateTime{
year: 1994,
month: 2,
day: 2,
zone_abbr: "AMT",
hour: 23,
minute: 59,
second: 59,
microsecond: {0, 0},
utc_offset: -14400,
std_offset: 0,
time_zone: "America/Manaus"
}
assert encode_date(dt1) == "20000229"
assert encode_date(dt2) == "20001201"
assert encode_date(dt3) == "19940202"
end
test "encode_version/1 encodes a version string" do
assert encode_version("00") == "00"
assert encode_version("0") == <<"0", 32>>
assert encode_version("チーズ") == <<32, 32>>
assert encode_version(<<32, "00">>) == "00"
end
test "encode_integer/2 encodes a SAUCE integer of the given size" do
assert encode_integer(0, 1) == <<0>>
assert encode_integer(255, 1) == <<255>>
assert encode_integer(32767, 2) == <<255, 127>>
end
test "encode_integer/2 handles overflow" do
#overflow
assert encode_integer(32767, 1) == <<255>>
end
test "encode_integer/2 handles underflow" do
#padding
assert encode_integer(0, 2) == <<0, 0>>
assert encode_integer(2, 2) == <<2, 0>>
end
test "encode_integer/2 handles byte order" do
#byte order
assert encode_integer(32767, 3) == <<255, 127, 0>>
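# Worked out: 32767 = 0x7FFF, stored little-endian as <<0xFF, 0x7F>> and
# zero-padded at the high end to fill the 3-byte field.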
end
test "encode_integer/2 handles coercing to unsigned" do
#unsigned coerce
assert encode_integer(-255, 1) == <<1>>
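# Worked out: -255 wrapped to 8 unsigned bits is 256 - 255 = 1.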
end
describe "Encoding a SAUCE Block" do
setup do
ansi_sauce = %SauceBlock{
author: "RaD MaN",
comments: ["test notes", "second line", "more test", "",
"after a blank line"],
date: ~D[1994-08-31],
media_info: %Saucexages.MediaInfo{
data_type: 1,
file_size: 8900,
file_type: 1,
t_flags: 52,
t_info_1: 80,
t_info_2: 97,
t_info_3: 16,
t_info_4: 72,
t_info_s: "IBM VGA"
},
group: "ACiD Productions",
title: "ACiD 1994 Member/Board Listing",
version: "00"
}
rip_sauce = %SauceBlock{
author: "ReDMaN",
comments: [],
date: ~D[1994-08-29],
media_info: %Saucexages.MediaInfo{
data_type: 1,
file_size: 30441,
file_type: 3,
t_flags: 0,
t_info_1: 640,
t_info_2: 350,
t_info_3: 16,
t_info_4: 0,
t_info_s: ""
},
group: "ACiD Productions",
title: "Corruption Ad",
version: "00"
}
%{ansi_sauce: ansi_sauce, rip_sauce: rip_sauce}
end
test "encode_field/1 handles encoding all specific SAUCE block fields to binary", %{ansi_sauce: ansi_sauce, rip_sauce: rip_sauce} do
assert encode_field(:version, ansi_sauce) == <<"00">>
assert encode_field(:title, ansi_sauce) == "ACiD 1994 Member/Board Listing "
assert encode_field(:author, ansi_sauce) == "RaD MaN "
assert encode_field(:group, ansi_sauce) == "ACiD Productions "
assert encode_field(:date, ansi_sauce) == "19940831"
assert encode_field(:file_size, ansi_sauce) == <<196, 34, 0, 0>>
assert encode_field(:data_type, ansi_sauce) == <<1>>
assert encode_field(:file_type, ansi_sauce) == <<1>>
assert encode_field(:t_info_1, ansi_sauce) == <<80, 0>>
assert encode_field(:t_info_2, ansi_sauce) == <<97, 0>>
assert encode_field(:t_info_3, ansi_sauce) == <<16, 0>>
assert encode_field(:t_info_4, ansi_sauce) == <<72, 0>>
assert encode_field(:comment_lines, ansi_sauce) == <<5>>
assert encode_field(:t_flags, ansi_sauce) == <<52>>
assert encode_field(:t_info_s, ansi_sauce) == <<73, 66, 77, 32, 86, 71, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>
assert encode_field(:version, rip_sauce) == <<"00">>
assert encode_field(:title, rip_sauce) == "Corruption Ad "
assert encode_field(:author, rip_sauce) == "ReDMaN "
assert encode_field(:group, rip_sauce) == "ACiD Productions "
assert encode_field(:date, rip_sauce) == "19940829"
assert encode_field(:file_size, rip_sauce) == <<233, 118, 0, 0>>
assert encode_field(:data_type, rip_sauce) == <<1>>
assert encode_field(:file_type, rip_sauce) == <<3>>
assert encode_field(:t_info_1, rip_sauce) == <<128, 2>>
assert encode_field(:t_info_2, rip_sauce) == <<94, 1>>
assert encode_field(:t_info_3, rip_sauce) == <<16, 0>>
assert encode_field(:t_info_4, rip_sauce) == <<0, 0>>
assert encode_field(:comment_lines, rip_sauce) == <<0>>
assert encode_field(:t_flags, rip_sauce) == <<0>>
assert encode_field(:t_info_s, rip_sauce) == <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>
end
test "encode_record/1 handles encoding a SAUCE block into a binary SAUCE record", %{ansi_sauce: ansi_sauce, rip_sauce: rip_sauce} do
{:ok, ansi_record} = encode_record(ansi_sauce)
refute is_nil(ansi_record)
assert is_binary(ansi_record)
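      # per the SAUCE spec, a record is always exactly 128 bytes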
assert byte_size(ansi_record) == 128
      # ensure the record begins with the "SAUCE" id and version "00"
assert :binary.part(ansi_record, 0, 5) == "SAUCE"
assert :binary.part(ansi_record, 5, 2) == "00"
assert ansi_record == <<83, 65, 85, 67, 69, 48, 48, 65, 67, 105, 68, 32, 49, 57, 57, 52, 32, 77, 101,
109, 98, 101, 114, 47, 66, 111, 97, 114, 100, 32, 76, 105, 115, 116, 105,
110, 103, 32, 32, 32, 32, 32, 82, 97, 68, 32, 77, 97, 78, 32, 32, 32, 32, 32,
32, 32, 32, 32, 32, 32, 32, 32, 65, 67, 105, 68, 32, 80, 114, 111, 100, 117,
99, 116, 105, 111, 110, 115, 32, 32, 32, 32, 49, 57, 57, 52, 48, 56, 51, 49,
196, 34, 0, 0, 1, 1, 80, 0, 97, 0, 16, 0, 72, 0, 5, 52, 73, 66, 77, 32, 86,
71, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>
{:ok, rip_record} = encode_record(rip_sauce)
refute is_nil(rip_record)
assert is_binary(rip_record)
assert byte_size(rip_record) == 128
assert :binary.part(rip_record, 0, 5) == "SAUCE"
assert :binary.part(rip_record, 5, 2) == "00"
assert rip_record == <<83, 65, 85, 67, 69, 48, 48, 67, 111, 114, 114, 117, 112, 116, 105, 111, 110,
32, 65, 100, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
32, 32, 32, 32, 32, 32, 82, 101, 68, 77, 97, 78, 32, 32, 32, 32, 32, 32, 32,
32, 32, 32, 32, 32, 32, 32, 65, 67, 105, 68, 32, 80, 114, 111, 100, 117, 99,
116, 105, 111, 110, 115, 32, 32, 32, 32, 49, 57, 57, 52, 48, 56, 50, 57, 233,
118, 0, 0, 1, 3, 128, 2, 94, 1, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>
end
test "encode_comments/1 encodes the SAUCE block comments in a SAUCE comment block", %{ansi_sauce: ansi_sauce, rip_sauce: rip_sauce} do
{:ok, ansi_comments} = encode_comments(ansi_sauce)
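      # 5-byte "COMNT" id + 5 comment lines x 64 bytes each = 325 bytes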
assert byte_size(ansi_comments) == 325
assert :binary.part(ansi_comments, 0, 5) == "COMNT"
{:ok, rip_comments} = encode_comments(rip_sauce)
assert byte_size(rip_comments) == 0
end
end
test "encode_comment_block_line/1 encodes a single SAUCE comment line" do
comment_line = "hello world"
encoded_line = encode_comment_block_line(comment_line)
assert byte_size(encoded_line) == 64
assert encoded_line === "hello world "
max_line = String.pad_trailing("", 64, "1")
encoded_max_line = encode_comment_block_line(max_line)
assert byte_size(encoded_max_line) == 64
assert encoded_max_line == "1111111111111111111111111111111111111111111111111111111111111111"
end
test "encode_comment_block_line/1 truncates lines that are too long" do
comment_line = String.pad_trailing("", 65, "1")
encoded_line = encode_comment_block_line(comment_line)
assert byte_size(encoded_line) == 64
assert encoded_line === "1111111111111111111111111111111111111111111111111111111111111111"
end
test "encode_comment_block_line/1 handles unicode" do
line_1 = encode_comment_block_line("チーズ")
assert byte_size(line_1) == 64
assert line_1 == " "
line_2 = encode_comment_block_line("チーズ is chizu")
assert line_2 == " is chizu "
assert byte_size(line_2) == 64
end
end | 37.471014 | 138 | 0.587217 |
9eae12e38d44ea0ace6885ae1d061b9ac1938736 | 2,030 | ex | Elixir | clients/firebase_app_check/lib/google_api/firebase_app_check/v1beta/model/google_firebase_appcheck_v1beta_app_attest_challenge_response.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/firebase_app_check/lib/google_api/firebase_app_check/v1beta/model/google_firebase_appcheck_v1beta_app_attest_challenge_response.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | ["Apache-2.0"] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/firebase_app_check/lib/google_api/firebase_app_check/v1beta/model/google_firebase_appcheck_v1beta_app_attest_challenge_response.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.FirebaseAppCheck.V1beta.Model.GoogleFirebaseAppcheckV1betaAppAttestChallengeResponse do
@moduledoc """
Response message for the GenerateAppAttestChallenge method.
## Attributes
* `challenge` (*type:* `String.t`, *default:* `nil`) - A one-time use challenge for the client to pass to the App Attest API.
* `ttl` (*type:* `String.t`, *default:* `nil`) - The duration from the time this challenge is minted until its expiration. This field is intended to ease client-side token management, since the client may have clock skew, but is still able to accurately measure a duration.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:challenge => String.t() | nil,
:ttl => String.t() | nil
}
field(:challenge)
field(:ttl)
end
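# A hedged usage sketch (the payload below is illustrative, not taken from the
# API): the Poison implementations that follow let a JSON response body be
# decoded directly into this struct.
#
#   body = ~s({"challenge": "c2FtcGxlLWNoYWxsZW5nZQ==", "ttl": "3600s"})
#   %{challenge: challenge, ttl: ttl} =
#     Poison.decode!(body,
#       as: %GoogleApi.FirebaseAppCheck.V1beta.Model.GoogleFirebaseAppcheckV1betaAppAttestChallengeResponse{}
#     )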
defimpl Poison.Decoder,
for:
GoogleApi.FirebaseAppCheck.V1beta.Model.GoogleFirebaseAppcheckV1betaAppAttestChallengeResponse do
def decode(value, options) do
GoogleApi.FirebaseAppCheck.V1beta.Model.GoogleFirebaseAppcheckV1betaAppAttestChallengeResponse.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for:
GoogleApi.FirebaseAppCheck.V1beta.Model.GoogleFirebaseAppcheckV1betaAppAttestChallengeResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end | 35.614035 | 277 | 0.747291 |
9eae60ee3e90cc57ffacaad61531d40f8ff73b30 | 2,258 | ex | Elixir | lib/hexa_web/telemetry.ex | libreearth/hexa | 81938c3a5abc710eb16055d73c43cbf60dbf487e | ["MIT"] | null | null | null | lib/hexa_web/telemetry.ex | libreearth/hexa | 81938c3a5abc710eb16055d73c43cbf60dbf487e | ["MIT"] | null | null | null | lib/hexa_web/telemetry.ex | libreearth/hexa | 81938c3a5abc710eb16055d73c43cbf60dbf487e | ["MIT"] | null | null | null |
defmodule HexaWeb.Telemetry do
use Supervisor
import Telemetry.Metrics
def start_link(arg) do
Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
end
@impl true
def init(_arg) do
children = [
# Telemetry poller will execute the given period measurements
# every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
{:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
# Add reporters as children of your supervision tree.
# {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
]
Supervisor.init(children, strategy: :one_for_one)
end
def metrics do
[
# Phoenix Metrics
summary("phoenix.endpoint.stop.duration",
unit: {:native, :millisecond}
),
summary("phoenix.router_dispatch.stop.duration",
tags: [:route],
unit: {:native, :millisecond}
),
# Database Metrics
summary("hexa.repo.query.total_time",
unit: {:native, :millisecond},
description: "The sum of the other measurements"
),
summary("hexa.repo.query.decode_time",
unit: {:native, :millisecond},
description: "The time spent decoding the data received from the database"
),
summary("hexa.repo.query.query_time",
unit: {:native, :millisecond},
description: "The time spent executing the query"
),
summary("hexa.repo.query.queue_time",
unit: {:native, :millisecond},
description: "The time spent waiting for a database connection"
),
summary("hexa.repo.query.idle_time",
unit: {:native, :millisecond},
description:
"The time the connection spent waiting before being checked out for the query"
),
# VM Metrics
summary("vm.memory.total", unit: {:byte, :kilobyte}),
summary("vm.total_run_queue_lengths.total"),
summary("vm.total_run_queue_lengths.cpu"),
summary("vm.total_run_queue_lengths.io")
]
end
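  # The metrics above are meant to be consumed by a reporter. A hedged example
  # of one common wiring, assuming phoenix_live_dashboard is a dependency (it
  # is not shown in this module), placed in the router:
  #
  #   live_dashboard "/dashboard", metrics: HexaWeb.Telemetry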
defp periodic_measurements do
[
# A module, function and arguments to be invoked periodically.
# This function must call :telemetry.execute/3 and a metric must be added above.
# {HexaWeb, :count_users, []}
]
end
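  # A hedged sketch of one such periodic measurement; the function, the event
  # name, and Hexa.Accounts.count_users/0 are illustrative, not part of this
  # module:
  #
  #   def count_users do
  #     :telemetry.execute([:hexa, :users], %{total: Hexa.Accounts.count_users()}, %{})
  #   end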
end | 31.361111 | 88 | 0.650133 |