| Column | Type | Range / values |
| --- | --- | --- |
| hexsha | string | lengths 40..40 |
| size | int64 | 2..991k |
| ext | string | 2 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | lengths 4..208 |
| max_stars_repo_name | string | lengths 6..106 |
| max_stars_repo_head_hexsha | string | lengths 40..40 |
| max_stars_repo_licenses | sequence | |
| max_stars_count | int64 | 1..33.5k (nullable) |
| max_stars_repo_stars_event_min_datetime | string | lengths 24..24 (nullable) |
| max_stars_repo_stars_event_max_datetime | string | lengths 24..24 (nullable) |
| max_issues_repo_path | string | lengths 4..208 |
| max_issues_repo_name | string | lengths 6..106 |
| max_issues_repo_head_hexsha | string | lengths 40..40 |
| max_issues_repo_licenses | sequence | |
| max_issues_count | int64 | 1..16.3k (nullable) |
| max_issues_repo_issues_event_min_datetime | string | lengths 24..24 (nullable) |
| max_issues_repo_issues_event_max_datetime | string | lengths 24..24 (nullable) |
| max_forks_repo_path | string | lengths 4..208 |
| max_forks_repo_name | string | lengths 6..106 |
| max_forks_repo_head_hexsha | string | lengths 40..40 |
| max_forks_repo_licenses | sequence | |
| max_forks_count | int64 | 1..6.91k (nullable) |
| max_forks_repo_forks_event_min_datetime | string | lengths 24..24 (nullable) |
| max_forks_repo_forks_event_max_datetime | string | lengths 24..24 (nullable) |
| content | string | lengths 2..991k |
| avg_line_length | float64 | 1..36k |
| max_line_length | int64 | 1..977k |
| alphanum_fraction | float64 | 0..1 |
93fecfd72d5886208c985183d5c8b608d6206856 | 1,264 | exs | Elixir | test/test_helper.exs | YgorCastor/ravix-ecto | 63badc62e9ea2b38c7667d4ee1bfa8cb7c1cf371 | ["Apache-2.0"] | 1 | 2022-03-30T14:56:00.000Z | 2022-03-30T14:56:00.000Z | test/test_helper.exs | YgorCastor/ravix-ecto | 63badc62e9ea2b38c7667d4ee1bfa8cb7c1cf371 | ["Apache-2.0"] | null | null | null | test/test_helper.exs | YgorCastor/ravix-ecto | 63badc62e9ea2b38c7667d4ee1bfa8cb7c1cf371 | ["Apache-2.0"] | null | null | null |
ExUnit.start(
exclude: [
:todo,
:aggregations
]
)
Application.put_env(:ecto, :primary_key_type, :binary_id)
Application.put_env(:ecto, :async_integration_tests, false)
defmodule Ecto.Integration.Repo do
defmacro __using__(opts) do
quote do
use Ecto.Repo, unquote(opts)
@query_event __MODULE__
|> Module.split()
|> Enum.map(&(&1 |> Macro.underscore() |> String.to_atom()))
|> Kernel.++([:query])
def init(_, opts) do
fun = &Ecto.Integration.Repo.handle_event/4
:telemetry.attach_many(__MODULE__, [[:custom], @query_event], fun, :ok)
{:ok, opts}
end
end
end
def handle_event(event, latency, metadata, _config) do
handler = Process.delete(:telemetry) || fn _, _, _ -> :ok end
handler.(event, latency, metadata)
end
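  # Illustrative note (assumed usage, not part of the original helper): a test can
  # intercept query telemetry by placing a three-arity handler in its process
  # dictionary before the query runs, e.g.
  #
  #   Process.put(:telemetry, fn event, measurements, metadata ->
  #     send(self(), {event, metadata})
  #   end)
  #
  # When no handler has been put, the no-op fallback above simply returns :ok.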
end
defmodule Ecto.Integration.Case do
use ExUnit.CaseTemplate
alias Ecto.Integration.TestRepo
alias Ravix.Ecto.TestStore
import Ravix.RQL.Query
setup_all do
:ok
end
setup do
_ = start_supervised!(TestRepo)
{:ok, session_id} = TestStore.open_session()
{:ok, _} = from("@all_docs") |> delete_for(session_id)
_ = TestStore.close_session(session_id)
:ok
end
end
| 22.175439 | 79 | 0.634494 |
93fee322874d560e61dd11e80239b63569efa552 | 312 | exs | Elixir | test/fixtures/system_v1/mix.exs | tverlaan/nerves | 515b9b7730a1b2934ac051b0e7075cd7987b6c4a | ["Apache-2.0"] | null | null | null | test/fixtures/system_v1/mix.exs | tverlaan/nerves | 515b9b7730a1b2934ac051b0e7075cd7987b6c4a | ["Apache-2.0"] | null | null | null | test/fixtures/system_v1/mix.exs | tverlaan/nerves | 515b9b7730a1b2934ac051b0e7075cd7987b6c4a | ["Apache-2.0"] | null | null | null |
defmodule SystemV1.Fixture.Mixfile do
use Mix.Project
@version Path.join(__DIR__, "VERSION")
|> File.read!
|> String.strip
def project do
[app: :system_v1,
version: @version,
deps: deps()]
end
defp deps do
[{:toolchain_v1, path: "../toolchain_v1"}]
end
end
| 17.333333 | 46 | 0.599359 |
93ff1eece2d6944ddf16a79439b4e5e70583f7ae | 2,111 | ex | Elixir | clients/text_to_speech/lib/google_api/text_to_speech/v1beta1/model/voice.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/text_to_speech/lib/google_api/text_to_speech/v1beta1/model/voice.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/text_to_speech/lib/google_api/text_to_speech/v1beta1/model/voice.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.TextToSpeech.V1beta1.Model.Voice do
@moduledoc """
Description of a voice supported by the TTS service.
## Attributes
* `languageCodes` (*type:* `list(String.t)`, *default:* `nil`) - The languages that this voice supports, expressed as [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. "en-US", "es-419", "cmn-tw").
* `name` (*type:* `String.t`, *default:* `nil`) - The name of this voice. Each distinct voice has a unique name.
* `naturalSampleRateHertz` (*type:* `integer()`, *default:* `nil`) - The natural sample rate (in hertz) for this voice.
* `ssmlGender` (*type:* `String.t`, *default:* `nil`) - The gender of this voice.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:languageCodes => list(String.t()) | nil,
:name => String.t() | nil,
:naturalSampleRateHertz => integer() | nil,
:ssmlGender => String.t() | nil
}
field(:languageCodes, type: :list)
field(:name)
field(:naturalSampleRateHertz)
field(:ssmlGender)
end
defimpl Poison.Decoder, for: GoogleApi.TextToSpeech.V1beta1.Model.Voice do
def decode(value, options) do
GoogleApi.TextToSpeech.V1beta1.Model.Voice.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.TextToSpeech.V1beta1.Model.Voice do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.696429 | 226 | 0.700616 |
93ff200316dd9691161cb56e8670fbfc52e2865f | 1,454 | ex | Elixir | lib/magic_number.ex | ishikawa/elixir-magic-number | 7cd096f71a1bb890151ef383d5d8c9280061feef | ["MIT"] | 8 | 2016-03-30T13:42:48.000Z | 2021-12-10T15:03:37.000Z | lib/magic_number.ex | ishikawa/elixir-magic-number | 7cd096f71a1bb890151ef383d5d8c9280061feef | ["MIT"] | null | null | null | lib/magic_number.ex | ishikawa/elixir-magic-number | 7cd096f71a1bb890151ef383d5d8c9280061feef | ["MIT"] | 3 | 2020-04-18T13:41:28.000Z | 2021-10-03T15:17:42.000Z |
defmodule MagicNumber do
@moduledoc """
The module to determine a file's type from its
[magic number](https://en.wikipedia.org/wiki/File_format#Magic_number).
"""
@typedoc """
A media type is a two-part identifier for file format. For example:
```elixir
{:application, :zip} # application/zip
{:image, :png} # image/png
```
See [IANA list of official media types](https://www.iana.org/assignments/media-types/media-types.xhtml).
"""
@type media_type :: {atom, atom}
# Rules :: [{media_type, [binary]}]
@rules [
# image
{{:image, :gif}, ["GIF87a", "GIF89a"]},
{{:image, :jpeg}, [<<0xFF, 0xD8, 0xFF>>]},
{{:image, :png}, [<<0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A>>]},
{{:image, :tiff}, ["II*\0", "MM\0*"]},
# application
{{:application, :pdf}, ["%PDF"]},
{{:application, :zip},
[
<<0x50, 0x4B, 0x03, 0x04>>,
<<0x50, 0x4B, 0x05, 0x06>>,
<<0x50, 0x4B, 0x07, 0x08>>
]},
{{:application, :gzip}, [<<0x1F, 0x8B>>]}
]
@doc """
Determine media type from its contents.
## Examples
iex> MagicNumber.detect("GIF89a...")
{:ok, {:image, :gif}}
iex> MagicNumber.detect(<<>>)
:error
"""
@spec detect(binary) :: {:ok, media_type} | :error
for {media_type, headers} <- @rules, magic <- headers do
def detect(unquote(magic) <> _), do: {:ok, unquote(media_type)}
end
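  # For illustration (comment added for clarity, not in the original source): the
  # comprehension above unrolls at compile time into one `detect/1` clause per magic
  # number, roughly equivalent to hand-written clauses such as:
  #
  #   def detect("GIF87a" <> _), do: {:ok, {:image, :gif}}
  #   def detect(<<0xFF, 0xD8, 0xFF>> <> _), do: {:ok, {:image, :jpeg}}
  #
  # so detection is a prefix match on the first few bytes of the binary.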
# error
def detect(_), do: :error
end
| 25.068966 | 106 | 0.568776 |
93ff2ef4678f39b0d8beb89c1a83da85b5b4839e | 505 | ex | Elixir | lib/creek_dict_admin_web/views/error_view.ex | nativesintech/creek-dictionary-admin | 14bfd6c364010a1c905f97cd9388bad0a40589bc | ["MIT"] | null | null | null | lib/creek_dict_admin_web/views/error_view.ex | nativesintech/creek-dictionary-admin | 14bfd6c364010a1c905f97cd9388bad0a40589bc | ["MIT"] | 7 | 2019-09-20T02:00:23.000Z | 2019-10-07T04:22:51.000Z | lib/creek_dict_admin_web/views/error_view.ex | nativesintech/creek-dictionary-admin | 14bfd6c364010a1c905f97cd9388bad0a40589bc | ["MIT"] | null | null | null |
defmodule CreekDictAdminWeb.ErrorView do
use CreekDictAdminWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
def template_not_found(template, _assigns) do
Phoenix.Controller.status_message_from_template(template)
end
end
| 29.705882 | 61 | 0.742574 |
93ff2fe955ba892742297c52dc53d513f483f03d | 21,772 | ex | Elixir | lib/event_store/subscriptions/subscription_fsm.ex | jsmestad/eventstore | 93660ce316ca174ff4694e211a7ac420253e4dac | ["MIT"] | null | null | null | lib/event_store/subscriptions/subscription_fsm.ex | jsmestad/eventstore | 93660ce316ca174ff4694e211a7ac420253e4dac | ["MIT"] | null | null | null | lib/event_store/subscriptions/subscription_fsm.ex | jsmestad/eventstore | 93660ce316ca174ff4694e211a7ac420253e4dac | ["MIT"] | null | null | null |
defmodule EventStore.Subscriptions.SubscriptionFsm do
@moduledoc false
alias EventStore.{AdvisoryLocks, PubSub, RecordedEvent, Storage}
alias EventStore.Streams.Stream
alias EventStore.Subscriptions.{SubscriptionState, Subscriber}
use Fsm, initial_state: :initial, initial_data: %SubscriptionState{}
require Logger
def new(stream_uuid, subscription_name, opts) do
new(
data: %SubscriptionState{
conn: Keyword.fetch!(opts, :conn),
event_store: Keyword.fetch!(opts, :event_store),
stream_uuid: stream_uuid,
subscription_name: subscription_name,
serializer: Keyword.fetch!(opts, :serializer),
schema: Keyword.fetch!(opts, :schema),
start_from: opts[:start_from] || 0,
mapper: opts[:mapper],
selector: opts[:selector],
partition_by: opts[:partition_by],
buffer_size: opts[:buffer_size] || 1,
max_size: opts[:max_size] || 1_000,
transient: Keyword.get(opts, :transient, false)
}
)
end
# The main flow between states in this finite state machine is:
#
# initial -> request_catch_up -> catching_up -> subscribed
#
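  # Additional states outside the happy path (added note, derived from the handlers
  # below): :max_capacity when the pending-event queue reaches max_size,
  # :disconnected when the advisory lock is lost, and :unsubscribed when the last
  # subscriber goes away or the stream has been deleted.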
defstate initial do
defevent subscribe, data: %SubscriptionState{transient: true} = data do
data = %SubscriptionState{
data
| queue_size: 0,
partitions: %{},
processed_event_numbers: MapSet.new()
}
with :ok <- subscribe_to_events(data) do
last_seen = data.start_from
data = %SubscriptionState{
data
| last_received: last_seen,
last_sent: last_seen,
last_ack: last_seen
}
notify_subscribed(data)
next_state(:request_catch_up, data)
else
_ ->
# Failed to subscribe to stream, retry after delay
next_state(:initial, data)
end
end
defevent subscribe,
data: %SubscriptionState{} = data do
data = %SubscriptionState{
data
| queue_size: 0,
partitions: %{},
processed_event_numbers: MapSet.new()
}
with {:ok, subscription} <- create_subscription(data),
{:ok, lock_ref} <- try_acquire_exclusive_lock(data, subscription),
:ok <- subscribe_to_events(data) do
%Storage.Subscription{subscription_id: subscription_id, last_seen: last_seen} =
subscription
last_seen = last_seen || 0
data = %SubscriptionState{
data
| subscription_id: subscription_id,
lock_ref: lock_ref,
last_received: last_seen,
last_sent: last_seen,
last_ack: last_seen
}
notify_subscribed(data)
next_state(:request_catch_up, data)
else
_ ->
# Failed to subscribe to stream, retry after delay
next_state(:initial, data)
end
end
end
defstate request_catch_up do
defevent catch_up, data: %SubscriptionState{} = data do
catch_up_from_stream(data)
end
defevent ack(ack, subscriber), data: %SubscriptionState{} = data do
with {:ok, data} <- ack_events(data, ack, subscriber) do
catch_up_from_stream(data)
else
reply -> respond(reply)
end
end
end
defstate catching_up do
defevent ack(ack, subscriber), data: %SubscriptionState{} = data do
with {:ok, data} <- ack_events(data, ack, subscriber) do
catch_up_from_stream(data)
else
reply -> respond(reply)
end
end
end
defstate subscribed do
# Notify events when subscribed
defevent notify_events(events), data: %SubscriptionState{} = data do
%SubscriptionState{last_received: last_received} = data
expected_event = last_received + 1
case first_event_number(events) do
past when past < expected_event ->
Logger.debug(fn -> describe(data) <> " received past event(s), ignoring" end)
# Ignore already seen events
next_state(:subscribed, data)
future when future > expected_event ->
Logger.debug(fn ->
describe(data) <> " received unexpected event(s), requesting catch up"
end)
# Missed event(s), request catch-up with any unseen events from storage
next_state(:request_catch_up, data)
^expected_event ->
Logger.debug(fn ->
describe(data) <> " is enqueueing #{length(events)} event(s)"
end)
# Subscriber is up-to-date, so enqueue events to send
data =
data
|> enqueue_events(events)
|> notify_subscribers()
if over_capacity?(data) do
# Too many pending events, must wait for these to be processed.
next_state(:max_capacity, data)
else
# Remain subscribed, waiting for subscriber to ack already sent events.
next_state(:subscribed, data)
end
end
end
defevent ack(ack, subscriber), data: %SubscriptionState{} = data do
with {:ok, data} <- ack_events(data, ack, subscriber) do
next_state(:subscribed, data)
else
reply -> respond(reply)
end
end
defevent catch_up, data: %SubscriptionState{} = data do
next_state(:request_catch_up, data)
end
end
defstate max_capacity do
defevent ack(ack, subscriber), data: %SubscriptionState{} = data do
with {:ok, data} <- ack_events(data, ack, subscriber) do
if empty_queue?(data) do
# No further pending events so catch up with any unseen.
next_state(:request_catch_up, data)
else
# Pending events remain, wait until subscriber ack's.
next_state(:max_capacity, data)
end
else
reply -> respond(reply)
end
end
end
defstate disconnected do
# Attempt to subscribe
defevent subscribe, data: %SubscriptionState{} = data do
with {:ok, subscription} <- create_subscription(data),
{:ok, lock_ref} <- try_acquire_exclusive_lock(data, subscription) do
%Storage.Subscription{
subscription_id: subscription_id,
last_seen: last_seen
} = subscription
last_ack = last_seen || 0
data = %SubscriptionState{
data
| subscription_id: subscription_id,
lock_ref: lock_ref,
last_sent: last_ack,
last_ack: last_ack
}
next_state(:request_catch_up, data)
else
_ ->
next_state(:disconnected, data)
end
end
end
defstate unsubscribed do
defevent unsubscribe(_subscriber), data: %SubscriptionState{} = data do
next_state(:unsubscribed, data)
end
end
# Catch-all event handlers
defevent ack(_ack, _subscriber), data: %SubscriptionState{} = data, state: state do
next_state(state, data)
end
defevent connect_subscriber(subscriber, opts),
data: %SubscriptionState{} = data,
state: state do
data = data |> monitor_subscriber(subscriber, opts) |> notify_subscribers()
unless state == :initial do
notify_subscribed(subscriber)
end
next_state(state, data)
end
defevent subscribe, data: %SubscriptionState{} = data, state: state do
next_state(state, data)
end
# Ignore notify events unless subscribed
defevent notify_events(events), data: %SubscriptionState{} = data, state: state do
next_state(state, track_last_received(data, events))
end
defevent catch_up, data: %SubscriptionState{} = data, state: state do
next_state(state, data)
end
defevent disconnect(lock_ref), data: %SubscriptionState{lock_ref: lock_ref} = data do
data =
%SubscriptionState{
data
| lock_ref: nil,
queue_size: 0,
partitions: %{},
processed_event_numbers: MapSet.new()
}
|> purge_in_flight_events()
next_state(:disconnected, data)
end
defevent unsubscribe(pid), data: %SubscriptionState{} = data, state: state do
data = data |> remove_subscriber(pid) |> notify_subscribers()
case has_subscribers?(data) do
true ->
next_state(state, data)
false ->
next_state(:unsubscribed, data)
end
end
defp create_subscription(%SubscriptionState{} = data) do
%SubscriptionState{
conn: conn,
schema: schema,
start_from: start_from,
stream_uuid: stream_uuid,
subscription_name: subscription_name
} = data
Storage.Subscription.subscribe_to_stream(
conn,
stream_uuid,
subscription_name,
start_from,
schema: schema
)
end
defp try_acquire_exclusive_lock(
%SubscriptionState{} = data,
%Storage.Subscription{} = subscription
) do
%Storage.Subscription{subscription_id: subscription_id} = subscription
%SubscriptionState{event_store: event_store} = data
server = Module.concat(event_store, AdvisoryLocks)
AdvisoryLocks.try_advisory_lock(server, subscription_id)
end
defp subscribe_to_events(%SubscriptionState{} = data) do
%SubscriptionState{event_store: event_store, stream_uuid: stream_uuid} = data
PubSub.subscribe(event_store, stream_uuid)
end
defp monitor_subscriber(%SubscriptionState{} = data, pid, opts) when is_pid(pid) do
%SubscriptionState{subscribers: subscribers, buffer_size: buffer_size} = data
subscriber = %Subscriber{
pid: pid,
ref: Process.monitor(pid),
buffer_size: Keyword.get(opts, :buffer_size, buffer_size)
}
%SubscriptionState{data | subscribers: Map.put(subscribers, pid, subscriber)}
end
defp remove_subscriber(%SubscriptionState{subscribers: subscribers} = data, subscriber_pid)
when is_pid(subscriber_pid) do
case subscriber_by_pid(subscribers, subscriber_pid) do
{:ok, %Subscriber{} = subscriber} ->
%Subscriber{in_flight: in_flight} = subscriber
# Prepend in-flight events for the removed subscriber to the pending
# event queue so they can be sent to another available subscriber.
data =
in_flight
|> Enum.sort_by(fn %RecordedEvent{event_number: event_number} -> -event_number end)
|> Enum.reduce(data, fn event, data ->
enqueue_event(data, event, &:queue.in_r/2)
end)
%SubscriptionState{data | subscribers: Map.delete(subscribers, subscriber_pid)}
{:error, :unknown_subscriber} ->
data
end
end
defp has_subscribers?(%SubscriptionState{subscribers: subscribers}), do: subscribers != %{}
# Notify all connected subscribers that this subscription has successfully subscribed.
defp notify_subscribed(%SubscriptionState{subscribers: subscribers}) do
for {pid, _subscriber} <- subscribers do
notify_subscribed(pid)
end
:ok
end
defp notify_subscribed(subscriber) when is_pid(subscriber) do
send(subscriber, {:subscribed, self()})
end
defp track_last_received(%SubscriptionState{} = data, events) when is_list(events) do
track_last_received(data, last_event_number(events))
end
defp track_last_received(%SubscriptionState{} = data, event_number)
when is_number(event_number) do
%SubscriptionState{last_received: last_received} = data
%SubscriptionState{data | last_received: max(last_received, event_number)}
end
defp track_last_sent(%SubscriptionState{} = data, event_number) do
%SubscriptionState{last_sent: last_sent} = data
%SubscriptionState{data | last_sent: max(last_sent, event_number)}
end
defp first_event_number([%RecordedEvent{event_number: event_number} | _]), do: event_number
defp last_event_number([%RecordedEvent{event_number: event_number}]), do: event_number
defp last_event_number([_event | events]), do: last_event_number(events)
def catch_up_from_stream(%SubscriptionState{queue_size: 0} = data) do
%SubscriptionState{last_sent: last_sent, last_received: last_received} = data
case read_stream_forward(data) do
{:ok, []} ->
if last_sent == last_received do
# Subscriber is up-to-date with latest published events
next_state(:subscribed, data)
else
# Need to catch-up with events published while catching up
next_state(:request_catch_up, data)
end
{:ok, events} ->
data = data |> enqueue_events(events) |> notify_subscribers()
if empty_queue?(data) do
# Request next batch of events
next_state(:request_catch_up, data)
else
# Wait until subscribers have ack'd in-flight events
next_state(:catching_up, data)
end
{:error, :stream_deleted} ->
# Don't allow subscriptions to deleted streams to receive any events
next_state(:unsubscribed, data)
{:error, :stream_not_found} ->
# Allow subscriptions to streams which don't yet exist, but might be created later
next_state(:subscribed, data)
end
end
def catch_up_from_stream(%SubscriptionState{} = data) do
next_state(:catching_up, data)
end
defp read_stream_forward(%SubscriptionState{} = data) do
%SubscriptionState{
conn: conn,
schema: schema,
serializer: serializer,
stream_uuid: stream_uuid,
last_sent: last_sent,
max_size: max_size
} = data
Stream.read_stream_forward(conn, stream_uuid, last_sent + 1, max_size,
schema: schema,
serializer: serializer
)
end
defp enqueue_events(%SubscriptionState{} = data, []), do: data
defp enqueue_events(%SubscriptionState{} = data, [event | events]) do
%SubscriptionState{processed_event_numbers: processed_event_numbers} = data
%RecordedEvent{event_number: event_number} = event
data =
case selected?(event, data) do
true ->
# Unfiltered event, enqueue to send to a subscriber
enqueue_event(data, event)
false ->
# Filtered event, don't send to subscriber, but track it as processed.
%SubscriptionState{
data
| processed_event_numbers: MapSet.put(processed_event_numbers, event_number)
}
|> track_last_sent(event_number)
end
data
|> track_last_received(event_number)
|> enqueue_events(events)
end
defp enqueue_event(%SubscriptionState{} = data, event, enqueue \\ &:queue.in/2) do
%SubscriptionState{partitions: partitions, queue_size: queue_size} = data
partition_key = partition_key(data, event)
partitions =
partitions
|> Map.put_new(partition_key, :queue.new())
|> Map.update!(partition_key, fn pending_events -> enqueue.(event, pending_events) end)
%SubscriptionState{data | partitions: partitions, queue_size: queue_size + 1}
end
def partition_key(%SubscriptionState{partition_by: nil}, %RecordedEvent{}), do: nil
def partition_key(%SubscriptionState{partition_by: partition_by}, %RecordedEvent{} = event)
when is_function(partition_by, 1),
do: partition_by.(event)
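  # Illustration (hypothetical partition function, not from this module): with
  # `partition_by: fn event -> event.stream_uuid end` every event from the same
  # stream maps to the same partition key, so one subscriber receives that stream's
  # events in order while other partitions are balanced across the remaining
  # subscribers.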
# Attempt to notify subscribers with any pending events. Partitions are
# selected by peeking at the event number of their queue to ensure earlier
# events are sent first.
defp notify_subscribers(%SubscriptionState{partitions: partitions} = data) do
partitions
|> Enum.sort_by(fn {_partition_key, pending_events} -> peek_event_number(pending_events) end)
|> Enum.reduce(data, fn {partition_key, _pending_events}, data ->
notify_partition_subscriber(data, partition_key)
end)
|> checkpoint_last_seen()
end
defp peek_event_number(pending_events) do
case :queue.peek(pending_events) do
{:value, %RecordedEvent{event_number: event_number}} -> event_number
_ -> nil
end
end
defp notify_partition_subscriber(data, partition_key, events_to_send \\ []) do
%SubscriptionState{
partitions: partitions,
subscribers: subscribers,
queue_size: queue_size
} = data
with pending_events when not is_nil(pending_events) <- Map.get(partitions, partition_key),
{{:value, event}, pending_events} <- :queue.out(pending_events),
{:ok, subscriber} <- next_available_subscriber(data, partition_key) do
%RecordedEvent{event_number: event_number} = event
%Subscriber{pid: subscriber_pid} = subscriber
subscriber = Subscriber.track_in_flight(subscriber, event, partition_key)
partitions =
case :queue.is_empty(pending_events) do
true -> Map.delete(partitions, partition_key)
false -> Map.put(partitions, partition_key, pending_events)
end
%SubscriptionState{
data
| partitions: partitions,
subscribers: Map.put(subscribers, subscriber_pid, subscriber),
queue_size: max(queue_size - 1, 0)
}
|> track_last_sent(event_number)
|> notify_partition_subscriber(partition_key, [{subscriber_pid, event} | events_to_send])
else
_ ->
# No further queued event or available subscriber, send ready events to
# subscribers then stop notifying.
send_queued_events(events_to_send, data)
end
end
# Send events to the subscriber
defp send_queued_events([], data), do: data
defp send_queued_events(events_to_send, data) do
events_to_send
|> Enum.group_by(fn {pid, _event} -> pid end, fn {_pid, event} -> event end)
|> Enum.each(fn {pid, events} ->
mapped_events = events |> Enum.reverse() |> map(data)
send(pid, {:events, mapped_events})
end)
data
end
# Select the next available subscriber based upon their partition key, buffer
# size and number of currently in-flight events.
#
# Uses a round robin strategy for balancing events between subscribers.
#
# Events will be distributed to subscribers based upon their partition key
# when a `partition_by/1` function is provided. This is used to guarantee
# ordering of events for each partition.
defp next_available_subscriber(%SubscriptionState{} = data, partition_key) do
%SubscriptionState{subscribers: subscribers} = data
partition_subscriber =
Enum.find(subscribers, fn {_pid, subscriber} ->
Subscriber.in_partition?(subscriber, partition_key)
end)
subscribers =
case partition_subscriber do
nil -> subscribers
subscriber -> [subscriber]
end
subscribers
|> Enum.sort_by(fn {_pid, %Subscriber{last_sent: last_sent}} -> last_sent end)
|> Enum.find(fn {_pid, subscriber} -> Subscriber.available?(subscriber) end)
|> case do
nil -> {:error, :no_available_subscriber}
{_pid, subscriber} -> {:ok, subscriber}
end
end
defp selected?(event, %SubscriptionState{selector: selector}) when is_function(selector, 1),
do: selector.(event)
defp selected?(_event, %SubscriptionState{}), do: true
defp map(events, %SubscriptionState{mapper: mapper}) when is_function(mapper, 1),
do: Enum.map(events, mapper)
defp map(events, _mapper), do: events
defp ack_events(%SubscriptionState{} = data, ack, subscriber_pid) do
%SubscriptionState{subscribers: subscribers, processed_event_numbers: processed_event_numbers} =
data
with {:ok, subscriber} <- subscriber_by_pid(subscribers, subscriber_pid),
{:ok, subscriber, acknowledged_events} <- Subscriber.acknowledge(subscriber, ack) do
processed_event_numbers =
acknowledged_events
|> Enum.map(& &1.event_number)
|> Enum.reduce(processed_event_numbers, &MapSet.put(&2, &1))
data =
%SubscriptionState{
data
| subscribers: Map.put(subscribers, subscriber_pid, subscriber),
processed_event_numbers: processed_event_numbers
}
|> notify_subscribers()
{:ok, data}
end
end
defp subscriber_by_pid(subscribers, subscriber_pid) do
case Map.get(subscribers, subscriber_pid) do
%Subscriber{} = subscriber -> {:ok, subscriber}
nil -> {:error, :unknown_subscriber}
end
end
defp checkpoint_last_seen(%SubscriptionState{} = data, persist \\ false) do
%SubscriptionState{
conn: conn,
schema: schema,
stream_uuid: stream_uuid,
subscription_name: subscription_name,
processed_event_numbers: processed_event_numbers,
last_ack: last_ack
} = data
ack = last_ack + 1
cond do
MapSet.member?(processed_event_numbers, ack) ->
persist_if_not_transient = not data.transient
%SubscriptionState{
data
| processed_event_numbers: MapSet.delete(processed_event_numbers, ack),
last_ack: ack
}
|> checkpoint_last_seen(persist_if_not_transient)
persist ->
Storage.Subscription.ack_last_seen_event(conn, stream_uuid, subscription_name, last_ack,
schema: schema
)
data
true ->
data
end
end
# Purge all subscriber in-flight events and subscription event queue.
defp purge_in_flight_events(%SubscriptionState{} = data) do
%SubscriptionState{subscribers: subscribers} = data
subscribers =
Enum.reduce(subscribers, %{}, fn {pid, subscriber}, acc ->
Map.put(acc, pid, Subscriber.reset_in_flight(subscriber))
end)
%SubscriptionState{data | subscribers: subscribers}
end
defp empty_queue?(%SubscriptionState{queue_size: 0}), do: true
defp empty_queue?(%SubscriptionState{}), do: false
defp over_capacity?(%SubscriptionState{queue_size: queue_size, max_size: max_size}),
do: queue_size >= max_size
defp describe(%SubscriptionState{stream_uuid: stream_uuid, subscription_name: name}),
do: "Subscription #{inspect(name)}@#{inspect(stream_uuid)}"
end
| 31.326619 | 100 | 0.664294 |
93ff9d9e3c9ce7d6eb342cb32bcf2e981fb59f8b | 1,851 | exs | Elixir | clients/os_login/mix.exs | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/os_login/mix.exs | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | ["Apache-2.0"] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/os_login/mix.exs | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.OSLogin.Mixfile do
use Mix.Project
@version "0.27.0"
def project() do
[
app: :google_api_os_login,
version: @version,
elixir: "~> 1.6",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/os_login"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:google_gax, "~> 0.4"},
{:ex_doc, "~> 0.16", only: :dev}
]
end
defp description() do
"""
Cloud OS Login API client library. You can use OS Login to manage access to your VM instances using IAM roles.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching", "Daniel Azuma"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/os_login",
"Homepage" => "https://cloud.google.com/compute/docs/oslogin/"
}
]
end
end
| 27.626866 | 114 | 0.653701 |
93ffb10fca9634638c6de5e07eaa588b2f3f8b76 | 1,848 | ex | Elixir | lib/acmex/resource/challenge.ex | Wynteres/acmex | 3ec899673320d14fc127a80531adc8a54a7a3450 | ["MIT"] | null | null | null | lib/acmex/resource/challenge.ex | Wynteres/acmex | 3ec899673320d14fc127a80531adc8a54a7a3450 | ["MIT"] | null | null | null | lib/acmex/resource/challenge.ex | Wynteres/acmex | 3ec899673320d14fc127a80531adc8a54a7a3450 | ["MIT"] | null | null | null |
defmodule Acmex.Resource.Challenge do
@moduledoc """
This structure represents a challenge to prove control of an identifier.
"""
alias JOSE.JWK
@enforce_keys ~w(status token type url)a
defstruct @enforce_keys
@type dns_response :: %{
key_authorization: String.t(),
record_name: String.t(),
record_type: String.t()
}
@type http_response :: %{
content_type: String.t(),
filename: String.t(),
key_authorization: String.t()
}
@type t :: %__MODULE__{status: String.t(), token: String.t(), type: String.t(), url: String.t()}
@doc """
Builds a challenge struct.
"""
@spec new(map()) :: t()
def new(challenge), do: struct(__MODULE__, challenge)
@doc """
Gets the response from a challenge.
"""
@spec get_response(t(), tuple()) :: {:ok, dns_response()} | {:ok, http_response()}
def get_response(%__MODULE__{type: "dns-01"} = challenge, jwk) do
{:ok, key_authorization} = get_key_authorization(challenge, jwk)
key_authorization =
:sha256
|> :crypto.hash(key_authorization)
|> Base.url_encode64(padding: false)
{:ok,
%{
record_name: "_acme-challenge",
record_type: "TXT",
key_authorization: key_authorization
}}
end
def get_response(%__MODULE__{token: token} = challenge, jwk) do
{:ok, key_authorization} = get_key_authorization(challenge, jwk)
{:ok,
%{
key_authorization: key_authorization,
filename: ".well-known/acme-challenge/#{token}",
content_type: "text/plain"
}}
end
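  # Illustrative usage (hypothetical token, URL and JWK, not taken from this library's tests):
  #
  #   challenge = %Acmex.Resource.Challenge{status: "pending", token: "abc123",
  #                                         type: "http-01", url: "https://acme/chall/1"}
  #   {:ok, %{filename: ".well-known/acme-challenge/abc123"}} =
  #     Acmex.Resource.Challenge.get_response(challenge, jwk)
  #
  # For "dns-01" challenges the map instead carries the TXT record name and the
  # Base64url-encoded SHA-256 digest of the key authorization.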
@doc """
Gets the key authorization from a challenge and JWK.
"""
@spec get_key_authorization(t(), tuple()) :: {:ok, String.t()}
def get_key_authorization(%__MODULE__{token: token}, jwk),
do: {:ok, "#{token}.#{JWK.thumbprint(jwk)}"}
end
| 26.782609 | 98 | 0.624459 |
93ffb8187c0dffc61b5456bb8e41e7160fa509f5 | 11,533 | ex | Elixir | clients/android_management/lib/google_api/android_management/v1/model/device.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | clients/android_management/lib/google_api/android_management/v1/model/device.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | clients/android_management/lib/google_api/android_management/v1/model/device.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidManagement.V1.Model.Device do
@moduledoc """
A device owned by an enterprise. Unless otherwise noted, all fields are read-only and can't be modified by enterprises.devices.patch.
## Attributes
* `apiLevel` (*type:* `integer()`, *default:* `nil`) - The API level of the Android platform version running on the device.
* `applicationReports` (*type:* `list(GoogleApi.AndroidManagement.V1.Model.ApplicationReport.t)`, *default:* `nil`) - Reports for apps installed on the device. This information is only available when application_reports_enabled is true in the device's policy.
* `appliedPolicyName` (*type:* `String.t`, *default:* `nil`) - The name of the policy currently applied to the device.
* `appliedPolicyVersion` (*type:* `String.t`, *default:* `nil`) - The version of the policy currently applied to the device.
* `appliedState` (*type:* `String.t`, *default:* `nil`) - The state currently applied to the device.
* `deviceSettings` (*type:* `GoogleApi.AndroidManagement.V1.Model.DeviceSettings.t`, *default:* `nil`) - Device settings information. This information is only available if deviceSettingsEnabled is true in the device's policy.
* `disabledReason` (*type:* `GoogleApi.AndroidManagement.V1.Model.UserFacingMessage.t`, *default:* `nil`) - If the device state is DISABLED, an optional message that is displayed on the device indicating the reason the device is disabled. This field can be modified by a patch request.
* `displays` (*type:* `list(GoogleApi.AndroidManagement.V1.Model.Display.t)`, *default:* `nil`) - Detailed information about displays on the device. This information is only available if displayInfoEnabled is true in the device's policy.
* `enrollmentTime` (*type:* `DateTime.t`, *default:* `nil`) - The time of device enrollment.
* `enrollmentTokenData` (*type:* `String.t`, *default:* `nil`) - If the device was enrolled with an enrollment token with additional data provided, this field contains that data.
* `enrollmentTokenName` (*type:* `String.t`, *default:* `nil`) - If the device was enrolled with an enrollment token, this field contains the name of the token.
* `hardwareInfo` (*type:* `GoogleApi.AndroidManagement.V1.Model.HardwareInfo.t`, *default:* `nil`) - Detailed information about the device hardware.
* `hardwareStatusSamples` (*type:* `list(GoogleApi.AndroidManagement.V1.Model.HardwareStatus.t)`, *default:* `nil`) - Hardware status samples in chronological order. This information is only available if hardwareStatusEnabled is true in the device's policy.
* `lastPolicyComplianceReportTime` (*type:* `DateTime.t`, *default:* `nil`) - Deprecated.
* `lastPolicySyncTime` (*type:* `DateTime.t`, *default:* `nil`) - The last time the device fetched its policy.
* `lastStatusReportTime` (*type:* `DateTime.t`, *default:* `nil`) - The last time the device sent a status report.
* `managementMode` (*type:* `String.t`, *default:* `nil`) - The type of management mode Android Device Policy takes on the device. This influences which policy settings are supported.
* `memoryEvents` (*type:* `list(GoogleApi.AndroidManagement.V1.Model.MemoryEvent.t)`, *default:* `nil`) - Events related to memory and storage measurements in chronological order. This information is only available if memoryInfoEnabled is true in the device's policy.
* `memoryInfo` (*type:* `GoogleApi.AndroidManagement.V1.Model.MemoryInfo.t`, *default:* `nil`) - Memory information. This information is only available if memoryInfoEnabled is true in the device's policy.
* `name` (*type:* `String.t`, *default:* `nil`) - The name of the device in the form enterprises/{enterpriseId}/devices/{deviceId}.
* `networkInfo` (*type:* `GoogleApi.AndroidManagement.V1.Model.NetworkInfo.t`, *default:* `nil`) - Device network information. This information is only available if networkInfoEnabled is true in the device's policy.
* `nonComplianceDetails` (*type:* `list(GoogleApi.AndroidManagement.V1.Model.NonComplianceDetail.t)`, *default:* `nil`) - Details about policy settings that the device is not compliant with.
* `policyCompliant` (*type:* `boolean()`, *default:* `nil`) - Whether the device is compliant with its policy.
* `policyName` (*type:* `String.t`, *default:* `nil`) - The name of the policy applied to the device, in the form enterprises/{enterpriseId}/policies/{policyId}. If not specified, the policy_name for the device's user is applied. This field can be modified by a patch request. You can specify only the policyId when calling enterprises.devices.patch, as long as the policyId doesn’t contain any slashes. The rest of the policy name is inferred.
* `powerManagementEvents` (*type:* `list(GoogleApi.AndroidManagement.V1.Model.PowerManagementEvent.t)`, *default:* `nil`) - Power management events on the device in chronological order. This information is only available if powerManagementEventsEnabled is true in the device's policy.
* `previousDeviceNames` (*type:* `list(String.t)`, *default:* `nil`) - If the same physical device has been enrolled multiple times, this field contains its previous device names. The serial number is used as the unique identifier to determine if the same physical device has enrolled previously. The names are in chronological order.
* `securityPosture` (*type:* `GoogleApi.AndroidManagement.V1.Model.SecurityPosture.t`, *default:* `nil`) - Device's security posture value that reflects how secure the device is.
* `softwareInfo` (*type:* `GoogleApi.AndroidManagement.V1.Model.SoftwareInfo.t`, *default:* `nil`) - Detailed information about the device software. This information is only available if softwareInfoEnabled is true in the device's policy.
* `state` (*type:* `String.t`, *default:* `nil`) - The state to be applied to the device. This field can be modified by a patch request. Note that when calling enterprises.devices.patch, ACTIVE and DISABLED are the only allowable values. To enter the device into a DELETED state, call enterprises.devices.delete.
* `systemProperties` (*type:* `map()`, *default:* `nil`) - Map of selected system properties name and value related to the device. This information is only available if systemPropertiesEnabled is true in the device's policy.
* `user` (*type:* `GoogleApi.AndroidManagement.V1.Model.User.t`, *default:* `nil`) - The user who owns the device.
* `userName` (*type:* `String.t`, *default:* `nil`) - The resource name of the user that owns this device in the form enterprises/{enterpriseId}/users/{userId}.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:apiLevel => integer(),
:applicationReports => list(GoogleApi.AndroidManagement.V1.Model.ApplicationReport.t()),
:appliedPolicyName => String.t(),
:appliedPolicyVersion => String.t(),
:appliedState => String.t(),
:deviceSettings => GoogleApi.AndroidManagement.V1.Model.DeviceSettings.t(),
:disabledReason => GoogleApi.AndroidManagement.V1.Model.UserFacingMessage.t(),
:displays => list(GoogleApi.AndroidManagement.V1.Model.Display.t()),
:enrollmentTime => DateTime.t(),
:enrollmentTokenData => String.t(),
:enrollmentTokenName => String.t(),
:hardwareInfo => GoogleApi.AndroidManagement.V1.Model.HardwareInfo.t(),
:hardwareStatusSamples => list(GoogleApi.AndroidManagement.V1.Model.HardwareStatus.t()),
:lastPolicyComplianceReportTime => DateTime.t(),
:lastPolicySyncTime => DateTime.t(),
:lastStatusReportTime => DateTime.t(),
:managementMode => String.t(),
:memoryEvents => list(GoogleApi.AndroidManagement.V1.Model.MemoryEvent.t()),
:memoryInfo => GoogleApi.AndroidManagement.V1.Model.MemoryInfo.t(),
:name => String.t(),
:networkInfo => GoogleApi.AndroidManagement.V1.Model.NetworkInfo.t(),
:nonComplianceDetails =>
list(GoogleApi.AndroidManagement.V1.Model.NonComplianceDetail.t()),
:policyCompliant => boolean(),
:policyName => String.t(),
:powerManagementEvents =>
list(GoogleApi.AndroidManagement.V1.Model.PowerManagementEvent.t()),
:previousDeviceNames => list(String.t()),
:securityPosture => GoogleApi.AndroidManagement.V1.Model.SecurityPosture.t(),
:softwareInfo => GoogleApi.AndroidManagement.V1.Model.SoftwareInfo.t(),
:state => String.t(),
:systemProperties => map(),
:user => GoogleApi.AndroidManagement.V1.Model.User.t(),
:userName => String.t()
}
field(:apiLevel)
field(:applicationReports,
as: GoogleApi.AndroidManagement.V1.Model.ApplicationReport,
type: :list
)
field(:appliedPolicyName)
field(:appliedPolicyVersion)
field(:appliedState)
field(:deviceSettings, as: GoogleApi.AndroidManagement.V1.Model.DeviceSettings)
field(:disabledReason, as: GoogleApi.AndroidManagement.V1.Model.UserFacingMessage)
field(:displays, as: GoogleApi.AndroidManagement.V1.Model.Display, type: :list)
field(:enrollmentTime, as: DateTime)
field(:enrollmentTokenData)
field(:enrollmentTokenName)
field(:hardwareInfo, as: GoogleApi.AndroidManagement.V1.Model.HardwareInfo)
field(:hardwareStatusSamples,
as: GoogleApi.AndroidManagement.V1.Model.HardwareStatus,
type: :list
)
field(:lastPolicyComplianceReportTime, as: DateTime)
field(:lastPolicySyncTime, as: DateTime)
field(:lastStatusReportTime, as: DateTime)
field(:managementMode)
field(:memoryEvents, as: GoogleApi.AndroidManagement.V1.Model.MemoryEvent, type: :list)
field(:memoryInfo, as: GoogleApi.AndroidManagement.V1.Model.MemoryInfo)
field(:name)
field(:networkInfo, as: GoogleApi.AndroidManagement.V1.Model.NetworkInfo)
field(:nonComplianceDetails,
as: GoogleApi.AndroidManagement.V1.Model.NonComplianceDetail,
type: :list
)
field(:policyCompliant)
field(:policyName)
field(:powerManagementEvents,
as: GoogleApi.AndroidManagement.V1.Model.PowerManagementEvent,
type: :list
)
field(:previousDeviceNames, type: :list)
field(:securityPosture, as: GoogleApi.AndroidManagement.V1.Model.SecurityPosture)
field(:softwareInfo, as: GoogleApi.AndroidManagement.V1.Model.SoftwareInfo)
field(:state)
field(:systemProperties, type: :map)
field(:user, as: GoogleApi.AndroidManagement.V1.Model.User)
field(:userName)
end
defimpl Poison.Decoder, for: GoogleApi.AndroidManagement.V1.Model.Device do
def decode(value, options) do
GoogleApi.AndroidManagement.V1.Model.Device.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AndroidManagement.V1.Model.Device do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 71.191358 | 448 | 0.729299 |
93ffe84b7f7d14270614bf7b2c824454aa9cc3a6 | 3,311 | ex | Elixir | web/controllers/database_controller.ex | sahilpaudel/AfterGlow | 0859ec14b47c8c5704cc8e5cba86d39aa258fff5 | ["MIT"] | null | null | null | web/controllers/database_controller.ex | sahilpaudel/AfterGlow | 0859ec14b47c8c5704cc8e5cba86d39aa258fff5 | ["MIT"] | null | null | null | web/controllers/database_controller.ex | sahilpaudel/AfterGlow | 0859ec14b47c8c5704cc8e5cba86d39aa258fff5 | ["MIT"] | null | null | null |
defmodule AfterGlow.DatabaseController do
use AfterGlow.Web, :controller
import Ecto.Query
alias AfterGlow.Database
alias JaSerializer.Params
alias AfterGlow.Async
alias AfterGlow.SchemaTasks
alias AfterGlow.DatabaseWithConfigView
alias AfterGlow.CacheWrapper
alias AfterGlow.CacheWrapper.Repo
import AfterGlow.Policy.Helpers
alias AfterGlow.Plugs.Authorization
plug(Authorization)
plug(:authorize!, Database)
plug(:scrub_params, "data" when action in [:create, :update])
plug(:verify_authorized)
def index(conn, %{"id" => id, "include_config" => "true"}) do
if has_permission(conn.assigns.current_user, ["Settings.all"]) do
database =
scope(conn, from(d in Database, where: d.id == ^id), policy: AfterGlow.Database.Policy)
|> Repo.one()
json(
conn,
DatabaseWithConfigView
|> JaSerializer.format(database, conn, type: 'database')
)
else
conn
|> put_status(:unauthorized)
|> json(%{error: "not_permitted"})
end
end
def index(conn, _params) do
databases =
scope(conn, from(d in Database, select: [:id]), policy: AfterGlow.Database.Policy)
|> Repo.all()
|> Enum.map(fn x -> x.id end)
|> CacheWrapper.get_by_ids(Database)
|> Repo.preload(:tables)
render(conn, :index, data: databases)
end
def create(conn, %{"data" => data = %{"type" => "databases", "attributes" => _database_params}}) do
changeset = Database.changeset(%Database{}, Params.to_attributes(data))
case Database.insert(changeset) do
{:ok, database} ->
database = database |> Repo.preload(:tables)
conn
|> put_status(:created)
|> put_resp_header("location", database_path(conn, :show, database))
|> render(:show, data: database)
{:error, changeset} ->
conn
|> put_status(:unprocessable_entity)
|> render(:errors, data: changeset)
end
end
def show(conn, %{"id" => id}) do
database =
scope(conn, from(d in Database, where: d.id == ^id), policy: AfterGlow.Database.Policy)
|> Repo.one()
|> Repo.preload(:tables)
render(conn, :show, data: database)
end
def sync(conn, %{"id" => id}) do
database =
scope(conn, from(d in Database, where: d.id == ^id), policy: AfterGlow.Database.Policy)
|> Repo.one()
|> Repo.preload(:tables)
Async.perform(&SchemaTasks.sync/1, [database])
render(conn, :show, data: database)
end
def update(conn, %{
"id" => id,
"data" => data = %{"type" => "databases", "attributes" => _database_params}
}) do
database = Repo.get!(Database, id)
changeset = Database.changeset(database, Params.to_attributes(data))
case Database.update(changeset) do
{:ok, database} ->
render(conn, :show, data: database |> Repo.preload(:tables))
{:error, changeset} ->
conn
|> put_status(:unprocessable_entity)
|> render(:errors, data: changeset)
end
end
def delete(conn, %{"id" => id}) do
database = Repo.get!(Database, id)
# Here we use delete! (with a bang) because we expect
# it to always work (and if it does not, it will raise).
Repo.delete_with_cache(database)
send_resp(conn, :no_content, "")
end
end
| 29.04386 | 101 | 0.628813 |
9e00020b6477c3c45a59c18252d560a4e5357a53 | 3,178 | ex | Elixir | robotica_ui/lib/scenic_clock/digital.ex | brianmay/robotica-elixir | 8656510e54b7e32a547e3a54bf946f0e327911c9 | ["RSA-MD"] | 1 | 2019-04-23T09:16:44.000Z | 2019-04-23T09:16:44.000Z | robotica_ui/lib/scenic_clock/digital.ex | brianmay/robotica-elixir | 8656510e54b7e32a547e3a54bf946f0e327911c9 | ["RSA-MD"] | 107 | 2019-05-26T08:03:26.000Z | 2022-02-03T19:13:56.000Z | robotica_ui/lib/scenic_clock/digital.ex | brianmay/robotica-elixir | 8656510e54b7e32a547e3a54bf946f0e327911c9 | ["RSA-MD"] | 1 | 2019-08-10T20:44:24.000Z | 2019-08-10T20:44:24.000Z |
#
# Created by Boyd Multerer on August 8, 2018.
# Copyright © 2018 Kry10 Industries. All rights reserved.
#
defmodule Scenic.Clock.Digital do
@moduledoc """
A component that runs an digital clock.
See the [Components](Scenic.Clock.Components.html#digital_clock/2) module for useage
"""
use Scenic.Component, has_children: false
alias Scenic.Graph
alias Scenic.Primitive.Style.Theme
import Scenic.Primitives, only: [{:text, 2}, {:text, 3}]
@default_theme :dark
# --------------------------------------------------------
@doc false
def verify(nil), do: {:ok, nil}
def verify(_), do: :invalid_data
# --------------------------------------------------------
@doc false
def init(_, opts) do
styles = opts[:styles]
# theme is passed in as an inherited style
theme =
(styles[:theme] || Theme.preset(@default_theme))
|> Theme.normalize()
timezone = styles[:timezone]
# set up the requested graph
graph =
Graph.build(styles: styles)
|> text("", id: :date, fill: theme.text, font_size: 32)
|> text("", id: :time, fill: theme.text, translate: {0, 64}, font_size: 32)
{state, graph} =
%{
graph: graph,
timezone: timezone,
timer: nil,
last: nil,
seconds: !!styles[:seconds]
}
# start up the graph
|> update_time()
# send a message to self to start the clock a fraction of a second
# into the future to hopefully line it up closer to when the seconds
# actually are. Note that I want it to arrive just slightly after
    # the one second mark, which is way better than just slightly before. We
    # avoid trunc errors and such that way even if it means the second
# timer is one millisecond behind the actual time.
{microseconds, _} = Time.utc_now().microsecond
Process.send_after(self(), :start_clock, 1001 - trunc(microseconds / 1000))
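    # Worked example (illustrative): at 250_000 microseconds past the second, the
    # delay is 1001 - trunc(250_000 / 1000) = 751 ms, landing just after the next
    # second mark.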
{:ok, state, push: graph}
end
# --------------------------------------------------------
@doc false
# should be shortly after the actual one-second mark
def handle_info(:start_clock, state) do
# start the timer on a one-second interval
{:ok, timer} = :timer.send_interval(1000, :tick_tock)
# update the clock
{state, graph} = update_time(state)
{:noreply, %{state | timer: timer}, push: graph}
end
# --------------------------------------------------------
def handle_info(:tick_tock, state) do
{state, graph} = update_time(state)
{:noreply, state, push: graph}
end
# --------------------------------------------------------
defp update_time(
%{
timezone: timezone,
graph: graph,
last: last
} = state
) do
{:ok, time} = DateTime.now(timezone)
case time != last do
true ->
{:ok, date_str} = time |> Timex.format("%F %A", :strftime)
graph = Graph.modify(graph, :date, &text(&1, date_str))
{:ok, time_str} = time |> Timex.format("%k:%M:%S %z", :strftime)
graph = Graph.modify(graph, :time, &text(&1, time_str))
{%{state | last: time}, graph}
_ ->
{state, nil}
end
end
end
| 28.630631 | 86 | 0.559786 |
9e0006cda37f2bd1511cfc3d3cd7cae296f0752c | 2,226 | ex | Elixir | web/models/stripe_event.ex | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | ["MIT"] | null | null | null | web/models/stripe_event.ex | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | ["MIT"] | null | null | null | web/models/stripe_event.ex | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | ["MIT"] | null | null | null |
defmodule CodeCorps.StripeEvent do
@moduledoc """
Represents a reference to single Stripe API Event object
## Fields
* `endpoint` - "connect" or "platform"
* `id_from_stripe` - Stripe's `id`
* `status` - "unprocessed", "processed", or "errored"
## Note on `status`
When the event is received via a webhook, it is stored as "unprocessed".
If during processing, there is an issue, it is set to "errored". Once
successfuly processed, it is set to "processed".
There are cases where Stripe can send multiple webhooks for the same event,
so when such a request is received, an event that is "errored" or "unprocessed"
can be processed again, while a "processed" event is ignored.
"""
use CodeCorps.Web, :model
@type t :: %__MODULE__{}
schema "stripe_events" do
field :endpoint, :string, null: false
field :id_from_stripe, :string, null: false
field :ignored_reason, :string
field :object_id, :string
field :object_type, :string
field :status, :string, default: "unprocessed"
field :type, :string, null: false
field :user_id, :string
timestamps()
end
@doc """
Builds a changeset for storing a new event reference into the database.
  The `status` field is set to "processing" by the changeset (the schema default is "unprocessed").
"""
def create_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:endpoint, :id_from_stripe, :object_id, :object_type, :type, :user_id])
|> validate_required([:endpoint, :id_from_stripe, :object_id, :object_type, :type])
|> put_change(:status, "processing")
|> validate_inclusion(:status, states())
|> validate_inclusion(:endpoint, endpoints())
|> unique_constraint(:id_from_stripe)
end
@doc """
Builds a changeset for updating the status of an existing event reference.
  Accepts `:ignored_reason` and `:status`, and ensures the status is one of the
  supported states.
"""
def update_changeset(struct, params) do
struct
|> cast(params, [:ignored_reason, :status])
|> validate_required([:status])
|> validate_inclusion(:status, states())
end
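  # Illustrative flow (hypothetical attributes and repo module, not from this app's code):
  #
  #   %CodeCorps.StripeEvent{}
  #   |> CodeCorps.StripeEvent.create_changeset(%{
  #     endpoint: "connect", id_from_stripe: "evt_123", object_id: "cus_123",
  #     object_type: "customer", type: "customer.updated"})
  #   |> Repo.insert()
  #
  #   # ...after handling the webhook:
  #   event |> CodeCorps.StripeEvent.update_changeset(%{status: "processed"}) |> Repo.update()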
defp endpoints do
~w{ connect platform }
end
defp states do
~w{ errored ignored processed processing unhandled unprocessed }
end
end
| 30.493151 | 92 | 0.689128 |
9e0017fc951cce59bc61c5c90f02b10439afb0cc | 678 | ex | Elixir | lib/utils/triangle.ex | sorentwo/euler | 76244a0ef3dcfa17d6b9571daa5d0b46f09057f4 | ["MIT"] | 8 | 2015-11-04T05:03:05.000Z | 2022-01-25T19:34:46.000Z | lib/utils/triangle.ex | sorentwo/euler | 76244a0ef3dcfa17d6b9571daa5d0b46f09057f4 | ["MIT"] | null | null | null | lib/utils/triangle.ex | sorentwo/euler | 76244a0ef3dcfa17d6b9571daa5d0b46f09057f4 | ["MIT"] | null | null | null |
defmodule Triangle do
def to_matrix(binary) do
String.split(binary, "\n", trim: true) |> Enum.map(&row_to_integers/1)
end
def row_to_integers(row) do
String.strip(row) |> String.split(" ") |> Enum.map(&String.to_integer/1)
end
def collapse([value]), do: value
def collapse([head | [next | tail]]) do
collapse([fold_row(head, next) | tail])
end
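  # Illustrative run (assuming the rows are supplied bottom row first, which is what
  # lets the recursion reduce to a single value):
  #
  #   Triangle.collapse([[2, 4, 6], [7, 4], [3]])
  #   #=> 14  # the best path 3 -> 7 -> 4 through the original triangle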
def fold_row(head, next) do
Enum.map(0..(length(next) - 1), fn(n) -> fold_triangle(n, next, head) end)
end
def fold_triangle(index, head, next) do
a = Enum.at(head, index)
b = Enum.at(next, index)
c = Enum.at(next, index + 1)
if a + b > a + c, do: a + b, else: a + c
end
end
| 25.111111 | 78 | 0.616519 |
9e00469a0b195f267a2f7ff236ce1c585020bfdc | 3,503 | ex | Elixir | apps/kv_map_reduce/lib/aggregators/job.ex | WhiteRookPL/production-debugging-workshop.ex | 26e81d14ba39c33764ddaee5d6d65a6061f4e823 | ["MIT"] | 5 | 2017-05-03T08:05:54.000Z | 2022-03-11T04:11:00.000Z | apps/kv_map_reduce/lib/aggregators/job.ex | WhiteRookPL/production-debugging-workshop.ex | 26e81d14ba39c33764ddaee5d6d65a6061f4e823 | ["MIT"] | null | null | null | apps/kv_map_reduce/lib/aggregators/job.ex | WhiteRookPL/production-debugging-workshop.ex | 26e81d14ba39c33764ddaee5d6d65a6061f4e823 | ["MIT"] | null | null | null |
defmodule KV.MapReduce.Job do
require Logger
defmodule State do
@moduledoc """
Module which represents state of the aggregation job.
"""
defstruct id: 0, type: nil, bucket: nil, result: [], start_time: 0
end
@moduledoc """
Module which represents aggregation jobs built on top of lightweight `OTP` processes (`proc_lib`).
"""
# Client API
@doc """
Starting point for a job which need to be performed.
It casts result to its parent.
"""
def start(id, type, bucket) do
state = %State{
id: id,
type: type,
bucket: bucket,
start_time: System.system_time()
}
:proc_lib.start(__MODULE__, :init, [ self(), state ])
end
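  # Illustrative call sequence (assumed, not taken from the workshop's tests):
  #
  #   {:ok, pid} = KV.MapReduce.Job.start(1, :avg, bucket)
  #
  # The job then walks :get_keys -> :aggregation -> :final_aggregation_step ->
  # :return_result and casts {:finished, id, result} back to the GenServer that
  # spawned it. Because loop/3 also handles {:system, from, request}, the process
  # answers :sys.get_state/1 and friends like any other OTP-compliant process.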
# Required functions for `:proc_lib`.
def system_continue(parent, opts, state) do
loop(parent, opts, state)
end
def system_terminate(reason, _parent, _opts, _state) do
exit(reason)
end
def system_get_state(state) do
{:ok, state}
end
defp write_debug(device, event, name) do
:io.format(device, "~p event = ~p~n", [ name, event ])
end
def system_replace_state(modify_state_fun, state) do
updated_state = modify_state_fun.(state)
{:ok, updated_state, updated_state}
end
def system_code_change(state, _module, _old_version, _extra) do
{:ok, state}
end
def init(parent, state) do
opts = :sys.debug_options([])
:proc_lib.init_ack(parent, {:ok, self()})
send(self(), :get_keys)
loop(parent, opts, state)
end
# Private functions.
defp loop(parent, opts, %State{id: id, type: type, bucket: bucket, result: result, start_time: start_time} = state) do
receive do
:get_keys ->
new_opts = :sys.handle_debug(opts, &write_debug/3, __MODULE__, {:in, :get_keys})
send(self(), :aggregation)
keys = KV.Bucket.keys(bucket)
loop(parent, new_opts, %{state | result: keys})
:aggregation ->
new_opts = :sys.handle_debug(opts, &write_debug/3, __MODULE__, {:in, :aggregate})
send(self(), :final_aggregation_step)
aggregate = aggregation(bucket, result)
loop(parent, new_opts, %{state | result: aggregate})
:final_aggregation_step ->
new_opts = :sys.handle_debug(opts, &write_debug/3, __MODULE__, {:in, :final_aggregation_step})
send(self(), :return_result)
final_aggregate = final_aggregation_step(type, result)
loop(parent, new_opts, %{state | result: final_aggregate})
:return_result ->
:sys.handle_debug(opts, &write_debug/3, __MODULE__, {:in, :return_result})
GenServer.cast(parent, {:finished, id, result})
end_time = System.system_time()
Logger.info("Job #{id} took #{System.convert_time_unit(end_time - start_time, :native, :milliseconds)} ms")
{:system, from, request} ->
:sys.handle_system_msg(request, from, parent, __MODULE__, opts, state)
loop(parent, opts, state)
end
end
defp aggregation(bucket, keys) do
sum = Enum.reduce(keys, 0, fn(key, accumulator) ->
accumulator + convert(KV.Bucket.get(bucket, key))
end)
{length(keys), sum}
end
defp convert(value) when is_number(value), do: value
defp convert(value) when is_binary(value), do: parser(Float.parse(value))
defp convert(_), do: 0
defp parser(:error), do: 0
defp parser({value, _rest}), do: value
defp final_aggregation_step(:avg, {size, sum}) do
sum / size
end
defp final_aggregation_step(:sum, {_size, sum}) do
sum
end
end
| 26.141791 | 120 | 0.6503 |
9e006ff82e241cbd8caf04f8465c67d8363f656c | 1,556 | ex | Elixir | lib/xdr/transactions/operations/inflation_result.ex | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 3 | 2021-08-17T20:32:45.000Z | 2022-03-13T20:26:02.000Z | lib/xdr/transactions/operations/inflation_result.ex | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 45 | 2021-08-12T20:19:41.000Z | 2022-03-27T21:00:10.000Z | lib/xdr/transactions/operations/inflation_result.ex | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 2 | 2021-09-22T23:11:13.000Z | 2022-01-23T03:19:11.000Z | defmodule StellarBase.XDR.Operations.InflationResult do
@moduledoc """
Representation of Stellar `InflationResult` type.
"""
alias StellarBase.XDR.{InflationPayoutList, Void}
alias StellarBase.XDR.Operations.InflationResultCode
@behaviour XDR.Declaration
@arms [INFLATION_SUCCESS: InflationPayoutList, default: Void]
@type result :: InflationPayoutList.t() | any()
@type t :: %__MODULE__{result: result(), code: InflationResultCode.t()}
defstruct [:result, :code]
@spec new(result :: result(), code :: InflationResultCode.t()) :: t()
def new(result, %InflationResultCode{} = code),
do: %__MODULE__{result: result, code: code}
@impl true
def encode_xdr(%__MODULE__{result: result, code: code}) do
code
|> XDR.Union.new(@arms, result)
|> XDR.Union.encode_xdr()
end
@impl true
def encode_xdr!(%__MODULE__{result: result, code: code}) do
code
|> XDR.Union.new(@arms, result)
|> XDR.Union.encode_xdr!()
end
@impl true
def decode_xdr(bytes, spec \\ union_spec())
def decode_xdr(bytes, spec) do
case XDR.Union.decode_xdr(bytes, spec) do
{:ok, {{code, result}, rest}} -> {:ok, {new(result, code), rest}}
error -> error
end
end
@impl true
def decode_xdr!(bytes, spec \\ union_spec())
def decode_xdr!(bytes, spec) do
{{code, result}, rest} = XDR.Union.decode_xdr!(bytes, spec)
{new(result, code), rest}
end
@spec union_spec() :: XDR.Union.t()
defp union_spec do
nil
|> InflationResultCode.new()
|> XDR.Union.new(@arms)
end
end
| 25.508197 | 73 | 0.665167 |
9e007e48e3d0b58abbe27b728e585fa03aa960f8 | 1,374 | exs | Elixir | mix.exs | iilyak/httpotion | 8fe4da27f04121a3b0c5fc569f6625985e42f1c0 | [
"Unlicense"
] | null | null | null | mix.exs | iilyak/httpotion | 8fe4da27f04121a3b0c5fc569f6625985e42f1c0 | [
"Unlicense"
] | null | null | null | mix.exs | iilyak/httpotion | 8fe4da27f04121a3b0c5fc569f6625985e42f1c0 | [
"Unlicense"
] | null | null | null | defmodule HTTPotion.Mixfile do
use Mix.Project
def project do
if Mix.env == :dial, do: Application.ensure_all_started(:ex_unit)
[ app: :httpotion,
name: "httpotion",
source_url: "https://github.com/myfreeweb/httpotion",
version: "3.1.2",
elixir: "~> 1.3",
docs: [ extras: ["README.md", "CODE_OF_CONDUCT.md"] ],
description: description(),
deps: deps(),
package: package(),
elixirc_paths: elixirc_paths(Mix.env),
test_pattern: "*_test.ex",
warn_test_pattern: "*_test.exs",
preferred_cli_env: [ dialyzer: :dial ] ]
end
def application do
[ applications: [:ssl, :ibrowse] ]
end
defp description do
"""
Fancy HTTP client for Elixir, based on ibrowse.
"""
end
defp deps do
[ {:ibrowse, "== 4.4.0"},
{:ex_doc, "~> 0.18", only: [:dev, :test, :docs]} ]
end
defp package do
[ files: [ "lib", "mix.exs", "README.md", "CODE_OF_CONDUCT.md", "UNLICENSE" ],
maintainers: [ "Greg V", "Aleksei Magusev" ],
licenses: [ "Unlicense" ],
links: %{ "GitHub" => "https://github.com/myfreeweb/httpotion" } ]
end
# http://learningelixir.joekain.com/dialyzer-and-integration-tests/
# modified to only compile for dialyzer, not for running tests
defp elixirc_paths(:dial), do: ["lib", "test"]
defp elixirc_paths(_), do: ["lib"]
end
| 28.625 | 82 | 0.607715 |
9e013a4aca7060b4ab7962be86b59f4143be6a07 | 2,715 | exs | Elixir | test/surface/integrations/slot_change_tracking_test.exs | inspired-consulting/surface | 3a6a3a454704a9aaf83cd23a6393cbd09bdca8bd | [
"MIT"
] | 1,161 | 2019-12-12T02:30:52.000Z | 2021-03-11T17:55:44.000Z | test/surface/integrations/slot_change_tracking_test.exs | inspired-consulting/surface | 3a6a3a454704a9aaf83cd23a6393cbd09bdca8bd | [
"MIT"
] | 244 | 2019-12-12T14:05:07.000Z | 2021-03-11T07:04:17.000Z | test/surface/integrations/slot_change_tracking_test.exs | inspired-consulting/surface | 3a6a3a454704a9aaf83cd23a6393cbd09bdca8bd | [
"MIT"
] | 73 | 2019-12-12T13:57:41.000Z | 2021-03-11T21:46:10.000Z | defmodule Surface.SlotChangeTrackingTest do
use ExUnit.Case, async: true
import Phoenix.ConnTest
import Phoenix.LiveViewTest
@endpoint Endpoint
defmodule Outer do
use Surface.LiveComponent
slot default, args: [:param]
def render(assigns) do
~F"""
<div><#slot :args={param: "Param from Outer"}/></div>
"""
end
end
defmodule ViewComponentWithInnerContent do
use Surface.LiveView
alias Surface.CheckUpdated
data count, :integer, default: 0
data test_pid, :integer
def mount(_params, %{"test_pid" => test_pid}, socket) do
{:ok, assign(socket, test_pid: test_pid)}
end
def render(assigns) do
~F"""
<Outer id="outer" :let={param: param}>
Count: {@count}
<CheckUpdated id="1" dest={@test_pid} content={param} />
<CheckUpdated id="2" dest={@test_pid} />
</Outer>
"""
end
def handle_event("update_count", _, socket) do
{:noreply, update(socket, :count, &(&1 + 1))}
end
end
defmodule Counter do
use Surface.LiveComponent
slot default, args: [:value]
data value, :integer, default: 0
def render(assigns) do
~F"""
<div>
Value in the Counter: {@value}
<#slot :args={value: @value}/>
<button id="incButton" :on-click="inc">+</button>
</div>
"""
end
def handle_event("inc", _, socket) do
{:noreply, update(socket, :value, &(&1 + 1))}
end
end
defmodule ViewWithCounter do
use Surface.LiveView
def render(assigns) do
~F"""
<Counter id="counter" :let={value: value}>
Value in the View: {value}
</Counter>
"""
end
end
test "changing a slot arg updates any view/component using it" do
{:ok, view, html} = live_isolated(build_conn(), ViewWithCounter)
assert html =~ "Value in the Counter: 0"
assert html =~ "Value in the View: 0"
html =
view
|> element("#incButton", "+")
|> render_click()
assert html =~ "Value in the Counter: 1"
assert html =~ "Value in the View: 1"
end
test "change tracking is disabled if a child component uses a passed slot arg" do
{:ok, view, html} =
live_isolated(build_conn(), ViewComponentWithInnerContent, session: %{"test_pid" => self()})
assert html =~ "Count: 0"
assert_receive {:updated, "1"}
assert_receive {:updated, "2"}
refute_receive {:updated, _}
html = render_click(view, :update_count)
assert html =~ "Count: 1"
# Component using slot args should be updated
assert_receive {:updated, "1"}
# Component not using the slot arg should not be updated
refute_receive {:updated, "2"}
end
end
| 23.405172 | 98 | 0.609945 |
9e013b7dd0b3b0a283117b601cd177b3c78591e0 | 1,247 | ex | Elixir | clients/iam/lib/google_api/iam/v1/model/create_service_account_request.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/iam/lib/google_api/iam/v1/model/create_service_account_request.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/iam/lib/google_api/iam/v1/model/create_service_account_request.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.IAM.V1.Model.CreateServiceAccountRequest do
@moduledoc """
The service account create request.
"""
@derive [Poison.Encoder]
defstruct [
:"accountId",
:"serviceAccount"
]
end
defimpl Poison.Decoder, for: GoogleApi.IAM.V1.Model.CreateServiceAccountRequest do
import GoogleApi.IAM.V1.Deserializer
def decode(value, options) do
value
|> deserialize(:"serviceAccount", :struct, GoogleApi.IAM.V1.Model.ServiceAccount, options)
end
end
| 31.175 | 94 | 0.752205 |
9e0170399ba915f3c9bded11b3a9f112972ed74b | 1,915 | exs | Elixir | clients/android_management/mix.exs | jamesvl/elixir-google-api | 6c87fb31d996f08fb42ce6066317e9d652a87acc | [
"Apache-2.0"
] | null | null | null | clients/android_management/mix.exs | jamesvl/elixir-google-api | 6c87fb31d996f08fb42ce6066317e9d652a87acc | [
"Apache-2.0"
] | null | null | null | clients/android_management/mix.exs | jamesvl/elixir-google-api | 6c87fb31d996f08fb42ce6066317e9d652a87acc | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidManagement.Mixfile do
use Mix.Project
@version "0.33.0"
def project() do
[
app: :google_api_android_management,
version: @version,
elixir: "~> 1.6",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/android_management"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:google_gax, "~> 0.4"},
{:ex_doc, "~> 0.16", only: :dev}
]
end
defp description() do
"""
Android Management API client library. The Android Management API provides remote enterprise management of Android devices and apps.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching", "Daniel Azuma"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/android_management",
"Homepage" => "https://developers.google.com/android/management"
}
]
end
end
| 28.58209 | 136 | 0.667885 |
9e0175e76d25434dc5698cabe8f664d14cd10879 | 1,160 | exs | Elixir | apps/api/config/config.exs | norbu09/e_no_time | 16a0db136dd91cdcf38d4aab5f11b0684dae289d | [
"BSD-2-Clause"
] | null | null | null | apps/api/config/config.exs | norbu09/e_no_time | 16a0db136dd91cdcf38d4aab5f11b0684dae289d | [
"BSD-2-Clause"
] | null | null | null | apps/api/config/config.exs | norbu09/e_no_time | 16a0db136dd91cdcf38d4aab5f11b0684dae289d | [
"BSD-2-Clause"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
config :maru, Api.API,
http: [port: 4001]
# You can configure your application as:
#
# config :api, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:api, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 34.117647 | 73 | 0.744828 |
9e0187960f85db2947fafcd4940528b43ae00ea3 | 840 | exs | Elixir | config/config.exs | underhilllabs/phx-bookmarks | 6c761ef0f602dbde267c337906a17f098f6bbe50 | [
"MIT"
] | null | null | null | config/config.exs | underhilllabs/phx-bookmarks | 6c761ef0f602dbde267c337906a17f098f6bbe50 | [
"MIT"
] | 2 | 2015-07-03T18:50:46.000Z | 2015-07-03T18:52:14.000Z | config/config.exs | underhilllabs/phx-bookmarks | 6c761ef0f602dbde267c337906a17f098f6bbe50 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# Configures the endpoint
config :phx_bkmark, PhxBkmark.Endpoint,
url: [host: "localhost"],
root: Path.dirname(__DIR__),
secret_key_base: "3LYgkLB678z9XSs/9e1duzeZsIu7iLJbwtO+KpKB39dC9ZBlazCp4RL9ClgUP9cI",
debug_errors: false,
pubsub: [name: PhxBkmark.PubSub,
adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 33.6 | 86 | 0.761905 |
9e01a06198b3d79ea91acd21058546ed3a8059f2 | 1,193 | exs | Elixir | test/inflator_web/controllers/inflation_controller_test.exs | astoica1986/inflator | ca0e99a8e114956edae0b0456eb8b8d97c67a342 | [
"Apache-2.0"
] | null | null | null | test/inflator_web/controllers/inflation_controller_test.exs | astoica1986/inflator | ca0e99a8e114956edae0b0456eb8b8d97c67a342 | [
"Apache-2.0"
] | null | null | null | test/inflator_web/controllers/inflation_controller_test.exs | astoica1986/inflator | ca0e99a8e114956edae0b0456eb8b8d97c67a342 | [
"Apache-2.0"
] | null | null | null | defmodule InflatorWeb.InflationControllerTest do
use InflatorWeb.ConnCase
alias Inflator.Price
alias Inflator.Price.Inflation
import Mock
@create_attrs %{borough: "some borough", date: "01/04/2010", index: 120.5}
@update_attrs %{borough: "some updated borough", date: "01/05/2011", index: 456.7}
@invalid_attrs %{borough: nil, date: nil, index: nil}
@mock_inflated_price 500_123.345
def fixture(:inflation) do
{:ok, inflation} = Price.create_inflation(@create_attrs)
inflation
end
setup %{conn: conn} do
{:ok, conn: put_req_header(conn, "accept", "application/json")}
end
describe "get_inflated_price" do
test "return inflated price json", %{conn: conn} do
with_mock Price, [get_inflated_price: fn(_to, _from, _price, _borough) -> {:ok, @mock_inflated_price} end] do
conn = get conn, inflation_path(
conn,
:get_inflated_price,
to: "a", from: "b", price: 0, borough: "abc"
)
assert json_response(conn, 200) == %{ "inflated_price" => @mock_inflated_price}
end
end
end
defp create_inflation(_) do
inflation = fixture(:inflation)
{:ok, inflation: inflation}
end
end
| 30.589744 | 115 | 0.668902 |
9e01acdc3d76a6b8caf1324a08c9de296be03155 | 400 | ex | Elixir | lib/rubber_band/client/drivers/httpoison.ex | tlux/rubberband | bece85cf8049ba487bba1d5df0906f6fbfa146eb | [
"MIT"
] | null | null | null | lib/rubber_band/client/drivers/httpoison.ex | tlux/rubberband | bece85cf8049ba487bba1d5df0906f6fbfa146eb | [
"MIT"
] | null | null | null | lib/rubber_band/client/drivers/httpoison.ex | tlux/rubberband | bece85cf8049ba487bba1d5df0906f6fbfa146eb | [
"MIT"
] | null | null | null | defmodule RubberBand.Client.Drivers.HTTPoison do
@moduledoc """
An adapter implementation that uses HTTPoison to dispatch requests to
Elasticsearch.
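  In normal use the client invokes this driver for you; a minimal sketch of a
  direct call (URL and options are illustrative):
      RubberBand.Client.Drivers.HTTPoison.request(
        :get,
        URI.parse("http://localhost:9200"),
        "",
        [],
        []
      )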
"""
@behaviour RubberBand.Client.Driver
@impl true
def request(verb, url, body, headers, opts) do
{:ok, _} = Application.ensure_all_started(:httpoison)
HTTPoison.request(verb, URI.to_string(url), body, headers, opts)
end
end
| 26.666667 | 71 | 0.7375 |
9e01b07689a61587dd323a5863df03feabf0b707 | 577 | ex | Elixir | web/router.ex | thluiz/quartoElugRJ | 4988fe0fd05ed92a43f92f5d43ebbc2983021c30 | [
"MIT"
] | 1 | 2016-05-06T00:00:10.000Z | 2016-05-06T00:00:10.000Z | web/router.ex | thluiz/quartoElugRJ | 4988fe0fd05ed92a43f92f5d43ebbc2983021c30 | [
"MIT"
] | null | null | null | web/router.ex | thluiz/quartoElugRJ | 4988fe0fd05ed92a43f92f5d43ebbc2983021c30 | [
"MIT"
] | null | null | null | defmodule QuartoElugRJ.Router do
use QuartoElugRJ.Web, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", QuartoElugRJ do
pipe_through :browser # Use the default browser stack
get "/", PageController, :index
get "/webrtc", WebRTCController, :index
end
# Other scopes may use custom stacks.
# scope "/api", QuartoElugRJ do
# pipe_through :api
# end
end
| 20.607143 | 57 | 0.684575 |
9e01d1d445d019ee9156c5fb2b0f46cf03bab8b1 | 3,248 | ex | Elixir | clients/data_fusion/lib/google_api/data_fusion/v1beta1/model/expr.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/data_fusion/lib/google_api/data_fusion/v1beta1/model/expr.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/data_fusion/lib/google_api/data_fusion/v1beta1/model/expr.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DataFusion.V1beta1.Model.Expr do
@moduledoc """
Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec. Example (Comparison): title: "Summary size limit" description: "Determines if a summary is less than 100 chars" expression: "document.summary.size() < 100" Example (Equality): title: "Requestor is owner" description: "Determines if requestor is the document owner" expression: "document.owner == request.auth.claims.email" Example (Logic): title: "Public documents" description: "Determine whether the document should be publicly visible" expression: "document.type != 'private' && document.type != 'internal'" Example (Data Manipulation): title: "Notification string" description: "Create a notification string with a timestamp." expression: "'New message received at ' + string(document.create_time)" The exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information.
## Attributes
* `description` (*type:* `String.t`, *default:* `nil`) - Optional. Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
* `expression` (*type:* `String.t`, *default:* `nil`) - Textual representation of an expression in Common Expression Language syntax.
* `location` (*type:* `String.t`, *default:* `nil`) - Optional. String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
* `title` (*type:* `String.t`, *default:* `nil`) - Optional. Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:description => String.t() | nil,
:expression => String.t() | nil,
:location => String.t() | nil,
:title => String.t() | nil
}
field(:description)
field(:expression)
field(:location)
field(:title)
end
defimpl Poison.Decoder, for: GoogleApi.DataFusion.V1beta1.Model.Expr do
def decode(value, options) do
GoogleApi.DataFusion.V1beta1.Model.Expr.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DataFusion.V1beta1.Model.Expr do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 58 | 1,092 | 0.734914 |
9e01de162c74b9519535eae3d777fbfcddd9a689 | 1,297 | ex | Elixir | lib/ibu/athlete.ex | ericgoodwin/ibu | 949ce9baf4f794ed74c0903f43e25c1f9caec840 | [
"Apache-2.0"
] | 1 | 2021-01-18T17:39:05.000Z | 2021-01-18T17:39:05.000Z | lib/ibu/athlete.ex | ericgoodwin/ibu | 949ce9baf4f794ed74c0903f43e25c1f9caec840 | [
"Apache-2.0"
] | null | null | null | lib/ibu/athlete.ex | ericgoodwin/ibu | 949ce9baf4f794ed74c0903f43e25c1f9caec840 | [
"Apache-2.0"
] | null | null | null | defmodule IBU.Athlete do
import IBU.DateHelper, only: [to_birth_date: 1]
alias IBU.Stats
defstruct([
:family_name,
:given_name,
:birth_date,
:gender,
:ibu_id,
:flag_uri,
:photo_uri,
:country_code,
:status,
:stats
])
@type t :: %__MODULE__{
family_name: binary,
given_name: binary,
birth_date: Date.t(),
gender: binary,
ibu_id: binary,
flag_uri: binary,
photo_uri: binary,
country_code: binary,
status: binary,
stats: list
}
@spec build_from_api(map) :: t
def build_from_api(data) when is_map(data) do
%__MODULE__{
family_name: data["FamilyName"],
given_name: data["GivenName"],
birth_date: data["Birthdate"] |> to_birth_date,
gender: data["GenderId"],
ibu_id: data["IBUId"],
flag_uri: data["FlagURI"],
photo_uri: data["PhotoURI"],
country_code: data["NAT"],
status: get_status(data["Functions"]),
stats: Stats.build(data)
}
end
@spec get_status(binary) :: binary
defp get_status("Athlete"), do: "athlete"
defp get_status("Athlete, Official"), do: "athlete"
defp get_status("Not active"), do: "inactive"
defp get_status("Official"), do: "official"
end
| 24.471698 | 53 | 0.598304 |
9e01ed754fa586d212d6e23de451091bb2dcecf7 | 20,150 | exs | Elixir | test/lib/bamboo/adapters/send_grid_adapter_test.exs | vpotapev/bamboo | c7efd8d83d7aaeb66e699a4b788a839eb8fa9633 | [
"MIT"
] | null | null | null | test/lib/bamboo/adapters/send_grid_adapter_test.exs | vpotapev/bamboo | c7efd8d83d7aaeb66e699a4b788a839eb8fa9633 | [
"MIT"
] | null | null | null | test/lib/bamboo/adapters/send_grid_adapter_test.exs | vpotapev/bamboo | c7efd8d83d7aaeb66e699a4b788a839eb8fa9633 | [
"MIT"
] | null | null | null | defmodule Bamboo.SendGridAdapterTest do
use ExUnit.Case
alias Bamboo.Email
alias Bamboo.SendGridAdapter
alias Bamboo.Test.User
@config %{adapter: SendGridAdapter, api_key: "123_abc"}
@config_with_bad_key %{adapter: SendGridAdapter, api_key: nil}
@config_with_env_var_key %{adapter: SendGridAdapter, api_key: {:system, "SENDGRID_API"}}
@config_with_sandbox_enabled %{adapter: SendGridAdapter, api_key: "123_abc", sandbox: true}
defmodule FakeSendgrid do
use Plug.Router
plug(
Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Jason
)
plug(:match)
plug(:dispatch)
def start_server(parent) do
Agent.start_link(fn -> Map.new() end, name: __MODULE__)
Agent.update(__MODULE__, &Map.put(&1, :parent, parent))
port = get_free_port()
Application.put_env(:bamboo, :sendgrid_base_uri, "http://localhost:#{port}")
Plug.Adapters.Cowboy.http(__MODULE__, [], port: port, ref: __MODULE__)
end
defp get_free_port do
{:ok, socket} = :ranch_tcp.listen(port: 0)
{:ok, port} = :inet.port(socket)
:erlang.port_close(socket)
port
end
def shutdown do
Plug.Adapters.Cowboy.shutdown(__MODULE__)
end
post "/mail/send" do
case get_in(conn.params, ["from", "email"]) do
"INVALID_EMAIL" -> conn |> send_resp(500, "Error!!") |> send_to_parent
_ -> conn |> send_resp(200, "SENT") |> send_to_parent
end
end
defp send_to_parent(conn) do
parent = Agent.get(__MODULE__, fn set -> Map.get(set, :parent) end)
send(parent, {:fake_sendgrid, conn})
conn
end
end
setup do
FakeSendgrid.start_server(self())
on_exit(fn ->
FakeSendgrid.shutdown()
end)
:ok
end
test "raises if the api key is nil" do
assert_raise ArgumentError, ~r/no API key set/, fn ->
new_email(from: "[email protected]") |> SendGridAdapter.deliver(@config_with_bad_key)
end
assert_raise ArgumentError, ~r/no API key set/, fn ->
SendGridAdapter.handle_config(%{})
end
end
test "can read the api key from an ENV var" do
System.put_env("SENDGRID_API", "123_abc")
config = SendGridAdapter.handle_config(@config_with_env_var_key)
assert config[:api_key] == "123_abc"
end
test "raises if an invalid ENV var is used for the API key" do
System.delete_env("SENDGRID_API")
assert_raise ArgumentError, ~r/no API key set/, fn ->
new_email(from: "[email protected]") |> SendGridAdapter.deliver(@config_with_env_var_key)
end
assert_raise ArgumentError, ~r/no API key set/, fn ->
SendGridAdapter.handle_config(@config_with_env_var_key)
end
end
test "deliver/2 sends the to the right url" do
new_email() |> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{request_path: request_path}}
assert request_path == "/mail/send"
end
test "deliver/2 sends from, html and text body, subject, headers and attachment" do
email =
new_email(
from: {"From", "[email protected]"},
subject: "My Subject",
text_body: "TEXT BODY",
html_body: "HTML BODY"
)
|> Email.put_header("Reply-To", "[email protected]")
|> Email.put_attachment(Path.join(__DIR__, "../../../support/attachment.txt"))
email |> SendGridAdapter.deliver(@config)
assert SendGridAdapter.supports_attachments?()
assert_receive {:fake_sendgrid, %{params: params, req_headers: headers}}
assert params["from"]["name"] == email.from |> elem(0)
assert params["from"]["email"] == email.from |> elem(1)
assert params["subject"] == email.subject
assert Enum.member?(params["content"], %{"type" => "text/plain", "value" => email.text_body})
assert Enum.member?(params["content"], %{"type" => "text/html", "value" => email.html_body})
assert Enum.member?(headers, {"authorization", "Bearer #{@config[:api_key]}"})
assert params["attachments"] == [
%{
"type" => "text/plain",
"filename" => "attachment.txt",
"content" => "VGVzdCBBdHRhY2htZW50Cg=="
}
]
end
test "deliver/2 correctly custom args" do
email = new_email()
email
|> Email.put_private(:custom_args, %{post_code: "123"})
|> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
personalization = List.first(params["personalizations"])
assert personalization["custom_args"] == %{"post_code" => "123"}
end
test "deliver/2 without custom args" do
email = new_email()
email
|> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
personalization = List.first(params["personalizations"])
assert personalization["custom_args"] == nil
end
test "deliver/2 correctly formats recipients" do
email =
new_email(
to: [{"To", "[email protected]"}, {nil, "[email protected]"}],
cc: [{"CC", "[email protected]"}],
bcc: [{"BCC", "[email protected]"}]
)
email |> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
addressees = List.first(params["personalizations"])
assert addressees["to"] == [
%{"name" => "To", "email" => "[email protected]"},
%{"email" => "[email protected]"}
]
assert addressees["cc"] == [%{"name" => "CC", "email" => "[email protected]"}]
assert addressees["bcc"] == [%{"name" => "BCC", "email" => "[email protected]"}]
end
test "deliver/2 correctly handles templates" do
email =
new_email(
from: {"From", "[email protected]"},
subject: "My Subject"
)
email
|> Bamboo.SendGridHelper.with_template("a4ca8ac9-3294-4eaf-8edc-335935192b8d")
|> Bamboo.SendGridHelper.substitute("%foo%", "bar")
|> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
personalization = List.first(params["personalizations"])
refute Map.has_key?(params, "content")
assert params["template_id"] == "a4ca8ac9-3294-4eaf-8edc-335935192b8d"
assert personalization["substitutions"] == %{"%foo%" => "bar"}
end
test "deliver/2 correctly handles ip_pool_name" do
email =
new_email(
from: {"From", "[email protected]"},
subject: "My Subject"
)
email
|> Bamboo.SendGridHelper.with_ip_pool_name("my-ip-pool-name")
|> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
assert Map.get(params, "ip_pool_name") == "my-ip-pool-name"
end
test "deliver/2 correctly handles an asm_group_id" do
email =
new_email(
from: {"From", "[email protected]"},
subject: "My Subject"
)
email
|> Bamboo.SendGridHelper.with_asm_group_id(1234)
|> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
assert params["asm"]["group_id"] == 1234
end
test "deliver/2 correctly handles a bypass_list_management" do
email =
new_email(
from: {"From", "[email protected]"},
subject: "My Subject"
)
email
|> Bamboo.SendGridHelper.with_bypass_list_management(true)
|> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
assert params["mail_settings"]["bypass_list_management"]["enable"] == true
end
test "deliver/2 correctly handles with_google_analytics that's enabled with no utm_params" do
email =
new_email(
from: {"From", "[email protected]"},
subject: "My Subject"
)
email
|> Bamboo.SendGridHelper.with_google_analytics(true)
|> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
assert params["tracking_settings"]["ganalytics"]["enable"] == true
end
test "deliver/2 correctly handles with_google_analytics that's disabled with no utm_params" do
email =
new_email(
from: {"From", "[email protected]"},
subject: "My Subject"
)
email
|> Bamboo.SendGridHelper.with_google_analytics(false)
|> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
assert params["tracking_settings"]["ganalytics"]["enable"] == false
end
test "deliver/2 correctly handles with_google_analytics that's enabled with utm_params" do
email =
new_email(
from: {"From", "[email protected]"},
subject: "My Subject"
)
utm_params = %{
utm_source: "source",
utm_medium: "medium",
utm_campaign: "campaign",
utm_term: "term",
utm_content: "content"
}
email
|> Bamboo.SendGridHelper.with_google_analytics(true, utm_params)
|> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
assert params["tracking_settings"]["ganalytics"]["enable"] == true
assert params["tracking_settings"]["ganalytics"]["utm_source"] == "source"
assert params["tracking_settings"]["ganalytics"]["utm_medium"] == "medium"
assert params["tracking_settings"]["ganalytics"]["utm_campaign"] == "campaign"
assert params["tracking_settings"]["ganalytics"]["utm_term"] == "term"
assert params["tracking_settings"]["ganalytics"]["utm_content"] == "content"
end
test "deliver/2 correctly handles a sendgrid_send_at timestamp" do
email =
new_email(
from: {"From", "[email protected]"},
subject: "My Subject"
)
email
|> Bamboo.SendGridHelper.with_send_at(1_580_485_560)
|> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
assert params["send_at"] == 1_580_485_560
end
test "deliver/2 doesn't force a subject" do
email = new_email(from: {"From", "[email protected]"})
email
|> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
refute Map.has_key?(params, "subject")
end
test "deliver/2 correctly formats reply-to from headers" do
email = new_email(headers: %{"reply-to" => "[email protected]"})
email |> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
assert params["reply_to"] == %{"email" => "[email protected]"}
end
test "deliver/2 correctly formats Reply-To from headers" do
email = new_email(headers: %{"Reply-To" => "[email protected]"})
email |> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
assert params["reply_to"] == %{"email" => "[email protected]"}
end
test "deliver/2 correctly formats Reply-To from headers with name and email" do
email = new_email(headers: %{"Reply-To" => {"Foo Bar", "[email protected]"}})
email |> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
assert params["reply_to"] == %{"email" => "[email protected]", "name" => "Foo Bar"}
end
test "deliver/2 correctly formats reply-to from headers with name and email" do
email = new_email(headers: %{"reply-to" => {"Foo Bar", "[email protected]"}})
email |> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
assert params["reply_to"] == %{"email" => "[email protected]", "name" => "Foo Bar"}
end
test "deliver/2 correctly sends headers" do
email =
new_email(
headers: %{
"In-Reply-To" => "message_id",
"References" => "message_id"
}
)
email |> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
assert params["headers"] ==
%{"In-Reply-To" => "message_id", "References" => "message_id"}
end
test "deliver/2 removes 'reply-to' and 'Reply-To' headers" do
email =
new_email(
headers: %{
"X-Custom-Header" => "ohai",
"Reply-To" => "something",
"reply-to" => {"a", "tuple"}
}
)
email |> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
refute Map.has_key?(params["headers"], "Reply-To")
refute Map.has_key?(params["headers"], "reply-to")
end
test "deliver/2 omits attachments key if no attachments" do
email = new_email()
email |> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
refute Map.has_key?(params, "attachments")
end
test "deliver/2 handles multiple personalizations" do
{:ok, dt, _} = DateTime.from_iso8601("2020-01-01 00:00:00Z")
personalization2 = %{
bcc: [%{"email" => "[email protected]", "name" => "BCC2"}],
cc: [%{"email" => "[email protected]", "name" => "CC2"}],
custom_args: %{"post_code" => "223"},
substitutions: %{"%foo%" => "bar2"},
headers: [%{"X-Fun-Header" => "Fun Value"}],
to: [
%{"email" => "[email protected]", "name" => "To2"},
%{"email" => "[email protected]"}
],
send_at: dt
}
personalization3 = %{
custom_args: %{"thinger" => "bob"},
to: [
%{"email" => "[email protected]", "name" => "To3"}
],
cc: [],
subject: "Custom subject",
send_at: 1_580_485_561
}
email =
new_email(
to: [{"To", "[email protected]"}, {nil, "[email protected]"}],
cc: [{"CC", "[email protected]"}],
subject: "My Subject",
bcc: [{"BCC", "[email protected]"}]
)
|> Email.put_header("Reply-To", "[email protected]")
|> Bamboo.SendGridHelper.substitute("%foo%", "bar")
|> Bamboo.SendGridHelper.with_send_at(1_580_485_562)
|> Bamboo.SendGridHelper.add_personalizations([personalization2, personalization3])
|> Email.put_private(:custom_args, %{post_code: "123"})
email
|> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
personalizations = params["personalizations"]
[got_personalization1, got_personalization2, got_personalization3] = personalizations
assert got_personalization1 == %{
"bcc" => [%{"email" => "[email protected]", "name" => "BCC"}],
"cc" => [%{"email" => "[email protected]", "name" => "CC"}],
"custom_args" => %{"post_code" => "123"},
"substitutions" => %{"%foo%" => "bar"},
"to" => [
%{"email" => "[email protected]", "name" => "To"},
%{"email" => "[email protected]"}
],
"send_at" => 1_580_485_562
}
assert got_personalization2 == %{
"bcc" => [%{"email" => "[email protected]", "name" => "BCC2"}],
"cc" => [%{"email" => "[email protected]", "name" => "CC2"}],
"custom_args" => %{"post_code" => "223"},
"headers" => [%{"X-Fun-Header" => "Fun Value"}],
"substitutions" => %{"%foo%" => "bar2"},
"to" => [
%{"email" => "[email protected]", "name" => "To2"},
%{"email" => "[email protected]"}
],
"send_at" => 1_577_836_800
}
assert got_personalization3 ==
%{
"custom_args" => %{"thinger" => "bob"},
"to" => [
%{"email" => "[email protected]", "name" => "To3"}
],
"cc" => [],
"subject" => "Custom subject",
"send_at" => 1_580_485_561
}
end
test "deliver/2 handles setting params only via personalizations" do
base_personalization = %{
bcc: [%{"email" => "[email protected]", "name" => "BCC"}],
subject: "Here is your email"
}
personalizations =
Enum.map(
[
%{to: "[email protected]"},
%{to: "[email protected]", send_at: 1_580_485_560}
],
&Map.merge(base_personalization, &1)
)
email =
new_email()
|> Email.put_header("Reply-To", "[email protected]")
|> Bamboo.SendGridHelper.add_personalizations(personalizations)
email
|> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
personalizations = params["personalizations"]
[got_personalization1, got_personalization2] = personalizations
assert got_personalization1 == %{
"bcc" => [%{"email" => "[email protected]", "name" => "BCC"}],
"subject" => "Here is your email",
"to" => [%{"email" => "[email protected]"}]
}
assert got_personalization2 == %{
"bcc" => [%{"email" => "[email protected]", "name" => "BCC"}],
"subject" => "Here is your email",
"to" => [%{"email" => "[email protected]"}],
"send_at" => 1_580_485_560
}
end
test "deliver/2 personalizations require a 'to' field" do
email =
new_email()
|> Email.put_header("Reply-To", "[email protected]")
|> Bamboo.SendGridHelper.add_personalizations([%{subject: "This will fail"}])
assert_raise RuntimeError, ~r/'to' field/, fn ->
email
|> SendGridAdapter.deliver(@config)
end
end
test "deliver/2 personalization send_at field must be either DateTime or epoch timestamp" do
email =
new_email()
|> Email.put_header("Reply-To", "[email protected]")
|> Bamboo.SendGridHelper.add_personalizations([%{to: "[email protected]", send_at: "now"}])
assert_raise RuntimeError, ~r/'send_at' time/, fn ->
email
|> SendGridAdapter.deliver(@config)
end
end
test "deliver/2 correctly formats email addresses in personalizations" do
personalization = %{
to: "[email protected]",
cc: [{"Baz", "[email protected]"}, %User{first_name: "fred", email: "[email protected]"}],
bcc: [%{"email" => "[email protected]", "name" => "BCC"}, {nil, "[email protected]"}],
subject: "Here is your email"
}
email =
new_email()
|> Email.put_header("Reply-To", "[email protected]")
|> Bamboo.SendGridHelper.add_personalizations([personalization])
email
|> SendGridAdapter.deliver(@config)
assert_receive {:fake_sendgrid, %{params: params}}
[got_personalization] = params["personalizations"]
assert got_personalization == %{
"subject" => "Here is your email",
"to" => [%{"email" => "[email protected]"}],
"cc" => [
%{"name" => "Baz", "email" => "[email protected]"},
%{"name" => "fred", "email" => "[email protected]"}
],
"bcc" => [%{"name" => "BCC", "email" => "[email protected]"}, %{"email" => "[email protected]"}]
}
end
test "deliver/2 personalization address-as-map must contain at least an email field" do
email =
new_email()
|> Email.put_header("Reply-To", "[email protected]")
|> Bamboo.SendGridHelper.add_personalizations([%{to: %{"name" => "Lou"}, send_at: "now"}])
assert_raise RuntimeError, ~r/'email' field/, fn ->
email
|> SendGridAdapter.deliver(@config)
end
end
test "deliver/2 will set sandbox mode correctly" do
email = new_email()
email |> SendGridAdapter.deliver(@config_with_sandbox_enabled)
assert_receive {:fake_sendgrid, %{params: params}}
assert params["mail_settings"]["sandbox_mode"]["enable"] == true
end
test "deliver/2 with sandbox mode enabled, does not overwrite other mail_settings" do
email = new_email()
email
|> Bamboo.SendGridHelper.with_bypass_list_management(true)
|> SendGridAdapter.deliver(@config_with_sandbox_enabled)
assert_receive {:fake_sendgrid, %{params: params}}
assert params["mail_settings"]["sandbox_mode"]["enable"] == true
assert params["mail_settings"]["bypass_list_management"]["enable"] == true
end
test "raises if the response is not a success" do
email = new_email(from: "INVALID_EMAIL")
assert_raise Bamboo.ApiError, fn ->
email |> SendGridAdapter.deliver(@config)
end
end
test "removes api key from error output" do
email = new_email(from: "INVALID_EMAIL")
assert_raise Bamboo.ApiError, ~r/"key" => "\[FILTERED\]"/, fn ->
email |> SendGridAdapter.deliver(@config)
end
end
defp new_email(attrs \\ []) do
attrs = Keyword.merge([from: "[email protected]", to: []], attrs)
Email.new_email(attrs) |> Bamboo.Mailer.normalize_addresses()
end
end
| 31.337481 | 97 | 0.604069 |
9e01f77d37e906399b651bbfc797b6f83e88b03d | 1,410 | ex | Elixir | lib/tesla/middleware/tuples.ex | DNNX/tesla | a1a60ad74de1b88782ab50a7f52719d7cecef94a | [
"MIT"
] | null | null | null | lib/tesla/middleware/tuples.ex | DNNX/tesla | a1a60ad74de1b88782ab50a7f52719d7cecef94a | [
"MIT"
] | null | null | null | lib/tesla/middleware/tuples.ex | DNNX/tesla | a1a60ad74de1b88782ab50a7f52719d7cecef94a | [
"MIT"
] | null | null | null | defmodule Tesla.Middleware.Tuples do
@behaviour Tesla.Middleware
@moduledoc """
  Return `:ok` / `:error` tuples for successful HTTP transactions, i.e. when the request is completed
  (no network errors etc.) - but it can still be an application-level error (e.g. 404 or 500).
**NOTE**: This middleware must be included as the first in the stack (before other middleware)
### Example usage
```
defmodule MyClient do
use Tesla
plug Tesla.Middleware.Tuples
plug Tesla.Middleware.JSON
end
```
### Options
  - `:rescue_errors` - list of exceptions to be rescued, or `:all` (see below)
  The default behaviour is to rescue `Tesla.Error` exceptions but let others pass through.
  It can be customized by passing a `rescue_errors:` option:
### Rescue other exceptions
```
plug Tesla.Middleware.Tuples, rescue_errors: [MyCustomError]
```
### Rescue all exceptions
```
plug Tesla.Middleware.Tuples, rescue_errors: :all
```
"""
def call(env, next, opts) do
{:ok, Tesla.run(env, next)}
rescue
ex in Tesla.Error ->
{:error, ex}
ex ->
case opts[:rescue_errors] do
nil ->
reraise ex, System.stacktrace()
:all ->
{:error, ex}
list ->
if ex.__struct__ in list do
{:error, ex}
else
reraise ex, System.stacktrace()
end
end
end
end
| 22.380952 | 100 | 0.626241 |
9e01f7fb1b6d0d590988494cd5ef884b6177f7dc | 12,632 | ex | Elixir | lib/reader.ex | kianmeng/monet | 6b148aa19721e13d84934a0dd16e911353312559 | [
"ISC"
] | null | null | null | lib/reader.ex | kianmeng/monet | 6b148aa19721e13d84934a0dd16e911353312559 | [
"ISC"
] | null | null | null | lib/reader.ex | kianmeng/monet | 6b148aa19721e13d84934a0dd16e911353312559 | [
"ISC"
] | null | null | null | defmodule Monet.Reader do
@moduledoc """
Reads and parses responses from the server. Should not be called directly from
outside this library.
"""
	use Bitwise, only: [bsr: 2, band: 2]
import NimbleParsec
import Monet.Connection, only: [connection: 2]
alias Monet.{Error, Result, Prepared}
@doc "Reads the result from a query"
def result(conn) do
with {:ok, payload} <- message(conn, nil) do
parse_result(payload, conn)
end
end
@doc "Reads a single message"
def message(conn, acc \\ nil) do
conn
|> read_n(2)
|> payload(conn, acc)
end
defp payload({:ok, <<1, 0>>}, _conn, _acc), do: {:ok, ""}
defp payload({:ok, <<header::little-16>>}, conn, acc) do
len = bsr(header, 1)
fin = band(header, 1)
case read_n(conn, len) do
{:ok, <<"!", rest::binary>>} -> monet_error(rest)
{:ok, data} ->
cond do
fin == 0 -> message(conn, [acc || [], data])
acc == nil -> {:ok, data}
true -> {:ok, :erlang.iolist_to_binary([acc, data])}
end
err -> err
end
end
defp payload({:error, err}, _conn, _acc) do
{:error, Error.new(:network, err)}
end
defp monet_error(<<err::binary>>) do
{message, code} = case Integer.parse(err) do
{code, <<?!, message::binary>>} -> {message, code}
_ -> {err, nil}
end
{:error, %Error{source: :monetd, message: message, code: code}}
end
defp read_n(conn, n) do
socket = connection(conn, :socket)
timeout = connection(conn, :read_timeout)
:gen_tcp.recv(socket, n, timeout)
end
# result from a select
defp parse_result(<<"&1 ", data::binary>>, _conn) do
case String.split(data, "\n", parts: 6) do
[header, _tables, columns, types, _length, rows] ->
with {:ok, types} <- parse_result_types(types),
{:ok, row_count, header} <- parse_result_header(header),
{:ok, columns} <- parse_result_columns(columns),
{:ok, rows} <- parse_result_rows(row_count, types, rows)
do
{:ok, Result.new(header, columns, rows, row_count)}
end
_ -> {:error, Error.new(:driver, "invalid query response", data)}
end
end
# result from an insert or update
defp parse_result(<<"&2 ", data::binary>>, _conn) do
with {row_count, <<" ", rest::binary>>} <- Integer.parse(data),
{last_id, _} <- Integer.parse(rest)
do
{:ok, Result.upsert(data, row_count, last_id)}
else
_ -> {:error, Error.new(:driver, "invalid insert/update result", data)}
end
end
# result from a create or drop
defp parse_result(<<"&3 ", data::binary>>, _conn) do
case :binary.split(data, "\n") do
[_, <<"!", rest::binary>>] -> monet_error(rest)
_ -> {:ok, Result.meta(String.trim_trailing(data))}
end
end
# Result from a transaction. We expect it to be in auto-commit false (hence the f)
defp parse_result("&4 f\n", _conn) do
{:ok, Result.meta("&4 f")}
end
# result from a prepared request
defp parse_result(<<"&5 ", _::binary>> = data, conn) do
Prepared.build(conn, data)
end
# result from a QBLOCK ??
defp parse_result(<<"&6 ", _data::binary>>, _conn) do
raise "QBLOCK result parsing not implemented"
end
defp parse_result(<<unknown::binary>>, _conn) do
{:error, Error.new(:driver, "unknown query result", unknown)}
end
defp parse_result_types(<<types::binary>>) do
l = byte_size(types) - 9
case types do
<<"% ", types::bytes-size(l), " # type">> -> {:ok, types |> String.split(",\t") |> Enum.map(&String.to_atom/1)}
_ -> {:error, Error.new(:driver, "invalid result type header", types)}
end
end
defp parse_result_header(<<header::binary>>) do
with [_query_id, rest] <- :binary.split(header, " "),
{row_count, _} <- Integer.parse(rest)
do
{:ok, row_count, header}
else
_ -> {:error, Error.new(:driver, "invalid result header", header)}
end
end
defp parse_result_columns(<<columns::binary>>) do
l = byte_size(columns) - 9
case columns do
<<"% ", columns::bytes-size(l), " # name">> -> {:ok, String.split(columns, ",\t")}
_ -> {:error, Error.new(:driver, "invalid result columns header", columns)}
end
end
defp parse_result_rows(0, _types, <<_data::binary>>), do: {:ok, []}
defp parse_result_rows(_row_count, types, <<data::binary>>) do
do_parse_result_rows(types, data, [])
end
defp do_parse_result_rows(types, <<data::binary>>, acc) do
case parse_row(types, data) do
{:ok, "", row} -> {:ok, Enum.reverse([row | acc])}
{:ok, rest, row} -> do_parse_result_rows(types, rest, [row | acc])
err -> err
end
end
# first value in the row, strip out the leading "[ "
defp parse_row(types, <<"[ ", data::binary>>) do
parse_row(types, data, [])
end
defp parse_row(_types, <<data::binary>>) do
{:error, Error.new(:driver, "invalid row prefix", data)}
end
# last value in the row, special handling to strip out the trailing data
defp parse_row([type], <<data::binary>>, row) do
case parse_value(type, data) do
{:ok, <<"\t]\n", rest::binary>>, value} -> {:ok, rest, Enum.reverse([value | row])}
{:ok, {:text, <<"]\n", rest::binary>>}, value} -> {:ok, rest, Enum.reverse([value | row])}
{:ok, _, _} -> {:error, Error.new(:driver, "invalid row terminator", data)}
err -> err
end
end
defp parse_row([type | types], <<data::binary>>, row) do
case parse_value(type, data) do
{:ok, <<",\t", rest::binary>>, value} -> parse_row(types, rest, [value | row])
{:ok, {:text, <<rest::binary>>}, value} -> parse_row(types, rest, [value | row])
{:ok, _, _value} -> {:error, Error.new(:driver, "invalid value separator", data)}
err -> err
end
end
defp parse_value(_type, <<"NULL", rest::binary>>), do: {:ok, rest, nil}
defp parse_value(type, <<data::binary>>) when type in [:int, :tinyint, :bigint, :hugeint, :oid, :smallint, :serial] do
case Integer.parse(data) do
{value, rest} -> {:ok, rest, value}
:error -> {:error, Error.new(:driver, "invalid integer", data)}
end
end
defp parse_value(type, <<data::binary>>) when type in [:double, :float, :real] do
case Float.parse(data) do
{value, rest} -> {:ok, rest, value}
:error -> {:error, Error.new(:driver, "invalid float", data)}
end
end
defp parse_value(:decimal, <<data::binary>>) do
case Decimal.parse(data) do
{value, rest} -> {:ok, rest, value}
:error -> {:error, Error.new(:driver, "invalid decimal", data)}
end
end
defp parse_value(:boolean, <<"true", rest::binary>>), do: {:ok, rest, true}
defp parse_value(:boolean, <<"false", rest::binary>>), do: {:ok, rest, false}
defp parse_value(:boolean, invalid), do: {:error, Error.new(:driver, "invalid boolean", invalid)}
@string_types [:char, :varchar, :clob, :text, :json]
defp parse_value(type, <<?", data::binary>>) when type in @string_types do
# Unlike the other functions, this actually strips out the trailing delimiter
# (the "\t" or ",\t" depending on if it's the last column or not).
# This breaks a lot of our parsing since we expect "rest" to not be consumed.
# To solve this, and to avoid re-concatenating the separator, we return a special
# "rest" of {:text, rest} which the other parses can special case.
[string, rest] = :binary.split(data, "\t")
{:ok, {:text, rest}, string |> parse_string() |> :erlang.iolist_to_binary()}
end
defp parse_value(type, <<invalid::binary>>) when type in @string_types do
{:error, Error.new(:driver, "invalid string prefix", invalid)}
end
defp parse_value(:uuid, <<uuid::bytes-size(36), rest::binary>>) do
{:ok, rest, uuid}
end
defp parse_value(:blob, <<data::binary>>) do
{value, rest} = extract_token(data)
case Base.decode16(value) do
{:ok, value} -> {:ok, rest, value}
:error -> {:error, Error.new(:driver, "invalid blob", data)}
end
end
defp parse_value(:time, <<data::binary>>) do
with {:ok, data, rest, _, _, _} <- extract_time(data),
{:ok, time} <- build_time(data)
do
{:ok, rest, time}
else
_ -> {:error, Error.new(:driver, "invalid time", data)}
end
end
# MonetDB strips out any leading zeros from the year, so we can't use Date.from_iso8601
defp parse_value(:date, <<data::binary>>) do
with {:ok, [year, month, day], rest, _, _, _} <- extract_date(data),
{:ok, date} <- Date.new(year, month, day)
do
{:ok, rest, date}
else
_ -> {:error, Error.new(:driver, "invalid date", data)}
end
end
defp parse_value(:timestamp, <<data::binary>>) do
with {:ok, <<" ", rest::binary>>, date} <- parse_value(:date, data),
{:ok, rest, time} <- parse_value(:time, rest),
{:ok, datetime} <- NaiveDateTime.new(date, time)
do
{:ok, rest, datetime}
else
_ -> {:error, Error.new(:driver, "invalid timestamp", data)}
end
end
	# I'm pretty sure this timezone stuff isn't right
defp parse_value(:timestamptz, <<data::binary>>) do
with {:ok, <<" ", rest::binary>>, date} <- parse_value(:date, data),
{:ok, rest, time} <- parse_value(:time, rest),
{:ok, time_zone, rest, _, _, _} <- extract_time_zone(rest)
do
{timezone, abbreviation, offset} = build_time_zone(time_zone)
datetime = %DateTime{
year: date.year,
month: date.month,
day: date.day,
hour: time.hour,
minute: time.minute,
second: time.second,
microsecond: time.microsecond,
utc_offset: offset,
std_offset: 0, # ??
time_zone: timezone,
zone_abbr: abbreviation,
}
{:ok, rest, datetime}
else
_ -> {:error, Error.new(:driver, "invalid timestamptz", data)}
end
end
defp parse_value(type, <<data::binary>>) do
{:error, Error.new(:driver, "unsupported type: #{type}", data)}
end
# We don't have to do a perfect job here, just need to figure out the boundaries.
# The problem with :binary.split is that:
# a) we want to keep the separator/terminator to keep everything consistent
# b) the separator/terminator can be 2 different things
defp extract_token(<<data::binary>>) do
len = token_length(data, 0)
<<value::bytes-size(len), rest::binary>> = data
{value, rest}
end
defp token_length(<<?,, _rest::binary>>, len), do: len
defp token_length(<<?\t, _rest::binary>>, len), do: len
defp token_length(<<_, rest::binary>>, len), do: token_length(rest, len + 1)
defp parse_string(<<data::binary>>, acc \\ []) do
case :binary.split(data, "\\") do
[text, <<?e, rest::binary>>] -> parse_string(rest, [acc, text, ?\e])
[text, <<?f, rest::binary>>] -> parse_string(rest, [acc, text, ?\f])
[text, <<?n, rest::binary>>] -> parse_string(rest, [acc, text, ?\n])
[text, <<?r, rest::binary>>] -> parse_string(rest, [acc, text, ?\r])
[text, <<?t, rest::binary>>] -> parse_string(rest, [acc, text, ?\t])
[text, <<?v, rest::binary>>] -> parse_string(rest, [acc, text, ?\v])
[text, <<?\\, rest::binary>>] -> parse_string(rest, [acc, text, ?\\])
[text, <<?', rest::binary>>] -> parse_string(rest, [acc, text, ?'])
[text, <<?", rest::binary>>] -> parse_string(rest, [acc, text, ?"])
[text] ->
# The last chunk can be terminated with either '"' or '",' depending
# on whether or not it's the last column. Strip it either way.
len1 = byte_size(text) - 1
len2 = len1 - 1
case text do
<<text::bytes-size(len1), ?">> -> [acc, text]
<<text::bytes-size(len2), ~s(",)>> -> [acc, text]
end
end
end
defp build_time([hour, minute, seconds]) do
Time.new(hour, minute, seconds)
end
defp build_time([hour, minute, seconds, milli]) do
Time.new(hour, minute, seconds, {milli * 1000, 3})
end
defp build_time([hour, minute, seconds, milli, micro]) do
Time.new(hour, minute, seconds, {milli * 1000 + micro, 6})
end
@utc {"Etc/UTC", "UTC", 0}
defp build_time_zone(["z"]), do: @utc
defp build_time_zone(["Z"]), do: @utc
defp build_time_zone(["+00:00"]), do: @utc
defp build_time_zone(["-00:00"]), do: @utc
defp build_time_zone([sign, hh, mm]) do
time = "#{sign}#{hh}:#{mm}"
hours = String.to_integer(hh)
minutes = String.to_integer(mm)
offset = hours * 3600 + minutes * 60
offset = case sign do
"+" -> offset
"-" -> -offset
end
{"Etc/UTC" <> time, time, offset}
end
date =
integer(min: 1, max: 4)
|> ignore(string("-"))
|> integer(2)
|> ignore(string("-"))
|> integer(2)
time =
integer(2)
|> ignore(string(":"))
|> integer(2)
|> ignore(string(":"))
|> integer(2)
|> optional(
ignore(string("."))
|> integer(3)
|> optional(integer(3))
)
time_zone =
choice([
string("z"),
string("Z"),
string("+00:00"),
string("-00:00"),
string("+") |> integer(2) |> ignore(string(":")) |> integer(2),
string("-") |> integer(2) |> ignore(string(":")) |> integer(2)
])
defparsec :extract_date, date, inline: true
defparsec :extract_time, time, inline: true
defparsec :extract_time_zone, time_zone, inline: true
end
| 31.422886 | 119 | 0.623179 |
9e020b92f2c192c0ce9088699444c07fce1e08b6 | 286 | exs | Elixir | priv/repo/migrations/20181220212927_create_playlists.exs | IamTheLime/listentothis | cc2b4ca3a11623e68f5657d8538a586fddeba85b | [
"Apache-2.0"
] | null | null | null | priv/repo/migrations/20181220212927_create_playlists.exs | IamTheLime/listentothis | cc2b4ca3a11623e68f5657d8538a586fddeba85b | [
"Apache-2.0"
] | null | null | null | priv/repo/migrations/20181220212927_create_playlists.exs | IamTheLime/listentothis | cc2b4ca3a11623e68f5657d8538a586fddeba85b | [
"Apache-2.0"
] | null | null | null | defmodule Listentothis.Repo.Migrations.CreatePlaylists do
use Ecto.Migration
def change do
create table("playlists") do
add :name, :string, size: 256
add :description, :string, size: 2048
add :user_id, references(:users)
timestamps()
end
end
end
| 20.428571 | 57 | 0.674825 |
9e02391464700e73da57fd027c32d2dc38544363 | 7,213 | exs | Elixir | test/bitcoin/protocol/messages/block_test.exs | coinscript/bitcoinsv-elixir | 2dda03c81edc5662743ed2922abb5b1910d9c09a | [
"Apache-2.0"
] | 2 | 2019-08-12T04:53:57.000Z | 2019-09-03T03:47:33.000Z | test/bitcoin/protocol/messages/block_test.exs | coinscript/bitcoinsv-elixir | 2dda03c81edc5662743ed2922abb5b1910d9c09a | [
"Apache-2.0"
] | null | null | null | test/bitcoin/protocol/messages/block_test.exs | coinscript/bitcoinsv-elixir | 2dda03c81edc5662743ed2922abb5b1910d9c09a | [
"Apache-2.0"
] | null | null | null | defmodule Bitcoin.Protocol.Messages.BlockTest do
use ExUnit.Case
alias Bitcoin.Protocol.Messages.Block
alias Bitcoin.Protocol.Messages.Tx
alias Bitcoin.Protocol.Types.TxInput
alias Bitcoin.Protocol.Types.TxOutput
alias Bitcoin.Protocol.Types.Outpoint
test "block 100_000" do
# Block no 100_000 from the Bitcoin blockchain
{:ok, payload} = File.read("test/data/blk_100000.dat")
struct = %Block{
version: 1,
bits: 453_281_356,
merkle_root:
<<102, 87, 169, 37, 42, 172, 213, 192, 178, 148, 9, 150, 236, 255, 149, 34, 40, 195, 6,
124, 195, 141, 72, 133, 239, 181, 164, 172, 66, 71, 233, 243>>,
previous_block:
<<80, 18, 1, 25, 23, 42, 97, 4, 33, 166, 195, 1, 29, 211, 48, 217, 223, 7, 182, 54, 22,
194, 204, 31, 28, 208, 2, 0, 0, 0, 0, 0>>,
timestamp: 1_293_623_863,
nonce: 274_148_111,
transactions: [
%Tx{
inputs: [
%TxInput{
previous_output: %Outpoint{
hash:
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0>>,
index: 4_294_967_295
},
sequence: 4_294_967_295,
signature_script: <<4, 76, 134, 4, 27, 2, 6, 2>>
}
],
lock_time: 0,
outputs: [
%TxOutput{
pk_script:
<<65, 4, 27, 14, 140, 37, 103, 193, 37, 54, 170, 19, 53, 123, 121, 160, 115, 220,
68, 68, 172, 184, 60, 78, 199, 160, 226, 249, 157, 215, 69, 117, 22, 197, 129,
114, 66, 218, 121, 105, 36, 202, 78, 153, 148, 125, 8, 127, 237, 249, 206, 70,
124, 185, 247, 198, 40, 112, 120, 248, 1, 223, 39, 111, 223, 132, 172>>,
value: 5_000_000_000
}
],
version: 1
},
%Tx{
inputs: [
%TxInput{
previous_output: %Outpoint{
hash:
<<3, 46, 56, 233, 192, 168, 76, 96, 70, 214, 135, 209, 5, 86, 220, 172, 196, 29,
39, 94, 197, 95, 192, 7, 121, 172, 136, 253, 243, 87, 161, 135>>,
index: 0
},
sequence: 4_294_967_295,
signature_script:
<<73, 48, 70, 2, 33, 0, 195, 82, 211, 221, 153, 58, 152, 27, 235, 164, 166, 58,
209, 92, 32, 146, 117, 202, 148, 112, 171, 252, 213, 125, 169, 59, 88, 228, 235,
93, 206, 130, 2, 33, 0, 132, 7, 146, 188, 31, 69, 96, 98, 129, 159, 21, 211, 62,
231, 5, 92, 247, 181, 238, 26, 241, 235, 204, 96, 40, 217, 205, 177, 195, 175,
119, 72, 1, 65, 4, 244, 109, 181, 233, 214, 26, 157, 194, 123, 141, 100, 173,
35, 231, 56, 58, 78, 108, 161, 100, 89, 60, 37, 39, 192, 56, 192, 133, 126, 182,
126, 232, 232, 37, 220, 166, 80, 70, 184, 44, 147, 49, 88, 108, 130, 224, 253,
31, 99, 63, 37, 248, 124, 22, 27, 198, 248, 166, 48, 18, 29, 242, 179, 211>>
}
],
lock_time: 0,
outputs: [
%TxOutput{
pk_script:
<<118, 169, 20, 195, 152, 239, 169, 195, 146, 186, 96, 19, 197, 224, 78, 231, 41,
117, 94, 247, 245, 139, 50, 136, 172>>,
value: 556_000_000
},
%TxOutput{
pk_script:
<<118, 169, 20, 148, 140, 118, 90, 105, 20, 212, 63, 42, 122, 193, 119, 218, 44,
47, 107, 82, 222, 61, 124, 136, 172>>,
value: 4_444_000_000
}
],
version: 1
},
%Tx{
inputs: [
%TxInput{
previous_output: %Outpoint{
hash:
<<195, 62, 191, 242, 167, 9, 241, 61, 159, 154, 117, 105, 171, 22, 163, 39, 134,
175, 125, 126, 45, 224, 146, 101, 228, 28, 97, 208, 120, 41, 78, 207>>,
index: 1
},
sequence: 4_294_967_295,
signature_script:
<<71, 48, 68, 2, 32, 3, 45, 48, 223, 94, 230, 245, 127, 164, 108, 221, 181, 235,
141, 13, 159, 232, 222, 107, 52, 45, 39, 148, 42, 233, 10, 50, 49, 224, 186, 51,
62, 2, 32, 61, 238, 232, 6, 15, 220, 112, 35, 10, 127, 91, 74, 215, 215, 188,
62, 98, 140, 190, 33, 154, 136, 107, 132, 38, 158, 174, 184, 30, 38, 180, 254,
1, 65, 4, 174, 49, 195, 27, 249, 18, 120, 217, 155, 131, 119, 163, 91, 188, 229,
178, 125, 159, 255, 21, 69, 104, 57, 233, 25, 69, 63, 199, 179, 247, 33, 240,
186, 64, 63, 249, 108, 157, 238, 182, 128, 229, 253, 52, 28, 15, 195, 167, 185,
13, 164, 99, 30, 227, 149, 96, 99, 157, 180, 98, 233, 203, 133, 15>>
}
],
lock_time: 0,
outputs: [
%TxOutput{
pk_script:
<<118, 169, 20, 176, 220, 191, 151, 234, 191, 68, 4, 227, 29, 149, 36, 119, 206,
130, 45, 173, 190, 126, 16, 136, 172>>,
value: 1_000_000
},
%TxOutput{
pk_script:
<<118, 169, 20, 107, 18, 129, 238, 194, 90, 180, 225, 224, 121, 63, 244, 224, 138,
177, 171, 179, 64, 156, 217, 136, 172>>,
value: 299_000_000
}
],
version: 1
},
%Tx{
inputs: [
%TxInput{
previous_output: %Outpoint{
hash:
<<11, 96, 114, 179, 134, 212, 167, 115, 35, 82, 55, 246, 76, 17, 38, 172, 59,
36, 12, 132, 185, 23, 163, 144, 155, 161, 196, 61, 237, 95, 81, 244>>,
index: 0
},
sequence: 4_294_967_295,
signature_script:
<<73, 48, 70, 2, 33, 0, 187, 26, 210, 109, 249, 48, 165, 28, 206, 17, 12, 244, 79,
122, 72, 195, 197, 97, 253, 151, 117, 0, 177, 174, 93, 107, 111, 209, 61, 11,
63, 74, 2, 33, 0, 197, 180, 41, 81, 172, 237, 255, 20, 171, 186, 39, 54, 253,
87, 75, 219, 70, 95, 62, 111, 141, 161, 46, 44, 83, 3, 149, 74, 202, 127, 120,
243, 1, 65, 4, 167, 19, 91, 254, 130, 76, 151, 236, 192, 30, 199, 215, 227, 54,
24, 92, 129, 226, 170, 44, 65, 171, 23, 84, 7, 192, 148, 132, 206, 150, 148,
180, 73, 83, 252, 183, 81, 32, 101, 100, 169, 194, 77, 208, 148, 212, 47, 219,
253, 213, 170, 211, 224, 99, 206, 106, 244, 207, 170, 234, 78, 161, 79, 187>>
}
],
lock_time: 0,
outputs: [
%TxOutput{
pk_script:
<<118, 169, 20, 57, 170, 61, 86, 158, 6, 161, 215, 146, 109, 196, 190, 17, 147,
201, 155, 242, 235, 158, 224, 136, 172>>,
value: 1_000_000
}
],
version: 1
}
]
}
assert Block.parse(payload) == struct
assert Block.serialize(struct) == payload
end
end
| 43.191617 | 98 | 0.435048 |
9e023f3be0f03cb9876787b19f20f13227de95e8 | 988 | ex | Elixir | config/dispatcher/dispatcher.ex | Youssef-98/dispatcher-error | 93fad82d8e4d30217a29500cceb681b5cf78a420 | [
"MIT"
] | 1 | 2021-08-02T21:24:18.000Z | 2021-08-02T21:24:18.000Z | config/dispatcher/dispatcher.ex | Youssef-98/dispatcher-error | 93fad82d8e4d30217a29500cceb681b5cf78a420 | [
"MIT"
] | null | null | null | config/dispatcher/dispatcher.ex | Youssef-98/dispatcher-error | 93fad82d8e4d30217a29500cceb681b5cf78a420 | [
"MIT"
] | null | null | null | defmodule Dispatcher do
use Matcher
define_accept_types [
html: [ "text/html", "application/xhtml+html" ],
json: [ "application/json", "application/vnd.api+json" ],
]
@any %{}
@json %{ accept: %{ json: true } }
@html %{ accept: %{ html: true } }
# In order to forward the 'themes' resource to the
# resource service, use the following forward rule.
#
# docker-compose stop; docker-compose rm; docker-compose up
# after altering this file.
#
# match "/themes/*path", @json do
# Proxy.forward conn, path, "http://resource/themes/"
# end
match "/books/*path" do
Proxy.forward conn, path, "http://resource/books/"
end
# match "/authors/*path" do
# Proxy.forward conn, path, "http://resource/authors/"
# end
match "/publishers/*path" do
Proxy.forward conn, path, "http://resource/publishers/"
end
match "_", %{ last_call: true } do
send_resp( conn, 404, "Route not found. See config/dispatcher.ex" )
end
end
| 24.7 | 72 | 0.63664 |
9e024972d744e8a0d67f9434e3422fb59f7de321 | 1,566 | exs | Elixir | test/chaos_spawn/time_test.exs | meadsteve/chaos-spawn | ca983ad96cb1e8541474e726f727d20636828c24 | [
"MIT"
] | 90 | 2015-09-29T16:40:17.000Z | 2022-02-20T18:45:54.000Z | test/chaos_spawn/time_test.exs | meadsteve/chaos-spawn | ca983ad96cb1e8541474e726f727d20636828c24 | [
"MIT"
] | 17 | 2015-09-29T15:37:43.000Z | 2015-12-09T08:46:41.000Z | test/chaos_spawn/time_test.exs | meadsteve/chaos-spawn | ca983ad96cb1e8541474e726f727d20636828c24 | [
"MIT"
] | 6 | 2015-12-03T16:03:35.000Z | 2018-06-28T00:54:08.000Z |
defmodule TimeTest do
use ExUnit.Case
alias ChaosSpawn.Time
# In DEV and TEST this is always the result of ChaosSpawn.Time.now
@fake_now_time {{2014, 12, 13}, {14, 50, 00}}
test "between? returns false for times lower than the bottom bound" do
time = {12, 0, 0}
result = time |> Time.between?({13, 0, 0}, {15, 0, 0})
assert result == false
end
test "between? returns true for times between the bounds" do
time = {14, 30, 0}
result = time |> Time.between?({13, 0, 0}, {15, 0, 0})
assert result == true
end
test "between? returns false for times greater than the top bound" do
time = {15, 30, 0}
result = time |> Time.between?({13, 0, 0}, {15, 0, 0})
assert result == false
end
test "between? accepts inputs from timex" do
time = Timex.to_datetime({{2015, 6, 24}, {14, 50, 34}})
result = time |> Time.between?({13, 0, 0}, {15, 0, 0})
assert result == true
end
test "now returns a fixed time for testing purposes" do
now_datetime = Time.now |> Timex.to_erl
assert now_datetime == @fake_now_time
end
test "Can check if the current day is in a list of days" do
allowed_days = [:sat, :sun, :mon]
#@fake_now_time is a :sat
contained = Time.on_one_of_days?(@fake_now_time, allowed_days)
assert contained == true
end
test "Can check if the current day is NOT in a list of days" do
allowed_days = [:mon, :tue, :wed]
#@fake_now_time is a :sat
contained = Time.on_one_of_days?(@fake_now_time, allowed_days)
assert contained == false
end
end
| 29.54717 | 72 | 0.643678 |
9e024e90d58669b1f1ed88cee41ecffacafe3357 | 2,336 | ex | Elixir | lib/ggity/scale/fill_viridis.ex | srowley/ggity | e49f40141ea7b56e07cf9e00a20c04ed4d42df2f | [
"MIT"
] | 47 | 2020-06-21T15:23:54.000Z | 2022-03-13T01:24:19.000Z | lib/ggity/scale/fill_viridis.ex | srowley/ggity | e49f40141ea7b56e07cf9e00a20c04ed4d42df2f | [
"MIT"
] | 3 | 2020-11-28T11:00:59.000Z | 2020-11-30T18:20:37.000Z | lib/ggity/scale/fill_viridis.ex | srowley/ggity | e49f40141ea7b56e07cf9e00a20c04ed4d42df2f | [
"MIT"
] | 2 | 2020-11-28T10:40:10.000Z | 2021-05-28T06:44:47.000Z | defmodule GGity.Scale.Fill.Viridis do
@moduledoc false
alias GGity.{Draw, Labels}
alias GGity.Scale.{Color, Fill}
defstruct transform: nil,
levels: nil,
labels: :waivers,
guide: :legend,
option: :viridis
@type t() :: %__MODULE__{}
@spec new(keyword()) :: Fill.Viridis.t()
def new(options \\ []), do: struct(Fill.Viridis, options)
@spec train(Fill.Viridis.t(), list(binary())) :: Fill.Viridis.t()
def train(scale, [level | _other_levels] = levels) when is_list(levels) and is_binary(level) do
color_struct =
Color.Viridis
|> struct(Map.from_struct(scale))
|> Color.Viridis.train(levels)
struct(Fill.Viridis, Map.from_struct(color_struct))
end
@spec draw_legend(Fill.Viridis.t(), binary(), atom(), number(), keyword()) :: iolist()
def draw_legend(
%Fill.Viridis{guide: :none},
_label,
_key_glyph,
_key_height,
_fixed_aesthetics
),
do: []
  def draw_legend(%Fill.Viridis{levels: [_]}, _label, _key_glyph, _key_height, _fixed_aesthetics),
do: []
def draw_legend(
%Fill.Viridis{levels: levels} = scale,
label,
key_glyph,
key_height,
fixed_aesthetics
) do
[
Draw.text(
"#{label}",
x: "0",
y: "-5",
class: "gg-text gg-legend-title",
text_anchor: "left"
),
Stream.with_index(levels)
|> Enum.map(fn {level, index} ->
draw_legend_item(scale, {level, index}, key_glyph, key_height, fixed_aesthetics)
end)
]
end
defp draw_legend_item(scale, {level, index}, key_glyph, key_height, fixed_aesthetics) do
[
draw_key_glyph(scale, level, index, key_glyph, key_height, fixed_aesthetics),
Draw.text(
"#{Labels.format(scale, level)}",
x: "#{5 + key_height}",
y: "#{10 + key_height * index}",
class: "gg-text gg-legend-text",
text_anchor: "left"
)
]
end
defp draw_key_glyph(scale, level, index, :rect, key_height, fixed_aesthetics) do
Draw.rect(
x: "0",
y: "#{key_height * index}",
height: key_height,
width: key_height,
style: "fill:#{scale.transform.(level)}; fill-opacity:#{fixed_aesthetics[:alpha]};",
class: "gg-legend-key"
)
end
end
| 26.850575 | 98 | 0.590325 |
9e02611739d18b34b175a9ac655e9a99e0bfcf73 | 8,912 | exs | Elixir | test/shp_test.exs | ssbb/exshape | 7f0de9e7013cfc17d1e417bd2cf61b8be5259a63 | [
"MIT"
] | null | null | null | test/shp_test.exs | ssbb/exshape | 7f0de9e7013cfc17d1e417bd2cf61b8be5259a63 | [
"MIT"
] | null | null | null | test/shp_test.exs | ssbb/exshape | 7f0de9e7013cfc17d1e417bd2cf61b8be5259a63 | [
"MIT"
] | null | null | null | defmodule ShpTest do
use ExUnit.Case
import TestHelper
alias Exshape.Shp
alias Exshape.Shp.{Bbox,
Point, PointM,
Multipoint, MultipointM,
Polyline, PolylineM,
Polygon, PolygonM
}
doctest Exshape
test "can read points" do
[_header | points] = fixture("point.shp")
|> Shp.read
|> Enum.into([])
assert [
%Point{x: 10, y: 10},
%Point{x: 5, y: 5},
%Point{x: 0, y: 10}
] == points
end
test "can read multipoints" do
[_header, multipoint] = fixture("multipoint.shp")
|> Shp.read
|> Enum.into([])
assert multipoint == %Multipoint{
points: [
%Point{x: 10, y: 10},
%Point{x: 5, y: 5},
%Point{x: 0, y: 10}
],
bbox: %Bbox{xmin: 0, ymin: 5, xmax: 10, ymax: 10}
}
end
test "can read polyline" do
[_ | lines] = fixture("polyline.shp")
|> Shp.read
|> Enum.into([])
assert lines == [
%Polyline{
parts: [0],
points: [
[
%Point{x: 0, y: 0},
%Point{x: 5, y: 5},
%Point{x: 10, y: 10}
]
],
bbox: %Bbox{xmin: 0, ymin: 0, xmax: 10, ymax: 10}
},
%Polyline{
parts: [0],
points: [
[
%Point{x: 15, y: 15},
%Point{x: 20, y: 20},
%Point{x: 25, y: 25}
]
],
bbox: %Bbox{xmin: 15, ymin: 15, xmax: 25, ymax: 25}
}
]
end
test "can read polygons" do
[_header, polygon] = fixture("polygons.shp")
|> Shp.read
|> Enum.into([])
assert polygon == %Polygon{
points: [
[
[
%Point{x: 0, y: 0},
%Point{x: 0, y: 5},
%Point{x: 5, y: 5},
%Point{x: 5, y: 0},
%Point{x: 0, y: 0}
]
],
[
[
%Point{x: 0, y: 0},
%Point{x: 0, y: 5},
%Point{x: 5, y: 5},
%Point{x: 5, y: 0},
%Point{x: 0, y: 0}
]
]
],
parts: [0, 5],
bbox: %Bbox{xmin: 0, ymin: 0, xmax: 5, ymax: 5}
}
end
test "can read pointm" do
[_header | pointms] = fixture("pointm.shp")
|> Shp.read
|> Enum.into([])
assert pointms == [
%PointM{x: 10, y: 10, m: 100},
%PointM{x: 5, y: 5, m: 50},
%PointM{x: 0, y: 10, m: 75}
]
end
test "can read multipointm" do
[_header | multipointms] = fixture("multipointm.shp")
|> Shp.read
|> Enum.into([])
assert multipointms == [
%MultipointM{
points: [
%PointM{x: 10, y: 10, m: 100},
%PointM{x: 5, y: 5, m: 50},
%PointM{x: 0, y: 10, m: 75}
],
bbox: %Bbox{xmin: 0, xmax: 10, ymin: 5, ymax: 10, mmax: 100, mmin: 50}
}
]
end
test "can read polylinem" do
[_header | polylinems] = fixture("polylinem.shp")
|> Shp.read
|> Enum.into([])
assert polylinems == [
%PolylineM{
points: [
[
%PointM{x: 0, y: 0, m: 0},
%PointM{x: 5, y: 5, m: 5},
%PointM{x: 10, y: 10, m: 10},
]
],
parts: [0],
bbox: %Bbox{xmin: 0, xmax: 10, ymin: 0, ymax: 10, mmin: 0, mmax: 10}
},
%PolylineM{
points: [
[
%PointM{x: 15, y: 15, m: 15},
%PointM{x: 20, y: 20, m: 20},
%PointM{x: 25, y: 25, m: 25}
]
],
parts: [0],
bbox: %Bbox{xmin: 15, xmax: 25, ymin: 15, ymax: 25, mmin: 15, mmax: 25}
}
]
end
test "can read polygonm" do
[_header | polygonms] = fixture("polygonm.shp")
|> Shp.read
|> Enum.into([])
assert polygonms == [
%PolygonM{
points: [
[
[
%PointM{x: 0, y: 0, m: 0},
%PointM{x: 0, y: 5, m: 5},
%PointM{x: 5, y: 5, m: 10},
%PointM{x: 5, y: 0, m: 15},
%PointM{x: 0, y: 0, m: 0}
]
]
],
parts: [0],
bbox: %Bbox{xmin: 0, xmax: 5, ymin: 0, ymax: 5, mmin: 0, mmax: 15}
}
]
end
test "can nest holes" do
assert Shp.nest_polygon(%Polygon{
parts: [0, 5],
points: Enum.reverse([
%Point{x: 0, y: 4},
%Point{x: 4, y: 4},
%Point{x: 4, y: 0},
%Point{x: 0, y: 0},
%Point{x: 0, y: 4},
%Point{x: 2, y: 2},
%Point{x: 1, y: 2},
%Point{x: 1, y: 1},
%Point{x: 2, y: 1},
%Point{x: 2, y: 2}
])
}) == [
[
[
%Point{x: 0, y: 4},
%Point{x: 4, y: 4},
%Point{x: 4, y: 0},
%Point{x: 0, y: 0},
%Point{x: 0, y: 4},
],
[
%Point{x: 2, y: 2},
%Point{x: 1, y: 2},
%Point{x: 1, y: 1},
%Point{x: 2, y: 1},
%Point{x: 2, y: 2}
]
]
]
end
test "appends a part to the polygon when the part is clockwise" do
assert Shp.nest_polygon(%Polygon{
parts: [0, 5],
points: Enum.reverse([
%Point{x: 0, y: 4},
%Point{x: 4, y: 4},
%Point{x: 4, y: 0},
%Point{x: 0, y: 0},
%Point{x: 0, y: 4},
%Point{x: 2, y: 2},
%Point{x: 3, y: 2},
%Point{x: 3, y: 1},
%Point{x: 2, y: 1},
%Point{x: 2, y: 2}
])
}) == [
[
[
%Point{x: 0, y: 4},
%Point{x: 4, y: 4},
%Point{x: 4, y: 0},
%Point{x: 0, y: 0},
%Point{x: 0, y: 4},
]
],
[
[
%Point{x: 2, y: 2},
%Point{x: 3, y: 2},
%Point{x: 3, y: 1},
%Point{x: 2, y: 1},
%Point{x: 2, y: 2}
]
]
]
end
test "clockwise" do
assert Shp.is_clockwise?(
[
%Point{x: 0, y: 4},
%Point{x: 4, y: 4},
%Point{x: 4, y: 0},
%Point{x: 0, y: 0},
%Point{x: 0, y: 4}
]) == true
assert Shp.is_clockwise?(
[
%Point{x: 4, y: 4},
%Point{x: 0, y: 4},
%Point{x: 0, y: 0},
%Point{x: 4, y: 0},
%Point{x: 4, y: 4}
]) == false
end
test "contains" do
assert Shp.ring_contains?(
[
%Point{x: 0, y: 0},
%Point{x: 4, y: 0},
%Point{x: 4, y: 4},
%Point{x: 0, y: 4},
%Point{x: 0, y: 0}
],
%Point{x: 1, y: 1}
) == true
assert Shp.ring_contains?(
[
%Point{x: 0, y: 0},
%Point{x: 4, y: 0},
%Point{x: 4, y: 4},
%Point{x: 0, y: 4},
%Point{x: 0, y: 0}
],
%Point{x: 5, y: 5}
) == false
end
test "can nest many holes" do
assert Shp.nest_polygon(%Polygon{
parts: [0, 5, 10],
points: Enum.reverse([
%Point{x: 0, y: 5},
%Point{x: 5, y: 5},
%Point{x: 5, y: 0},
%Point{x: 0, y: 0},
%Point{x: 0, y: 5},
%Point{x: 2, y: 2},
%Point{x: 1, y: 2},
%Point{x: 1, y: 1},
%Point{x: 2, y: 1},
%Point{x: 2, y: 2},
%Point{x: 4, y: 3},
%Point{x: 3, y: 3},
%Point{x: 3, y: 2},
%Point{x: 4, y: 2},
%Point{x: 4, y: 3}
])
}) == [
[
[
%Point{x: 0, y: 5},
%Point{x: 5, y: 5},
%Point{x: 5, y: 0},
%Point{x: 0, y: 0},
%Point{x: 0, y: 5},
],
[
%Point{x: 2, y: 2},
%Point{x: 1, y: 2},
%Point{x: 1, y: 1},
%Point{x: 2, y: 1},
%Point{x: 2, y: 2}
],
[
%Point{x: 4, y: 3},
%Point{x: 3, y: 3},
%Point{x: 3, y: 2},
%Point{x: 4, y: 2},
%Point{x: 4, y: 3}
]
]
]
end
test "can nest holes and rings" do
assert Shp.nest_polygon(%Polygon{
parts: [0, 5, 10],
points: Enum.reverse([
%Point{x: 0, y: 5},
%Point{x: 5, y: 5},
%Point{x: 5, y: 0},
%Point{x: 0, y: 0},
%Point{x: 0, y: 5},
%Point{x: 2, y: 2},
%Point{x: 1, y: 2},
%Point{x: 1, y: 1},
%Point{x: 2, y: 1},
%Point{x: 2, y: 2},
%Point{x: 10, y: 10},
%Point{x: 11, y: 10},
%Point{x: 11, y: 9},
%Point{x: 10, y: 9},
%Point{x: 10, y: 10}
])
}) == [
[
[
%Point{x: 0, y: 5},
%Point{x: 5, y: 5},
%Point{x: 5, y: 0},
%Point{x: 0, y: 0},
%Point{x: 0, y: 5},
],
[
%Point{x: 2, y: 2},
%Point{x: 1, y: 2},
%Point{x: 1, y: 1},
%Point{x: 2, y: 1},
%Point{x: 2, y: 2}
]
],
[
[
%Point{x: 10, y: 10},
%Point{x: 11, y: 10},
%Point{x: 11, y: 9},
%Point{x: 10, y: 9},
%Point{x: 10, y: 10}
]
]
]
end
end
| 22.004938 | 79 | 0.37399 |
9e02a2f16ac1beedf211241918d58054ea6cfacc | 1,026 | ex | Elixir | lib/changelog_web/views/news_issue_view.ex | PsOverflow/changelog.com | 53f4ecfc39b021c6b8cfcc0fa11f29aff8038a7f | [
"MIT"
] | 1 | 2020-05-20T16:58:17.000Z | 2020-05-20T16:58:17.000Z | lib/changelog_web/views/news_issue_view.ex | type1fool/changelog.com | fbec3528cc3f5adfdc75b008bb92b17efc4f248f | [
"MIT"
] | null | null | null | lib/changelog_web/views/news_issue_view.ex | type1fool/changelog.com | fbec3528cc3f5adfdc75b008bb92b17efc4f248f | [
"MIT"
] | null | null | null | defmodule ChangelogWeb.NewsIssueView do
use ChangelogWeb, :public_view
alias Changelog.{NewsItem, NewsAd}
alias ChangelogWeb.{NewsItemView, SponsorView, PodcastView}
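  # Interleaves one ad after every `every` news items (3 by default); chunks
  # without a matching ad are passed through unchanged.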
def items_with_ads(items, []), do: items
def items_with_ads(items, ads), do: items_with_ads(items, ads, 3)
def items_with_ads(items, ads, every) do
items
|> Enum.chunk_every(every)
|> Enum.with_index()
    |> Enum.map(fn {items, index} ->
case Enum.at(ads, index) do
nil -> items
ad -> items ++ [ad]
end
end)
|> List.flatten()
end
def render_item_or_ad(ad = %NewsAd{}, assigns) do
render("_ad.html", Map.merge(assigns, %{ad: ad, sponsor: ad.sponsorship.sponsor}))
end
def render_item_or_ad(item = %NewsItem{}, assigns) do
template = case item.type do
:audio -> "_item_audio.html"
_else -> "_item.html"
end
render(template, Map.merge(assigns, %{item: item}))
end
def spacer_url do
"https://changelog-assets.s3.amazonaws.com/weekly/spacer.gif"
end
end
| 27 | 86 | 0.658869 |
9e02aa9f8b46509e64481fcc2e3e07bcc0224f2e | 6,219 | ex | Elixir | lib/codes/codes_g90.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_g90.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_g90.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_G90 do
alias IcdCode.ICDCode
def _G9001 do
%ICDCode{full_code: "G9001",
category_code: "G90",
short_code: "01",
full_name: "Carotid sinus syncope",
short_name: "Carotid sinus syncope",
category_name: "Carotid sinus syncope"
}
end
def _G9009 do
%ICDCode{full_code: "G9009",
category_code: "G90",
short_code: "09",
full_name: "Other idiopathic peripheral autonomic neuropathy",
short_name: "Other idiopathic peripheral autonomic neuropathy",
category_name: "Other idiopathic peripheral autonomic neuropathy"
}
end
def _G901 do
%ICDCode{full_code: "G901",
category_code: "G90",
short_code: "1",
full_name: "Familial dysautonomia [Riley-Day]",
short_name: "Familial dysautonomia [Riley-Day]",
category_name: "Familial dysautonomia [Riley-Day]"
}
end
def _G902 do
%ICDCode{full_code: "G902",
category_code: "G90",
short_code: "2",
full_name: "Horner's syndrome",
short_name: "Horner's syndrome",
category_name: "Horner's syndrome"
}
end
def _G903 do
%ICDCode{full_code: "G903",
category_code: "G90",
short_code: "3",
full_name: "Multi-system degeneration of the autonomic nervous system",
short_name: "Multi-system degeneration of the autonomic nervous system",
category_name: "Multi-system degeneration of the autonomic nervous system"
}
end
def _G904 do
%ICDCode{full_code: "G904",
category_code: "G90",
short_code: "4",
full_name: "Autonomic dysreflexia",
short_name: "Autonomic dysreflexia",
category_name: "Autonomic dysreflexia"
}
end
def _G9050 do
%ICDCode{full_code: "G9050",
category_code: "G90",
short_code: "50",
full_name: "Complex regional pain syndrome I, unspecified",
short_name: "Complex regional pain syndrome I, unspecified",
category_name: "Complex regional pain syndrome I, unspecified"
}
end
def _G90511 do
%ICDCode{full_code: "G90511",
category_code: "G90",
short_code: "511",
full_name: "Complex regional pain syndrome I of right upper limb",
short_name: "Complex regional pain syndrome I of right upper limb",
category_name: "Complex regional pain syndrome I of right upper limb"
}
end
def _G90512 do
%ICDCode{full_code: "G90512",
category_code: "G90",
short_code: "512",
full_name: "Complex regional pain syndrome I of left upper limb",
short_name: "Complex regional pain syndrome I of left upper limb",
category_name: "Complex regional pain syndrome I of left upper limb"
}
end
def _G90513 do
%ICDCode{full_code: "G90513",
category_code: "G90",
short_code: "513",
full_name: "Complex regional pain syndrome I of upper limb, bilateral",
short_name: "Complex regional pain syndrome I of upper limb, bilateral",
category_name: "Complex regional pain syndrome I of upper limb, bilateral"
}
end
def _G90519 do
%ICDCode{full_code: "G90519",
category_code: "G90",
short_code: "519",
full_name: "Complex regional pain syndrome I of unspecified upper limb",
short_name: "Complex regional pain syndrome I of unspecified upper limb",
category_name: "Complex regional pain syndrome I of unspecified upper limb"
}
end
def _G90521 do
%ICDCode{full_code: "G90521",
category_code: "G90",
short_code: "521",
full_name: "Complex regional pain syndrome I of right lower limb",
short_name: "Complex regional pain syndrome I of right lower limb",
category_name: "Complex regional pain syndrome I of right lower limb"
}
end
def _G90522 do
%ICDCode{full_code: "G90522",
category_code: "G90",
short_code: "522",
full_name: "Complex regional pain syndrome I of left lower limb",
short_name: "Complex regional pain syndrome I of left lower limb",
category_name: "Complex regional pain syndrome I of left lower limb"
}
end
def _G90523 do
%ICDCode{full_code: "G90523",
category_code: "G90",
short_code: "523",
full_name: "Complex regional pain syndrome I of lower limb, bilateral",
short_name: "Complex regional pain syndrome I of lower limb, bilateral",
category_name: "Complex regional pain syndrome I of lower limb, bilateral"
}
end
def _G90529 do
%ICDCode{full_code: "G90529",
category_code: "G90",
short_code: "529",
full_name: "Complex regional pain syndrome I of unspecified lower limb",
short_name: "Complex regional pain syndrome I of unspecified lower limb",
category_name: "Complex regional pain syndrome I of unspecified lower limb"
}
end
def _G9059 do
%ICDCode{full_code: "G9059",
category_code: "G90",
short_code: "59",
full_name: "Complex regional pain syndrome I of other specified site",
short_name: "Complex regional pain syndrome I of other specified site",
category_name: "Complex regional pain syndrome I of other specified site"
}
end
def _G908 do
%ICDCode{full_code: "G908",
category_code: "G90",
short_code: "8",
full_name: "Other disorders of autonomic nervous system",
short_name: "Other disorders of autonomic nervous system",
category_name: "Other disorders of autonomic nervous system"
}
end
def _G909 do
%ICDCode{full_code: "G909",
category_code: "G90",
short_code: "9",
full_name: "Disorder of the autonomic nervous system, unspecified",
short_name: "Disorder of the autonomic nervous system, unspecified",
category_name: "Disorder of the autonomic nervous system, unspecified"
}
end
end
| 36.798817 | 85 | 0.630809 |
9e02c199aecb562b9840af84be6c144949cf6b8b | 3,930 | ex | Elixir | clients/drive/lib/google_api/drive/v3/model/about.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/drive/lib/google_api/drive/v3/model/about.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/drive/lib/google_api/drive/v3/model/about.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Drive.V3.Model.About do
@moduledoc """
Information about the user, the user's Drive, and system capabilities.
## Attributes
- appInstalled (boolean()): Whether the user has installed the requesting app. Defaults to: `null`.
- canCreateDrives (boolean()): Whether the user can create shared drives. Defaults to: `null`.
- canCreateTeamDrives (boolean()): Deprecated - use canCreateDrives instead. Defaults to: `null`.
- driveThemes ([AboutDriveThemes]): A list of themes that are supported for shared drives. Defaults to: `null`.
- exportFormats (%{optional(String.t) => [String.t]}): A map of source MIME type to possible targets for all supported exports. Defaults to: `null`.
- folderColorPalette ([String.t]): The currently supported folder colors as RGB hex strings. Defaults to: `null`.
- importFormats (%{optional(String.t) => [String.t]}): A map of source MIME type to possible targets for all supported imports. Defaults to: `null`.
- kind (String.t): Identifies what kind of resource this is. Value: the fixed string \"drive#about\". Defaults to: `null`.
- maxImportSizes (%{optional(String.t) => String.t}): A map of maximum import sizes by MIME type, in bytes. Defaults to: `null`.
- maxUploadSize (String.t): The maximum upload size in bytes. Defaults to: `null`.
- storageQuota (AboutStorageQuota): Defaults to: `null`.
- teamDriveThemes ([AboutTeamDriveThemes]): Deprecated - use driveThemes instead. Defaults to: `null`.
- user (User): The authenticated user. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:appInstalled => any(),
:canCreateDrives => any(),
:canCreateTeamDrives => any(),
:driveThemes => list(GoogleApi.Drive.V3.Model.AboutDriveThemes.t()),
:exportFormats => map(),
:folderColorPalette => list(any()),
:importFormats => map(),
:kind => any(),
:maxImportSizes => map(),
:maxUploadSize => any(),
:storageQuota => GoogleApi.Drive.V3.Model.AboutStorageQuota.t(),
:teamDriveThemes => list(GoogleApi.Drive.V3.Model.AboutTeamDriveThemes.t()),
:user => GoogleApi.Drive.V3.Model.User.t()
}
field(:appInstalled)
field(:canCreateDrives)
field(:canCreateTeamDrives)
field(:driveThemes, as: GoogleApi.Drive.V3.Model.AboutDriveThemes, type: :list)
field(:exportFormats, type: :map)
field(:folderColorPalette, type: :list)
field(:importFormats, type: :map)
field(:kind)
field(:maxImportSizes, type: :map)
field(:maxUploadSize)
field(:storageQuota, as: GoogleApi.Drive.V3.Model.AboutStorageQuota)
field(:teamDriveThemes, as: GoogleApi.Drive.V3.Model.AboutTeamDriveThemes, type: :list)
field(:user, as: GoogleApi.Drive.V3.Model.User)
end
defimpl Poison.Decoder, for: GoogleApi.Drive.V3.Model.About do
def decode(value, options) do
GoogleApi.Drive.V3.Model.About.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Drive.V3.Model.About do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 46.785714 | 158 | 0.708906 |
9e030fd4021f10a99c94dc0b301f9da2b24db6cd | 857 | exs | Elixir | priv/repo/1-seed-non-users.exs | marick/eecrit | 50b1ebeadc5cf21ea9f9df6add65e4d7037e2482 | [
"MIT"
] | 10 | 2016-07-15T15:57:33.000Z | 2018-06-09T00:40:46.000Z | priv/repo/1-seed-non-users.exs | marick/eecrit | 50b1ebeadc5cf21ea9f9df6add65e4d7037e2482 | [
"MIT"
] | null | null | null | priv/repo/1-seed-non-users.exs | marick/eecrit | 50b1ebeadc5cf21ea9f9df6add65e4d7037e2482 | [
"MIT"
] | 6 | 2016-07-15T15:57:41.000Z | 2018-03-22T16:38:00.000Z | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Eecrit.Repo.insert!(%Eecrit.SomeModel{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
Code.load_file "priv/repo/util.exs"
alias Eecrit.U
U.fresh_start!
# Ability Groups
# TODO: These will be split into finer granularity
U.add_ability_group! name: "superuser", is_superuser: true, is_admin: true
U.add_ability_group! name: "admin", is_admin: true
U.add_ability_group! name: "user"
# Organizations
U.add_org! short_name: "test org", full_name: "Critter4Us Test Organization"
U.add_org! short_name: "uiuc/aacup",
full_name: "University of Illinois Agricultural Animal Care and Use"
| 29.551724 | 79 | 0.736289 |
9e03214b2f3654cc7abe737e2bf6c1e7d40f61e9 | 1,487 | ex | Elixir | lib/glimesh/application.ex | MemoryLeakDeath/glimesh.tv | 1462c4b939da899f5e3f67c3f28850025d59a10f | [
"MIT"
] | null | null | null | lib/glimesh/application.ex | MemoryLeakDeath/glimesh.tv | 1462c4b939da899f5e3f67c3f28850025d59a10f | [
"MIT"
] | null | null | null | lib/glimesh/application.ex | MemoryLeakDeath/glimesh.tv | 1462c4b939da899f5e3f67c3f28850025d59a10f | [
"MIT"
] | null | null | null | defmodule Glimesh.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
topologies = Application.get_env(:libcluster, :topologies)
children = [
Glimesh.PromEx,
{Cluster.Supervisor, [topologies, [name: Glimesh.ClusterSupervisor]]},
# Start the Ecto repository
Glimesh.Repo,
Glimesh.Repo.ReadReplica,
# Start the Telemetry supervisor
GlimeshWeb.Telemetry,
# Start the PubSub system
{Phoenix.PubSub, name: Glimesh.PubSub},
# Who and where are you?
Glimesh.Presence,
# Start the Endpoint (http/https)
GlimeshWeb.Endpoint,
{Rihanna.Supervisor, [postgrex: Glimesh.Repo.config()]},
{ConCache,
[
name: Glimesh.QueryCache.name(),
ttl_check_interval: :timer.seconds(5),
global_ttl: :timer.seconds(30)
]},
{Absinthe.Subscription, GlimeshWeb.Endpoint}
]
GlimeshWeb.ApiLogger.start_logger()
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Glimesh.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
GlimeshWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 29.74 | 76 | 0.682582 |
9e032a3f2a7bb8c5759498bbd21fb96ff0810e2a | 826 | ex | Elixir | lib/crit_web/audit.ex | jesseshieh/crit19 | 0bba407fea09afed72cbb90ca579ba34c537edef | [
"MIT"
] | null | null | null | lib/crit_web/audit.ex | jesseshieh/crit19 | 0bba407fea09afed72cbb90ca579ba34c537edef | [
"MIT"
] | null | null | null | lib/crit_web/audit.ex | jesseshieh/crit19 | 0bba407fea09afed72cbb90ca579ba34c537edef | [
"MIT"
] | null | null | null | defmodule CritWeb.Audit do
import CritWeb.Plugs.Accessors
alias Crit.Audit.CreationStruct
alias Crit.Users.User
def events do
%{created_user: "created user",
created_animals: "created animals",
}
end
def created_user(conn, %User{} = user) do
log(conn, events().created_user, %{user_id: user.id, auth_id: user.auth_id})
end
def created_animals(conn, data) do
log(conn, events().created_animals, data)
end
## UTIL
defp log(conn, event, data) do
send_struct(conn,
%CreationStruct{event: event,
event_owner_id: user_id(conn),
data: data})
conn
end
defp send_struct(conn, struct),
do: apply(
audit_server(conn),
:put,
[audit_pid(conn), struct, institution(conn)]
)
end
| 21.179487 | 80 | 0.612591 |
9e0335634d6562d5212b26854c59a042ed138fd0 | 668 | exs | Elixir | config/config.exs | noircir/website-1 | bfe43f5a7b022dfc009802d9d068d438e83e73f9 | [
"MIT"
] | null | null | null | config/config.exs | noircir/website-1 | bfe43f5a7b022dfc009802d9d068d438e83e73f9 | [
"MIT"
] | null | null | null | config/config.exs | noircir/website-1 | bfe43f5a7b022dfc009802d9d068d438e83e73f9 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# By default, the umbrella project as well as each child
# application will require this configuration file, ensuring
# they all use the same configuration. While one could
# configure all applications here, we prefer to delegate
# back to each application for organization purposes.
import_config "../apps/*/config/config.exs"
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Import environment specific config.
import_config "#{Mix.env()}.exs"
| 35.157895 | 61 | 0.769461 |
9e033f62a8a2c66e6f145544391645398e7c5611 | 484 | ex | Elixir | test/fixtures/example_code/clean_redux.ex | codeclimate-community/credo | b960a25d604b4499a2577321f9d61b39dc4b0437 | [
"MIT"
] | 4,590 | 2015-09-28T06:01:43.000Z | 2022-03-29T08:48:57.000Z | test/fixtures/example_code/clean_redux.ex | codeclimate-community/credo | b960a25d604b4499a2577321f9d61b39dc4b0437 | [
"MIT"
] | 890 | 2015-11-16T21:07:07.000Z | 2022-03-29T08:52:07.000Z | test/fixtures/example_code/clean_redux.ex | codeclimate-community/credo | b960a25d604b4499a2577321f9d61b39dc4b0437 | [
"MIT"
] | 479 | 2015-11-17T19:42:40.000Z | 2022-03-29T00:09:21.000Z | defmodule X do
defp escape_charlist(reversed_result, [?" | remainder], needs_quote?),
do: escape_charlist('"\\' ++ reversed_result, remainder, needs_quote?)
@doc ~S"""
Escape a subsection name before saving.
"""
def escape_subsection(""), do: "\"\""
def escape_subsection(x) when is_binary(x) do
x
|> String.to_charlist()
|> escape_subsection_impl([])
|> Enum.reverse()
|> to_quoted_string()
end
defp to_quoted_string(s), do: ~s["#{s}"]
end
| 24.2 | 74 | 0.644628 |
9e035414cb85d5c5c50c18b0c0d5ce449abeaaef | 521 | exs | Elixir | test/models/user_test.exs | everydayhoodie/phoenix-trello-notes | ca535a69c610f5edbfca85e34b06a679422d4ba2 | [
"MIT"
] | 1 | 2020-12-23T18:28:53.000Z | 2020-12-23T18:28:53.000Z | test/models/user_test.exs | everydayhoodie/phoenix-trello-notes | ca535a69c610f5edbfca85e34b06a679422d4ba2 | [
"MIT"
] | null | null | null | test/models/user_test.exs | everydayhoodie/phoenix-trello-notes | ca535a69c610f5edbfca85e34b06a679422d4ba2 | [
"MIT"
] | null | null | null | defmodule PhoenixTrello.UserTest do
use PhoenixTrello.ModelCase
alias PhoenixTrello.User
@valid_attrs %{email: "some content", encrypted_password: "some content", first_name: "some content", last_name: "some content"}
@invalid_attrs %{}
test "changeset with valid attributes" do
changeset = User.changeset(%User{}, @valid_attrs)
assert changeset.valid?
end
test "changeset with invalid attributes" do
changeset = User.changeset(%User{}, @invalid_attrs)
refute changeset.valid?
end
end
| 27.421053 | 130 | 0.737044 |
9e0356c14928eb618f011733a583f00a952fb42e | 3,317 | ex | Elixir | lib/google_api/storage/v1/model/bucket_access_control.ex | albert-io/elixir-google-api-storage | 302144ec2c1261104d1a20942482796b2fc0905f | [
"Apache-2.0"
] | null | null | null | lib/google_api/storage/v1/model/bucket_access_control.ex | albert-io/elixir-google-api-storage | 302144ec2c1261104d1a20942482796b2fc0905f | [
"Apache-2.0"
] | null | null | null | lib/google_api/storage/v1/model/bucket_access_control.ex | albert-io/elixir-google-api-storage | 302144ec2c1261104d1a20942482796b2fc0905f | [
"Apache-2.0"
] | 1 | 2019-04-17T05:52:55.000Z | 2019-04-17T05:52:55.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Storage.V1.Model.BucketAccessControl do
@moduledoc """
An access-control entry.
## Attributes
- bucket (String.t): The name of the bucket. Defaults to: `null`.
- domain (String.t): The domain associated with the entity, if any. Defaults to: `null`.
- email (String.t): The email address associated with the entity, if any. Defaults to: `null`.
- entity (String.t): The entity holding the permission, in one of the following forms: - user-userId - user-email - group-groupId - group-email - domain-domain - project-team-projectId - allUsers - allAuthenticatedUsers Examples: - The user [email protected] would be [email protected]. - The group [email protected] would be [email protected]. - To refer to all members of the Google Apps for Business domain example.com, the entity would be domain-example.com. Defaults to: `null`.
- entityId (String.t): The ID for the entity, if any. Defaults to: `null`.
- etag (String.t): HTTP 1.1 Entity tag for the access-control entry. Defaults to: `null`.
- id (String.t): The ID of the access-control entry. Defaults to: `null`.
- kind (String.t): The kind of item this is. For bucket access control entries, this is always storage#bucketAccessControl. Defaults to: `null`.
- projectTeam (BucketAccessControlProjectTeam): Defaults to: `null`.
- role (String.t): The access permission for the entity. Defaults to: `null`.
- selfLink (String.t): The link to this access-control entry. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:"bucket" => any(),
:"domain" => any(),
:"email" => any(),
:"entity" => any(),
:"entityId" => any(),
:"etag" => any(),
:"id" => any(),
:"kind" => any(),
:"projectTeam" => GoogleApi.Storage.V1.Model.BucketAccessControlProjectTeam.t(),
:"role" => any(),
:"selfLink" => any()
}
field(:"bucket")
field(:"domain")
field(:"email")
field(:"entity")
field(:"entityId")
field(:"etag")
field(:"id")
field(:"kind")
field(:"projectTeam", as: GoogleApi.Storage.V1.Model.BucketAccessControlProjectTeam)
field(:"role")
field(:"selfLink")
end
defimpl Poison.Decoder, for: GoogleApi.Storage.V1.Model.BucketAccessControl do
def decode(value, options) do
GoogleApi.Storage.V1.Model.BucketAccessControl.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Storage.V1.Model.BucketAccessControl do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 40.950617 | 517 | 0.706663 |
9e035be10eb889e44774522aa30d5982b7b31121 | 7,058 | ex | Elixir | lib/burette/name/en_us.ex | mememori/arand | 97813dfa877e95ed6a575f95defbed21b70a6fc4 | [
"Apache-2.0"
] | 8 | 2016-11-29T09:44:45.000Z | 2018-03-12T14:17:48.000Z | lib/burette/name/en_us.ex | mememori/arand | 97813dfa877e95ed6a575f95defbed21b70a6fc4 | [
"Apache-2.0"
] | 6 | 2017-03-20T18:41:45.000Z | 2017-10-31T19:12:00.000Z | lib/burette/name/en_us.ex | mememori/arand | 97813dfa877e95ed6a575f95defbed21b70a6fc4 | [
"Apache-2.0"
] | 3 | 2017-01-15T23:10:16.000Z | 2020-10-19T03:02:13.000Z | defmodule Burette.Name.En.Us do
alias Burette.Helper.Lexicon
@behaviour Burette.Name
@name Lexicon.build ~w/
Aaron
Abigail
Abbott
Abel
Abraham
Ada
Adam
Adelaide
Addison
Adrian
Agatha
Agnes
Alastair
Albert
Alexander
Alfred
Allan
Alice
Alison
Alvin
Amber
Andrew
Anne
Anthony
Ariana
Arnold
Audrey
Austin
Axel
Barbara
Beatrix
Bella
Benedict
Benjamin
Blair
Blake
Bonnie
Boris
Braden
Brett
Brian
Britney
Brock
Buck
Caitlin
Calvin
Carl
Carlton
Carmen
Caroline
Carter
Catherine
Cecil
Chad
Chloe
Claire
Clyde
Colin
Craig
Damon
Daniel
Daphne
David
Dean
Dennis
Derek
Dexter
Diane
Dirk
Dixon
Donald
Donna
Douglas
Drake
Drew
Durwin
Dwayne
Earl
Edgar
Edith
Edmund
Edward
Edwin
Elaine
Eleanor
Elijah
Elizabeth
Ella
Ellen
Elliott
Eloise
Elton
Elvis
Emily
Emma
Eric
Erika
Ernest
Ethan
Eugene
Eve
Fabian
Faye
Felicia
Felix
Ferdinand
Ferris
Fiona
Fitzgerald
Francis
Frida
Gabriel
Gabrielle
Garth
George
Gerald
Goddard
Godwin
Graham
Gregory
Guy
Harley
Harmony
Harold
Heath
Heather
Hector
Henry
Herbert
Herman
Holly
Homer
Howard
Ian
Igor
Irene
Irving
Isaac
Ivan
Ivy
Jack
Jacob
Jacqueline
Jade
Jane
Janice
Jarvis
Jason
Jeffrey
Jennifer
Jesse
Joan
John
Jonathan
Joshua
Joy
Judith
Julia
Juliet
June
Kane
Karen
Kate
Katherine
Kathleen
Katrina
Kayleigh
Keith
Kelvin
Kirk
Kirsten
Konrad
Kurt
Kyle
Lancelot
Lara
Lars
Laura
Laurence
Laverne
Leah
Lilly
Lindsay
Lisa
Lizzie
Lois
Lola
Lorelei
Lorraine
Louise
Lucy
Luke
Luther
Mabel
Madeleine
Malcom
Margot
Martin
Marvin
Matilda
Matthew
Maurice
Megan
Melinda
Melody
Michelle
Merlin
Michael
Misty
Molly
Monica
Morgan
Morris
Morton
Muriel
Murray
Naomi
Natalie
Nathan
Nicholas
Nigel
Nina
Noel
Nora
Norman
Norris
Odette
Olaf
Olin
Oliver
Olivia
Orson
Oscar
Oswald
Page
Pandora
Patricia
Paul
Pearl
Penelope
Percival
Peter
Philip
Phoebe
Polly
Quentin
Quincy
Ralph
Rachel
Ramona
Ramsey
Randolph
Raymond
Rebecca
Regina
Renata
Richard
Rita
Robert
Roberta
Robin
Roger
Roland
Ronald
Ross
Roy
Royce
Ruby
Rupert
Ruth
Ryan
Sabrina
Salome
Samantha
Samson
Samuel
Sapphire
Scott
Sean
Sebastian
Selena
Serena
Seth
Seymour
Sherlock
Sidney
Sigmund
Simon
Sinclair
Spencer
Stanley
Sterling
Sybil
Thalia
Thea
Theodore
Thomas
Tiffany
Timothy
Titus
Tobias
Todd
Tony
Tracy
Trent
Trevor
Tristan
Trixie
Truman
Tyler
Ursula
Valerie
Vera
Victor
Virginia
Wallace
Walter
Wanda
Whitney
Warren
Washington
Wesley
William
Wyatt
Zelda
Zoe/
@surname Lexicon.build ~w/
Abbey
Abbott
Ackerman
Adams
Alexander
Allen
Anderson
Applegate
Armstrong
Astley
Austen
Axford
Babbage
Baker
Barber
Barker
Barrett
Barry
Bartholomew
Barton
Baskerville
Baxter
Bayfield
Beamont
Beaton
Beck
Beer
Benedict
Bennet
Berkeley
Berry
Biggins
Bird
Black
Blair
Bloomfield
Blyth
Bott
Bowie
Bradley
Brewer
Brimson
Broom
Bruce
Bubb
Bull
Burns
Bush
Byrd
Carpenter
Cartridge
Case
Clapton
Clark
Cliburn
Cloud
Clowney
Cook
Cooper
Cotton
Cox
Crawford
Crowley
Cruise
Cumberbatch
Currie
Curry
Darwin
Dawkins
Dean
Dick
Drake
Driver
Duke
Dunham
Dunn
Eagle
Eastwood
Edwin
Erlang
Evelyn
Farlow
Ferguson
Finch
Finn
Firestone
Fitzgerald
Fleetwood
Fletcher
Florence
Fowler
Fox
Franks
Gay
Ginger
Godfrey
Gold
Goodman
Goodwin
Gowler
Graham
Green
Gregg
Hadley
Hall
Halley
Hallman
Ham
Hammerton
Hammond
Hancock
Hanson
Hardy
Harrold
Haskell
Hawkins
Hayden
Hector
Hill
Hitchcock
Hoffman
Hopkins
House
Hunter
Huxley
Jackson
Jacobs
Johnson
Joplin
Kane
Kingston
Kitchen
Lake
Lamp
Lane
Lason
Lawyer
Lennon
Levingston
Lewis
Light
Little
Love
Lovejoy
MacCoy
Marshall
Martin
Mason
Maxwell
May
Mayor
McCloud
Melton
Merrington
Miller
Mitchell
Monk
Monroe
Moore
Morris
Morrison
Mycroft
Myers
Nash
Natt
New
Nil
Nixon
Null
Orr
Oxley
Palmer
Parker
Parrott
Patrick
Phelps
Phoenix
Pierotti
Piper
Poe
Potter
Powers
Priest
Quill
Randolph
Ravenscroft
Rawling
Record
Sanders
Saxon
Seals
Sherman
Sherwood
Shoemaker
Simon
Simpson
Smith
Sparrow
Spencer
Spielberg
Stanfield
Stanley
Stanton
Stephens
Stone
Stuart
Summerfield
Swaine
Taylor
Teagarden
Thomas
Thompson
Thorn
Throsby
Tillard
Trevor
Trout
Troutman
Tucker
Vincent
Walker
Wall
Wallman/
@fullname_format Lexicon.build [
"{{name}} {{surname}}",
"{{name}} {{name}} {{surname}}",
"{{name}} {{surname}} {{surname}}",
"{{name}} {{surname}}-{{surname}}",
"{{name}} {{surname}} Jr",
"{{name}} {{surname}} Son",
"{{name}} {{surname}} 2nd"
]
def name,
do: Lexicon.take(@name)
def surname,
do: Lexicon.take(@surname)
def fullname do
@fullname_format
|> Lexicon.take()
|> parse()
end
def lexicons do
%{
names: @name,
surnames: @surname,
fullname_formats: @fullname_format
}
end
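  # Expands the {{name}} and {{surname}} placeholders in a full name format
  # string, copying all other characters through unchanged.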
defp parse(str),
do: parse(str, "")
defp parse("{{name}}" <> rest, acc),
do: parse(rest, acc <> name())
defp parse("{{surname}}" <> rest, acc),
do: parse(rest, acc <> surname())
defp parse(<<char::utf8, rest::binary>>, acc),
do: parse(rest, acc <> <<char::utf8>>)
defp parse("", acc),
do: acc
end
| 11.74376 | 48 | 0.544914 |
9e0394b1e6deaf2070c567aceeda7e0849edf409 | 124 | ex | Elixir | debian/menu.ex | rzr/pfs | d0381a0366d0b42f58ea0e9cc54d02c73211be5f | [
"BSD-2-Clause"
] | 17 | 2016-02-19T14:47:33.000Z | 2022-02-18T01:12:59.000Z | debian/menu.ex | rzr/pfs | d0381a0366d0b42f58ea0e9cc54d02c73211be5f | [
"BSD-2-Clause"
] | 2 | 2015-02-20T23:06:39.000Z | 2017-10-03T11:33:04.000Z | debian/menu.ex | rzr/pfs | d0381a0366d0b42f58ea0e9cc54d02c73211be5f | [
"BSD-2-Clause"
] | 5 | 2015-10-13T20:20:39.000Z | 2019-10-24T06:12:13.000Z | ?package(pcloudfs):needs="X11|text|vc|wm" section="Applications/see-menu-manual"\
title="pcloudfs" command="/usr/bin/pfs"
| 41.333333 | 81 | 0.741935 |
9e03b405d700778f77b146a4a996fd5ead2ed0ac | 317 | exs | Elixir | config/config.exs | straw-hat-team/straw_hat_review | 342dbbfac0ac96287111babd59b5321efdd8728d | [
"MIT"
] | 11 | 2018-04-09T06:32:02.000Z | 2019-09-11T14:18:21.000Z | config/config.exs | straw-hat-labs/straw_hat_review | 342dbbfac0ac96287111babd59b5321efdd8728d | [
"MIT"
] | 64 | 2018-03-30T06:21:49.000Z | 2019-11-01T13:57:34.000Z | config/config.exs | straw-hat-labs/straw_hat_review | 342dbbfac0ac96287111babd59b5321efdd8728d | [
"MIT"
] | 1 | 2018-06-21T23:00:00.000Z | 2018-06-21T23:00:00.000Z | use Mix.Config
config :straw_hat_review, ecto_repos: [StrawHat.Review.Repo]
config :straw_hat_review, StrawHat.Review.Repo,
database: "straw_hat_review_test",
username: "postgres",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
config :arc, storage: Arc.Storage.Local
config :logger, level: :warn
| 22.642857 | 60 | 0.763407 |
9e03cf2fcf3e089dc897518d2e298e4443632778 | 981 | ex | Elixir | apps/astarte_housekeeping/lib/astarte_housekeeping_web/plug/metrics_plug.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 191 | 2018-03-30T13:23:08.000Z | 2022-03-02T12:05:32.000Z | apps/astarte_housekeeping/lib/astarte_housekeeping_web/plug/metrics_plug.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 402 | 2018-03-30T13:37:00.000Z | 2022-03-31T16:47:10.000Z | apps/astarte_housekeeping/lib/astarte_housekeeping_web/plug/metrics_plug.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 24 | 2018-03-30T13:29:48.000Z | 2022-02-28T11:10:26.000Z | #
# This file is part of Astarte.
#
# Copyright 2020 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule Astarte.HousekeepingWeb.MetricsPlug do
import Plug.Conn
def init(_args), do: nil
def call(%{request_path: "/metrics", method: "GET"} = conn, _opts) do
metrics = TelemetryMetricsPrometheus.Core.scrape()
conn
|> put_resp_content_type("text/plain")
|> send_resp(200, metrics)
|> halt()
end
def call(conn, _opts), do: conn
end
| 28.028571 | 74 | 0.72579 |
9e03dfb7e575d0570d261b4e156721be838f7676 | 1,078 | exs | Elixir | mix.exs | Gimi/coders | 28e20558ac26709c62b8463dae963e4e41a759b2 | [
"MIT"
] | null | null | null | mix.exs | Gimi/coders | 28e20558ac26709c62b8463dae963e4e41a759b2 | [
"MIT"
] | null | null | null | mix.exs | Gimi/coders | 28e20558ac26709c62b8463dae963e4e41a759b2 | [
"MIT"
] | null | null | null | defmodule Coders.Mixfile do
use Mix.Project
def project do
[app: :coders,
version: "0.0.1",
elixir: "~> 1.0",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix] ++ Mix.compilers,
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[mod: {Coders, []},
applications: [:phoenix, :phoenix_html, :cowboy, :logger, :httpoison]]
end
# Specifies which paths to compile per environment
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Specifies your project dependencies
#
# Type `mix help deps` for examples and options
defp deps do
[{:phoenix, "~> 1.0.2"},
{:ecto, "~> 1.0.3"},
{:rethinkdb, "~> 0.1.0"},
{:httpoison, "~> 0.7.3"},
{:phoenix_html, "~> 2.1"},
{:phoenix_live_reload, "~> 1.0", only: :dev},
{:cowboy, "~> 1.0"}]
end
end
| 26.95 | 75 | 0.585343 |
9e04323188a2467314879b49cd9714483a3c7733 | 2,947 | ex | Elixir | lib/crawler/monova.ex | milandzo/milandzotorismo | 51e92afbc776b8a60d8f9e79fc44d6669724dcf9 | [
"MIT"
] | null | null | null | lib/crawler/monova.ex | milandzo/milandzotorismo | 51e92afbc776b8a60d8f9e79fc44d6669724dcf9 | [
"MIT"
] | null | null | null | lib/crawler/monova.ex | milandzo/milandzotorismo | 51e92afbc776b8a60d8f9e79fc44d6669724dcf9 | [
"MIT"
] | 1 | 2020-10-05T14:16:08.000Z | 2020-10-05T14:16:08.000Z | defmodule Magnetissimo.Crawler.Monova do
use GenServer
alias Magnetissimo.Torrent
alias Magnetissimo.Crawler.Helper
require Logger
def start_link do
queue = initial_queue()
GenServer.start_link(__MODULE__, queue)
end
def init(queue) do
schedule_work()
{:ok, queue}
end
defp schedule_work do
Process.send_after(self(), :work, 1 * 1 * 100)
end
# Callbacks
def handle_info(:work, queue) do
new_queue = case :queue.out(queue) do
{{_value, item}, queue_2} ->
process(item, queue_2)
_ ->
Logger.debug "[Monova] Queue is empty - restarting queue."
initial_queue()
end
schedule_work()
{:noreply, new_queue}
end
def process({:page_link, url}, queue) do
IO.puts "Downloading page: " <> url
torrents = Helper.download(url) |> torrent_links
queue = Enum.reduce(torrents, queue, fn torrent, queue ->
:queue.in({:torrent_link, torrent}, queue)
end)
queue
end
def process({:torrent_link, url}, queue) do
torrent_struct = Helper.download(url) |> torrent_information
Torrent.save_torrent(torrent_struct)
queue
end
## ##
# Parsing #
## ##
def initial_queue do
urls =
for i <- 1..50 do
{:page_link, "https://monova.org/latest?page=#{i}"}
end
:queue.from_list(urls)
end
def torrent_links(cat_body) do
Logger.debug "[Monova] Extracting Torrents"
cat_body
|> Floki.find("a")
|> Floki.attribute("href")
|> Enum.filter(fn(a) -> String.contains?(a, "/torrent/") end)
|> Enum.map(fn(url) -> "https:" <> url end)
end
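  # Scrapes a torrent detail page for its name, magnet link and total size.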
def torrent_information(torrent_body) do
name = torrent_body
|> Floki.find("title")
|> Floki.text
|> String.replace(" - Torrent", "")
|> String.trim
magnet = torrent_body
|> Floki.find("#download-magnet")
|> Floki.attribute("href")
|> Enum.at(0)
size_html = get_size(torrent_body) |> String.split(" ")
size_value = Enum.at(size_html, 0)
unit = Enum.at(size_html, 1)
size = Helper.size_to_bytes(size_value, unit) |> Kernel.to_string
## Leechers and Seeders informations are not provided by Monova.
%{
name: name,
magnet: magnet,
size: size,
website_source: "Monova",
seeders: 0,
leechers: 0
}
end
@spec get_size(String.t) :: String.t
def get_size(torrent_body) do
size = torrent_body
|> Floki.find(".general-table")
|> Floki.find("tr")
|> Enum.sort
|> Enum.reverse
|> Enum.at(1)
|> extract_size
case size do
{:error, :bad_tree} ->
Logger.error "Couldn't properly parse this page!"
"0"
{:ok, torrent_size} ->
torrent_size
end
end
# A tiny helper.
defp extract_size({"tr", [], [{"td", [], ["Total Size:"]}, {"td", [], [size]}]}) do
{:ok, size}
end
defp extract_size(_), do: {:error, :bad_tree}
end
| 23.576 | 85 | 0.601289 |
9e044fd424999d271ba142d9f6a391a97d6dccf0 | 13,911 | ex | Elixir | lib/remsign/backend.ex | trustcor/remsign | 5186675206654d950e15b718d68f655f03b135dd | [
"MIT"
] | 1 | 2017-09-27T16:13:12.000Z | 2017-09-27T16:13:12.000Z | lib/remsign/backend.ex | trustcor/remsign | 5186675206654d950e15b718d68f655f03b135dd | [
"MIT"
] | null | null | null | lib/remsign/backend.ex | trustcor/remsign | 5186675206654d950e15b718d68f655f03b135dd | [
"MIT"
] | null | null | null | defmodule Remsign.Backend do
@moduledoc """
Functionality for a backend server, which gets registered to one or more
brokers which then send signing requests to it.
"""
use GenServer
import Logger, only: [log: 2]
import Remsign.Utils, only: [get_in_default: 3]
def init([cfg = %{}, kl]) do
sockname = to_string(cfg[:ident]) <> "." <> cfg[:host] <> "." <> to_string(cfg[:port])
log(:debug, "Starting backend: #{sockname}")
sock = case ExChumak.socket(:req, String.to_charlist(sockname)) do
{:error, {:already_started, sockpid}} -> sockpid
{:ok, sockpid} -> sockpid
e ->
log(:error, "#{inspect(e)} on chumak connect() received")
nil
end
{pub, priv} = Remsign.Utils.generate_rsa(Map.get(cfg, :modulus_size, 2048))
{:ok, _pid} = ExChumak.connect(sock, :tcp, String.to_charlist(cfg[:host]), cfg[:port])
ExChumak.send(sock, "ping")
case ExChumak.recv(sock) do
{:ok, "pong"} ->
nst = Map.merge(cfg, %{
sock: sock,
ekpriv: priv,
ekpub: pub,
kl: kl})
        GenServer.call(kl, {:set_backend, self()})
{_r, dealers, be, sup} = do_register(nst)
        me = self()
ppid = spawn_link(fn -> pinger(me, sock, get_in_default(cfg, [:timeout], 1000)) end)
{:ok,
Map.put(nst, :backends, be) |>
Map.put(:dealers, dealers) |>
Map.put(:supervisor, sup) |>
Map.put(:pinger, ppid)}
e ->
log(:error, "Non pong error from registrar: #{inspect(e)}")
        {:stop, :no_registry_connect}
end
end
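  # ping/2 performs one ping/pong round trip with the registrar; pinger/3 repeats
  # it every 500 ms, forwarding :ping_ok / :ping_error / :ping_timeout to the
  # parent and stopping once the registrar fails to answer in time.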
defp ping(parent, sock) do
case ExChumak.send(sock, "ping") do
:ok ->
case ExChumak.recv(sock) do
{:ok, "pong"} ->
send parent, :ok
_ ->
send parent, :error
end
_ ->
send parent, :error
end
end
defp pinger(parent, sock, timeout) do
    me = self()
spawn fn -> ping(me, sock) end
continue = receive do
:ok ->
send parent, :ping_ok
:cont
:error ->
send parent, :ping_error
:stop
:stop ->
:stop
after
timeout ->
send parent, :ping_timeout
:stop
end
case continue do
:cont ->
:timer.sleep(500)
pinger(parent, sock, timeout)
:stop ->
:ok
end
end
def start_link(cfg = %{}, kl) do
defaults = %{
num_workers: 5,
sock: nil,
host: "127.0.0.1",
port: 25000,
ident: "backend",
skew: 60,
nstore: fn n -> Remsign.Utils.cc_store_nonce(:nonce_cache, n) end
}
GenServer.start_link __MODULE__, [ Map.merge(defaults, Remsign.Config.atomify(cfg)), kl ], name: String.to_atom(cfg[:ident])
end
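  # The registrar encrypts the registration response to our ephemeral RSA key;
  # decrypt the JWE payload and decode the JSON body it carries.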
defp handle_register_response(%{ "command" => "register", "response" => resp = %{ "ciphertext" => _ } }, st) do
alg = %{alg: :jose_jwe_alg_rsa, enc: :jose_jwe_enc_chacha20_poly1305}
{pt, _m} = JOSE.JWE.block_decrypt(JOSE.JWK.from_map(st[:ekpriv]), {alg, resp})
case Poison.decode(pt) do
{:ok, m = %{}} -> Map.put(m, "command", "register")
_ -> {:error, :malformed_response}
end
end
defp handle_register_response(_, _st) do
{ :error, :malformed_response }
end
defp do_register_h(_, nil, st) do
log(:error, "Unable to load private signature key: #{st[:signkey]}")
    {nil, nil, nil, nil}
end
defp do_register_h(msg, sigkey, st) do
import Supervisor.Spec, warn: false
log(:debug, "do_register_h: msg = #{inspect(msg)}, signature alg = #{st[:signalg]}")
    m = Remsign.Utils.wrap(msg, st[:signkey], st[:signalg], sigkey)
    log(:debug, "register message = #{inspect(m)}")
    ExChumak.send(st[:sock], m)
case ExChumak.recv(st[:sock]) do
{:ok, rep} ->
rep2 = Remsign.Utils.unwrap(rep, fn k, _kt -> GenServer.call(st[:kl], {:lookup, k, :public}) end, st[:skew], st[:nstore])
rep = handle_register_response(rep2, st)
dealers = Enum.map(Map.get(rep, "dealers"),
fn {kn, %{ "hmac" => kh, "port" => p}} ->
{kn, case Base.decode16(kh, case: :mixed) do
{:ok, k} -> {p, k}
_ -> {nil, nil}
end} end) |>
Enum.reject(fn {_kn, {_p, k}} -> k == nil end) |>
Enum.into(%{})
children = Enum.map(1..st[:num_workers],
fn n ->
Supervisor.Spec.worker(Remsign.BackendWorker, [{st, dealers, n}],
id: String.to_atom("Remsign.BackendWorker.#{n}"))
end)
{:ok, sup} = Supervisor.start_link(children, strategy: :one_for_one,
restart: :transient)
{rep, dealers, children, sup}
e ->
log(:error, "Unexpected reply from register: #{inspect(e)}")
{nil, nil, nil, nil}
end
end
def do_register(st) do
msg = %{ command: "register",
params: %{
pubkeys: GenServer.call(st[:kl], :list_keys),
ekey: st[:ekpub]
}
}
sigkey = GenServer.call(st[:kl], {:lookup, st[:signkey], :private})
do_register_h(msg, sigkey, st)
end
defp handle_add_key_response(%{ "command" => "add_key", "response" => resp = %{ "ciphertext" => _ } }, st) do
alg = %{alg: :jose_jwe_alg_rsa, enc: :jose_jwe_enc_chacha20_poly1305}
{pt, _m} = JOSE.JWE.block_decrypt(JOSE.JWK.from_map(st[:ekpriv]), {alg, resp})
case Poison.decode(pt) do
{:ok, m = %{}} -> Map.put(m, "command", "add_key")
_ -> {:error, :malformed_response}
end
end
defp handle_add_key_response(_, _st) do
{ :error, :malformed_response }
end
defp do_add_key_h(_, nil, st) do
log(:error, "Unable to load private signature key: #{st[:signkey]}")
nil
end
defp do_add_key_h(msg, sigkey, st) do
m = Remsign.Utils.wrap(msg, st[:signkey], st[:signalg], sigkey)
ExChumak.send(st[:sock], m)
case ExChumak.recv(st[:sock]) do
{:ok, rep} ->
rep2 = Remsign.Utils.unwrap(rep, fn k, _kt -> GenServer.call(st[:kl], {:lookup, k, :public}) end, st[:skew], st[:nstore])
case handle_add_key_response(rep2, st) do
%{ "command" => "add_key", "dealer" => %{ "port" => port, "hmac" => kh }} ->
case Base.decode16(kh, case: :mixed) do
{:ok, k} ->
kn = get_in(msg, [:params, :name])
log(:debug, "Port for kn #{kn} port = #{port}, hmac = #{inspect(k)}")
Enum.each(Supervisor.which_children(st[:supervisor]),
fn {_, cpid, _, _} -> GenServer.call(cpid, {:add_dealer, kn, port, k}) end)
_ -> nil
end
e ->
log(:error, "Add key response yields unexpected reply: #{inspect(e)}")
nil
end
e ->
log(:error, "Unexpected reply from add_key recv: #{inspect(e)}")
nil
end
end
def do_add_key(kn, pk, st) do
msg = %{ command: "add_key",
params: %{
name: kn,
pubkey: pk,
ekey: st[:ekpub]
}
}
log(:debug, "Looking up signing key #{st[:signkey]}")
sigkey = GenServer.call(st[:kl], {:lookup, st[:signkey], :private})
log(:debug, "Signing add_key with #{inspect(sigkey)}")
do_add_key_h(msg, sigkey, st)
end
def hmac(be, kn) do
GenServer.call(be, {:hmac, kn})
end
def handle_call({:store_nonce, n}, _from, st) do
{:reply, st[:nstore].(n), st}
end
def handle_call({:hmac, kn}, _from, st = %{ dealers: d = %{} }) do
{:reply, Map.get(d, kn, {nil, nil}) |> elem(1), st}
end
def handle_call({:add_key, kn, pk}, _from, st) when is_binary(kn) do
log(:debug, "Adding key #{kn}")
{:reply, do_add_key(kn, pk, st), st}
end
def handle_call({:del_key, kn}, _from, st) when is_binary(kn) do
log(:debug, "Deleting key #{kn}")
{:reply, :ok, st}
end
def handle_info(:ping_ok, st) do
# do nothing
{:noreply, st}
end
def handle_info(:ping_error, st) do
log(:error, "Backend #{st[:ident]} ping returns error")
{:stop, :shutdown, st}
end
def handle_info(:ping_timeout, st) do
log(:error, "Backend ping #{st[:ident]} to broker has timed out")
children = Supervisor.which_children(st[:supervisor])
log(:error, "Backend workers = #{inspect(children)}")
sr = Enum.map(children,
fn {_, cpid, _, _} -> GenServer.call(cpid, :killme) end)
log(:error, "Stop children results = #{inspect(sr)}")
{:stop, :shutdown, st}
end
def terminate(reason, st) do
log(:error, "Backend #{st[:ident]} terminating: #{inspect(reason)}")
:shutdown
end
end
defmodule Remsign.BackendWorker do
use GenServer
import Logger, only: [log: 2]
defp make_sock(host, port, hm) do
wid = :crypto.strong_rand_bytes(8) |> Base.encode16(case: :lower)
sock = case ExChumak.socket(:rep, String.to_charlist(wid)) do
{:error, {:already_started, sockpid}} -> sockpid
{:ok, sockpid} -> sockpid
e ->
log(:error, "#{inspect(e)} on chumak connect() received")
nil
end
case ExChumak.connect(sock, :tcp, String.to_charlist(host), port) do
{:ok, _pid} ->
log(:info, "Backend worker connected to port #{port}")
:ok
e ->
log(:error, "Worker unable to connect to #{inspect(host)}:#{port}: #{inspect(e)}")
nil
end
:timer.sleep(5)
{sock, hm, wid}
end
def init({st, dealers, n}) do
socks = Enum.map(dealers, fn {kn, {port, k}} -> {kn, make_sock(st[:host], port, k)} end) |> Enum.into(%{})
log(:debug, "Starting backend worker #{inspect(n)} on #{inspect(socks)}")
me = self
_pids = Enum.map(socks, fn {_kn, {sock, k, wid}} -> spawn(fn -> listener(sock, k, wid, me, st) end) end)
{:ok, Map.merge(st, %{socks: socks, parent: me, ident: n})}
end
def start_link({st, dealers, n}) do
GenServer.start_link __MODULE__, {st, dealers, n}, name: String.to_atom("Remsign.BackendWorker.#{st[:ident]}.#{n}")
end
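  # Dispatch on the JOSE key type: RSA, EC and DSA keys are signed via Erlang's
  # :public_key, while Ed25519 (OKP) keys go through :jose_curve25519.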
defp do_sign(d, alg, %{kty: :jose_jwk_kty_rsa}, k), do: :public_key.sign({:digest, d}, alg, k)
defp do_sign(d, alg, %{kty: :jose_jwk_kty_ec}, k), do: :public_key.sign({:digest, d}, alg, k)
defp do_sign(d, alg, %{kty: :jose_jwk_kty_dsa}, k), do: :public_key.sign({:digest, d}, alg, k)
defp do_sign(d, _alg, %{kty: :jose_jwk_kty_okp_ed25519}, k), do: :jose_curve25519.ed25519_sign(d, k)
defp command_reply("sign", %{ "keyname" => kname, "hash_type" => htype, "digest" => digest }, hm, st ) do
log(:debug, "Got sign request for #{inspect(kname)}")
case Remsign.Utils.known_hash(htype) do
nil ->
Poison.encode!(%{ error: :unknown_digest_type })
alg when is_atom(alg) ->
case GenServer.call(st[:kl], {:lookup, kname, :private}) do
nil ->
Poison.encode!(%{ error: :unknown_key })
km ->
log(:debug, "Decoding digest: #{inspect(digest)}")
case Base.decode16(digest, case: :lower) do
{:ok, d} ->
{kty, kk} = JOSE.JWK.from_map(km) |> JOSE.JWK.to_key
%{ payload: do_sign(d, alg, kty, kk) |> Base.encode16(case: :lower) } |>
Remsign.Utils.wrap("backend-key", "HS256", JOSE.JWK.from_oct(hm))
:error ->
Poison.encode!(%{error: :malformed_digest})
end
end
end
end
defp command_reply(c, _, _, _st) do
log(:error, "Unknown command #{inspect(c)}")
Poison.encode!(%{ error: :unknown_command })
end
defp handle_message(m, hm, st) do
msg = Remsign.Utils.unwrap(m,
fn _k, :public -> JOSE.JWK.from_oct(hm) end,
st[:skew], fn n -> st[:nstore].(n) end)
command_reply(Map.get(msg, "command"), Map.get(msg, "parms"), hm, st)
end
defp listener(sock, hm, wid, parent, st) do
case ExChumak.recv(sock) do
{:ok, "ping"} ->
log(:debug, "Ping message received on #{wid}")
send parent, {:reply, sock, "pong"}
{:ok, m} ->
send parent, {:reply, sock, handle_message(m, hm, st)}
e ->
log(:warn, "Unknown message received on #{wid}: #{inspect(e)}")
send parent, {:reply, sock, Poison.encode(%{ error: :unknown_command })}
end
listener(sock, hm, wid, parent, st)
end
def handle_info({:reply, sock, msg}, st) do
ExChumak.send(sock, msg)
{:noreply, st}
end
def handle_call(:killme, from, st) do
log(:info, "Killme called: #{inspect(st[:socks])}")
r = Enum.map(st[:socks], fn {kn, {sock, _hm, _wid}} -> log(:info, "Requesting stop on #{inspect(sock)}/#{kn}"); ExChumak.stop_socket(sock) end)
log(:info, "Killing backend worker #{st[:ident]} by request from backend #{inspect(from)}: socket results: #{inspect(r)}")
{:reply, r, st}
end
def handle_call({:add_dealer, kn, port, hm}, _from, st) do
{sock, hm, wid} = make_sock(st[:host], port, hm)
spawn_link(fn -> listener(sock, hm, wid, st[:parent], st) end)
    # Store the same {socket, hmac, worker_id} shape that init/1 uses, so the
    # :killme handler can pattern-match dealers added at runtime as well.
    {:reply, :ok, Map.put(st, :socks, Map.put(st[:socks], kn, {sock, hm, wid}))}
end
def terminate(reason, st) do
log(:error, "Backend Worker #{st[:ident]} terminating: #{inspect(reason)}")
:shutdown
end
end
| 36.132468 | 149 | 0.53641 |
9e0451712c6b495cc20841ca723ea3433836f08b | 3,487 | ex | Elixir | clients/content/lib/google_api/content/v21/model/orders_return_refund_line_item_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/content/lib/google_api/content/v21/model/orders_return_refund_line_item_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/content/lib/google_api/content/v21/model/orders_return_refund_line_item_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V21.Model.OrdersReturnRefundLineItemRequest do
@moduledoc """
## Attributes
* `lineItemId` (*type:* `String.t`, *default:* `nil`) - The ID of the line item to return. Either lineItemId or productId is required.
* `operationId` (*type:* `String.t`, *default:* `nil`) - The ID of the operation. Unique across all operations for a given order.
* `priceAmount` (*type:* `GoogleApi.Content.V21.Model.Price.t`, *default:* `nil`) - The amount to be refunded. This may be pre-tax or post-tax depending on the location of the order. If omitted, refundless return is assumed.
* `productId` (*type:* `String.t`, *default:* `nil`) - The ID of the product to return. This is the REST ID used in the products service. Either lineItemId or productId is required.
* `quantity` (*type:* `integer()`, *default:* `nil`) - The quantity to return and refund. Quantity is required.
* `reason` (*type:* `String.t`, *default:* `nil`) - The reason for the return. Acceptable values are: - "`customerDiscretionaryReturn`" - "`customerInitiatedMerchantCancel`" - "`deliveredTooLate`" - "`expiredItem`" - "`invalidCoupon`" - "`malformedShippingAddress`" - "`other`" - "`productArrivedDamaged`" - "`productNotAsDescribed`" - "`qualityNotAsExpected`" - "`undeliverableShippingAddress`" - "`unsupportedPoBoxAddress`" - "`wrongProductShipped`"
* `reasonText` (*type:* `String.t`, *default:* `nil`) - The explanation of the reason.
* `taxAmount` (*type:* `GoogleApi.Content.V21.Model.Price.t`, *default:* `nil`) - The amount of tax to be refunded. Optional, but if filled, then priceAmount must be set. Calculated automatically if not provided.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:lineItemId => String.t() | nil,
:operationId => String.t() | nil,
:priceAmount => GoogleApi.Content.V21.Model.Price.t() | nil,
:productId => String.t() | nil,
:quantity => integer() | nil,
:reason => String.t() | nil,
:reasonText => String.t() | nil,
:taxAmount => GoogleApi.Content.V21.Model.Price.t() | nil
}
field(:lineItemId)
field(:operationId)
field(:priceAmount, as: GoogleApi.Content.V21.Model.Price)
field(:productId)
field(:quantity)
field(:reason)
field(:reasonText)
field(:taxAmount, as: GoogleApi.Content.V21.Model.Price)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V21.Model.OrdersReturnRefundLineItemRequest do
def decode(value, options) do
GoogleApi.Content.V21.Model.OrdersReturnRefundLineItemRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V21.Model.OrdersReturnRefundLineItemRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 51.279412 | 456 | 0.704904 |
9e046d2b8bdeb6488a6aa41ca86e868f9754aff8 | 18,269 | exs | Elixir | apps/graphql/test/web/resolvers/reimbursement_contract_resolver_test.exs | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 8 | 2019-06-14T11:34:49.000Z | 2021-08-05T19:14:24.000Z | apps/graphql/test/web/resolvers/reimbursement_contract_resolver_test.exs | edenlabllc/ehealth.api.public | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 1 | 2019-07-08T15:20:22.000Z | 2019-07-08T15:20:22.000Z | apps/graphql/test/web/resolvers/reimbursement_contract_resolver_test.exs | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 6 | 2018-05-11T13:59:32.000Z | 2022-01-19T20:15:22.000Z | defmodule GraphQL.ReimbursementContractResolverTest do
@moduledoc false
use GraphQL.ConnCase, async: true
import Core.Factories, only: [insert: 2, insert: 3, build: 2]
import Core.Expectations.Mithril
import Core.Expectations.Signature, only: [edrpou_signed_content: 2]
import Mox
alias Absinthe.Relay.Node
alias Core.ContractRequests.ReimbursementContractRequest
alias Core.Utils.TypesConverter
alias Ecto.UUID
@list_query """
query ListContractsQuery($filter: ReimbursementContractFilter) {
reimbursementContracts(first: 10, filter: $filter) {
nodes {
id
databaseId
status
startDate
nhsSigner {
databaseId
}
medicalProgram {
name
}
}
}
}
"""
@get_by_id_query """
query GetContractQuery($id: ID!) {
reimbursementContract(id: $id) {
id
}
}
"""
@printout_content_query """
query GetContractQuery($id: ID!) {
reimbursementContract(id: $id) {
id
printoutContent
}
}
"""
setup :verify_on_exit!
setup %{conn: conn} do
conn = put_scope(conn, "contract:terminate contract:read")
{:ok, %{conn: conn}}
end
describe "list" do
test "filter by legal entity relation", %{conn: conn} do
nhs(2)
from = insert(:prm, :legal_entity)
to = insert(:prm, :legal_entity)
insert(:prm, :related_legal_entity, merged_from: from, merged_to: to)
contract_related_from = insert(:prm, :reimbursement_contract, %{contractor_legal_entity: from})
contract_related_to = insert(:prm, :reimbursement_contract, %{contractor_legal_entity: to})
# merged from
variables = %{
filter: %{
legalEntityRelation: "MERGED_FROM",
isSuspended: false,
status: contract_related_from.status
}
}
resp_body =
conn
|> put_client_id()
|> post_query(@list_query, variables)
|> json_response(200)
resp_entities = get_in(resp_body, ~w(data reimbursementContracts nodes))
refute resp_body["errors"]
assert 1 == length(resp_entities)
assert contract_related_from.id == hd(resp_entities)["databaseId"]
# merged to
variables = %{filter: %{legalEntityRelation: "MERGED_TO"}}
resp_body =
conn
|> put_client_id()
|> post_query(@list_query, variables)
|> json_response(200)
resp_entities = get_in(resp_body, ~w(data reimbursementContracts nodes))
refute resp_body["errors"]
assert 1 == length(resp_entities)
assert contract_related_to.id == hd(resp_entities)["databaseId"]
end
test "order by contractor legal_entity edrpou", %{conn: conn} do
nhs()
contract3 =
insert(:prm, :reimbursement_contract, contractor_legal_entity: build(:legal_entity, edrpou: "77744433322"))
contract1 =
insert(:prm, :reimbursement_contract, contractor_legal_entity: build(:legal_entity, edrpou: "33344433322"))
contract2 =
insert(:prm, :reimbursement_contract, contractor_legal_entity: build(:legal_entity, edrpou: "55544433322"))
query = """
query ListContractsQuery($orderBy: ReimbursementContractOrderBy) {
reimbursementContracts(first: 10, orderBy: $orderBy) {
nodes {
databaseId
}
}
}
"""
variables = %{orderBy: "CONTRACTOR_LEGAL_ENTITY_EDRPOU_ASC"}
resp_body =
conn
|> put_client_id()
|> post_query(query, variables)
|> json_response(200)
resp_entities = get_in(resp_body, ~w(data reimbursementContracts nodes))
refute resp_body["errors"]
assert 3 == length(resp_entities)
assert [contract1.id, contract2.id, contract3.id] == Enum.map(resp_entities, & &1["databaseId"])
end
test "order by medical_program name", %{conn: conn} do
nhs()
insert_contract =
&insert(:prm, :reimbursement_contract, medical_program: insert(:prm, :medical_program, name: &1))
medical_programs_name = ["Unknown program 3", "Available medications 1", "Free medications 2"]
[contract3, contract1, contract2] = Enum.map(medical_programs_name, &insert_contract.(&1))
query = """
query ListContractsQuery($orderBy: ReimbursementContractOrderBy) {
reimbursementContracts(first: 10, orderBy: $orderBy) {
nodes {
databaseId
medicalProgram {
name
}
}
}
}
"""
variables = %{orderBy: "MEDICAL_PROGRAM_NAME_ASC"}
resp_body =
conn
|> put_client_id()
|> post_query(query, variables)
|> json_response(200)
resp_entities = get_in(resp_body, ~w(data reimbursementContracts nodes))
assert [contract1.id, contract2.id, contract3.id] == Enum.map(resp_entities, & &1["databaseId"])
end
test "filter by medical_program attributes", %{conn: conn} do
nhs()
insert_medical_program = &insert(:prm, :medical_program, is_active: &1, name: &2)
insert_contract = &insert(:prm, :reimbursement_contract, medical_program: insert_medical_program.(&1, &2))
insert_contract.(true, "Medical program")
insert_contract.(true, "Available program")
insert_contract.(true, "Available medications")
insert_contract.(false, "Free medications program")
insert_contract.(false, "Unknown drugs")
query = """
query ListContractsQuery($filter: ReimbursementContractFilter) {
reimbursementContracts(first: 10, filter: $filter) {
nodes {
databaseId
medicalProgram {
name
isActive
}
}
}
}
"""
variables = %{filter: %{medicalProgram: %{isActive: true, name: "program"}}}
resp_body =
conn
|> put_client_id()
|> post_query(query, variables)
|> json_response(200)
[entity, _] = resp_entities = get_in(resp_body, ~w(data reimbursementContracts nodes))
refute resp_body["errors"]
assert 2 == length(resp_entities)
assert true == entity["medicalProgram"]["isActive"]
assert String.contains?(entity["medicalProgram"]["name"], "program")
end
test "filter by contractor_legal_entity attributes", %{conn: conn} do
nhs()
insert_contract = &insert(:prm, :reimbursement_contract, contractor_legal_entity: build(:legal_entity, &1))
%{id: contract_id} = insert_contract.(nhs_verified: true)
%{id: contract_id2} = insert_contract.(edrpou: "111", nhs_verified: true)
insert_contract.(edrpou: "222", nhs_verified: false)
query = """
query ListContractsQuery($filter: ReimbursementContractFilter!) {
reimbursementContracts(first: 10, filter: $filter) {
nodes {
databaseId
contractorLegalEntity {
databaseId
nhsVerified
}
}
}
}
"""
variables = %{filter: %{contractorLegalEntity: %{nhsVerified: true}}}
resp_body =
conn
|> put_client_id()
|> post_query(query, variables)
|> json_response(200)
resp_entities = get_in(resp_body, ~w(data reimbursementContracts nodes))
resp_entities_ids = get_in(resp_entities, [Access.all(), "databaseId"])
refute resp_body["errors"]
assert 2 == length(resp_entities)
assert true == hd(resp_entities)["contractorLegalEntity"]["nhsVerified"]
assert MapSet.new([contract_id, contract_id2]) == MapSet.new(resp_entities_ids)
end
end
describe "get by id" do
setup %{conn: conn} do
contract = insert(:prm, :reimbursement_contract)
global_contract_id = Node.to_global_id("ReimbursementContract", contract.id)
{:ok, conn: conn, contract: contract, global_contract_id: global_contract_id}
end
test "success for NHS client", %{conn: conn, global_contract_id: global_contract_id} do
nhs()
variables = %{id: global_contract_id}
resp_body =
conn
|> put_client_id()
|> post_query(@get_by_id_query, variables)
|> json_response(200)
resp_entity = get_in(resp_body, ~w(data reimbursementContract))
refute resp_body["errors"]
assert global_contract_id == resp_entity["id"]
end
test "success for correct PHARMACY client",
%{conn: conn, contract: contract, global_contract_id: global_contract_id} do
pharmacy()
variables = %{id: global_contract_id}
resp_body =
conn
|> put_client_id(contract.contractor_legal_entity_id)
|> post_query(@get_by_id_query, variables)
|> json_response(200)
resp_entity = get_in(resp_body, ~w(data reimbursementContract))
refute resp_body["errors"]
assert global_contract_id == resp_entity["id"]
end
test "return nothing for incorrect PHARMACY client", %{conn: conn} = context do
pharmacy()
variables = %{id: context.global_contract_id}
resp_body =
conn
|> put_client_id()
|> post_query(@get_by_id_query, variables)
|> json_response(200)
resp_entity = get_in(resp_body, ~w(data reimbursementContract))
refute resp_body["errors"]
refute resp_entity
end
test "return forbidden error for incorrect client type", %{conn: conn} = context do
mis()
variables = %{id: context.global_contract_id}
resp_body =
conn
|> put_client_id()
|> post_query(@get_by_id_query, variables)
|> json_response(200)
resp_entity = get_in(resp_body, ~w(data reimbursementContract))
assert is_list(resp_body["errors"])
assert match?(%{"extensions" => %{"code" => "FORBIDDEN"}}, hd(resp_body["errors"]))
refute resp_entity
end
test "success for printoutContent field", %{conn: conn} do
nhs()
expect(MediaStorageMock, :create_signed_url, fn _, _, _, _ ->
{:ok, %{secret_url: "http://localhost/good_upload_1"}}
end)
expect(MediaStorageMock, :get_signed_content, 1, fn _ ->
{:ok, %{body: "", status_code: 200}}
end)
printout_content = "<html>Some printout content is here</html>"
contract_request = insert(:il, :reimbursement_contract_request, printout_content: printout_content)
contract = insert(:prm, :reimbursement_contract, contract_request_id: contract_request.id)
variables = %{id: Node.to_global_id("ReimbursementContract", contract.id)}
legal_entity_signer = insert(:prm, :legal_entity, edrpou: "10002000")
edrpou_signed_content(TypesConverter.atoms_to_strings(contract_request), legal_entity_signer.edrpou)
resp_body =
conn
|> put_client_id()
|> post_query(@printout_content_query, variables)
|> json_response(200)
resp_entity = get_in(resp_body, ~w(data reimbursementContract))
refute resp_body["errors"]
assert printout_content == resp_entity["printoutContent"]
end
test "fails on reimbursementContract not found resolving printoutContent", %{conn: conn} do
nhs()
contract = insert(:prm, :reimbursement_contract, contract_request_id: UUID.generate())
variables = %{id: Node.to_global_id("ReimbursementContract", contract.id)}
resp_body =
conn
|> put_client_id()
|> post_query(@printout_content_query, variables)
|> json_response(200)
resp_entity = get_in(resp_body, ~w(data reimbursementContract))
assert resp_entity
refute resp_entity["printoutContent"]
assert %{"errors" => [error]} = resp_body
assert "NOT_FOUND" == error["extensions"]["code"]
end
test "success with related entities", %{conn: conn} do
nhs()
parent_contract = insert(:prm, :reimbursement_contract)
contractor_legal_entity = insert(:prm, :legal_entity)
contractor_owner = insert(:prm, :employee)
contractor_employee = insert(:prm, :employee)
nhs_signer = insert(:prm, :employee)
nhs_legal_entity = insert(:prm, :legal_entity)
contractor_division = insert(:prm, :division, name: "Будьте здорові!")
contractor_employee_division = insert(:prm, :division, name: "Та Ви не хворійте!")
contract_request = insert(:il, :reimbursement_contract_request)
medical_program = insert(:prm, :medical_program)
contract =
insert(
:prm,
:reimbursement_contract,
parent_contract: parent_contract,
contractor_legal_entity: contractor_legal_entity,
contractor_owner: contractor_owner,
nhs_signer: nhs_signer,
nhs_legal_entity: nhs_legal_entity,
contract_request_id: contract_request.id,
medical_program: medical_program
)
insert(
:prm,
:contract_employee,
contract_id: contract.id,
employee_id: contractor_employee.id,
division_id: contractor_division.id
)
insert(
:prm,
:contract_employee,
contract_id: contract.id,
employee_id: contractor_employee.id,
division_id: contractor_employee_division.id
)
insert(:prm, :contract_division, contract_id: contract.id, division_id: contractor_division.id)
insert(:prm, :contract_division, contract_id: contract.id, division_id: contractor_employee_division.id)
id = Node.to_global_id("ReimbursementContract", contract.id)
query = """
query GetContractWithRelatedEntitiesQuery(
$id: ID!,
$divisionFilter: DivisionFilter!)
{
reimbursementContract(id: $id) {
contractorLegalEntity {
databaseId
}
contractorOwner {
databaseId
}
contractorDivisions(first: 1, filter: $divisionFilter) {
nodes{
databaseId
name
}
}
contractRequest {
databaseId
}
nhsSigner {
databaseId
}
nhsLegalEntity {
databaseId
}
parentContract {
databaseId
}
medicalProgram {
name
databaseId
}
insertedAt
updatedAt
}
}
"""
variables = %{
id: id,
divisionFilter: %{
databaseId: contractor_division.id,
name: "здоров"
}
}
resp_body =
conn
|> put_client_id()
|> post_query(query, variables)
|> json_response(200)
resp_entity = get_in(resp_body, ~w(data reimbursementContract))
refute resp_body["errors"]
assert resp_entity["insertedAt"]
assert resp_entity["updatedAt"]
assert parent_contract.id == resp_entity["parentContract"]["databaseId"]
assert contract_request.id == resp_entity["contractRequest"]["databaseId"]
assert medical_program.id == resp_entity["medicalProgram"]["databaseId"]
assert contractor_legal_entity.id == resp_entity["contractorLegalEntity"]["databaseId"]
assert contractor_owner.id == resp_entity["contractorOwner"]["databaseId"]
assert contractor_division.id == hd(resp_entity["contractorDivisions"]["nodes"])["databaseId"]
assert contractor_division.name == hd(resp_entity["contractorDivisions"]["nodes"])["name"]
assert nhs_signer.id == resp_entity["nhsSigner"]["databaseId"]
assert nhs_legal_entity.id == resp_entity["nhsLegalEntity"]["databaseId"]
end
test "success with attached documents", %{conn: conn} do
nhs()
expect(MediaStorageMock, :create_signed_url, 3, fn _, _, resource_name, id ->
{:ok, %{secret_url: "http://example.com/#{id}/#{resource_name}"}}
end)
expect(MediaStorageMock, :get_signed_content, 2, fn _url -> {:ok, %{status_code: 200, body: ""}} end)
contract_request =
insert(:il, :reimbursement_contract_request, status: ReimbursementContractRequest.status(:signed))
contract = insert(:prm, :reimbursement_contract, contract_request_id: contract_request.id)
id = Node.to_global_id("ReimbursementContract", contract.id)
query = """
query GetContractWithAttachedDocumentsQuery($id: ID!) {
reimbursementContract(id: $id) {
attachedDocuments {
type
url
}
}
}
"""
variables = %{id: id}
resp_body =
conn
|> put_client_id()
|> post_query(query, variables)
|> json_response(200)
attached_documents = get_in(resp_body, ~w(data reimbursementContract attachedDocuments))
refute resp_body["errors"]
assert 3 == length(attached_documents)
Enum.each(attached_documents, fn document ->
assert Map.has_key?(document, "type")
assert Map.has_key?(document, "url")
end)
end
test "Media Storage invalid response for attachedDocuments", %{conn: conn} do
nhs()
expect(MediaStorageMock, :create_signed_url, 1, fn _, _, _resource_name, _id ->
{:error, %{"error" => %{"message" => "not found"}}}
end)
contract_request = insert(:il, :reimbursement_contract_request)
contract = insert(:prm, :reimbursement_contract, contract_request_id: contract_request.id)
id = Node.to_global_id("ReimbursementContract", contract.id)
query = """
query GetContractWithAttachedDocumentsQuery($id: ID!) {
reimbursementContract(id: $id) {
attachedDocuments {
url
}
}
}
"""
variables = %{id: id}
resp_body =
conn
|> put_client_id()
|> post_query(query, variables)
|> json_response(200)
assert resp_body["errors"]
refute get_in(resp_body, ~w(data reimbursementContract attachedDocuments))
end
end
end
| 30.60134 | 115 | 0.621764 |
9e0488970b44f15cacd3ff62d502e8822d1ba24b | 463 | ex | Elixir | lib/ark_client/api/two/vote.ex | supaiku0/elixir-client | 2d5549ce3a876a18750a36a14ff769427688c5bb | [
"MIT"
] | null | null | null | lib/ark_client/api/two/vote.ex | supaiku0/elixir-client | 2d5549ce3a876a18750a36a14ff769427688c5bb | [
"MIT"
] | null | null | null | lib/ark_client/api/two/vote.ex | supaiku0/elixir-client | 2d5549ce3a876a18750a36a14ff769427688c5bb | [
"MIT"
] | null | null | null | defmodule ArkEcosystem.Client.API.Two.Account do
@moduledoc """
Documentation for ArkEcosystem.Client.API.One.Account.
"""
import ArkClient
@spec account(Tesla.Client.t(), List.t()) :: ArkEcosystem.Client.response()
def list(client, parameters \\ []) do
client |> get("votes", parameters)
end
@spec account(Tesla.Client.t(), String.t()) :: ArkEcosystem.Client.response()
def show(client, id) do
client |> get("votes/#{id}")
end
end
| 25.722222 | 79 | 0.678186 |
9e04a79089c25c867d4298fb5e465cfbeaeef655 | 761 | exs | Elixir | config/test.exs | ysaito8015/communitex | d469447a62029d59883d95df4df3c9b09e0022e2 | [
"Apache-2.0"
] | 7 | 2021-07-14T15:45:55.000Z | 2022-01-25T11:13:01.000Z | config/test.exs | ysaito8015/communitex | d469447a62029d59883d95df4df3c9b09e0022e2 | [
"Apache-2.0"
] | 10 | 2021-08-09T15:54:05.000Z | 2022-02-17T04:18:38.000Z | config/test.exs | ysaito8015/communitex | d469447a62029d59883d95df4df3c9b09e0022e2 | [
"Apache-2.0"
] | 5 | 2021-07-23T05:54:35.000Z | 2022-01-28T04:14:51.000Z | use Mix.Config
# Only in tests, remove the complexity from the password hashing algorithm
config :pbkdf2_elixir, :rounds, 1
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
config :basic, Basic.Repo,
username: "postgres",
password: "postgres",
database: "basic_test#{System.get_env("MIX_TEST_PARTITION")}",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :basic, BasicWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 29.269231 | 74 | 0.751643 |
9e04af7f9fc7b2a59371f3e9310ba5260758f616 | 2,476 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2beta1/model/google_longrunning_operation.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2beta1/model/google_longrunning_operation.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2beta1/model/google_longrunning_operation.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.DLP.V2beta1.Model.GoogleLongrunningOperation do
@moduledoc """
This resource represents a long-running operation that is the result of a network API call.
## Attributes
- done (Boolean): If the value is `false`, it means the operation is still in progress. If true, the operation is completed, and either `error` or `response` is available. Defaults to: `null`.
- error (GoogleRpcStatus): The error result of the operation in case of failure or cancellation. Defaults to: `null`.
- metadata (Object): This field will contain an InspectOperationMetadata object. This will always be returned with the Operation. Defaults to: `null`.
- name (String): The server-assigned name, The `name` should have the format of `inspect/operations/<identifier>`. Defaults to: `null`.
- response (Object): This field will contain an InspectOperationResult object. Defaults to: `null`.
"""
defstruct [
:"done",
:"error",
:"metadata",
:"name",
:"response"
]
end
defimpl Poison.Decoder, for: GoogleApi.DLP.V2beta1.Model.GoogleLongrunningOperation do
import GoogleApi.DLP.V2beta1.Deserializer
def decode(value, options) do
value
|> deserialize(:"error", :struct, GoogleApi.DLP.V2beta1.Model.GoogleRpcStatus, options)
|> deserialize(:"metadata", :struct, GoogleApi.DLP.V2beta1.Model.Object, options)
|> deserialize(:"response", :struct, GoogleApi.DLP.V2beta1.Model.Object, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DLP.V2beta1.Model.GoogleLongrunningOperation do
def encode(value, options) do
GoogleApi.DLP.V2beta1.Deserializer.serialize_non_nil(value, options)
end
end
| 42.689655 | 224 | 0.744346 |
9e04e1c64af1e424e5f1ca1c5c993c6a36e9cd28 | 578 | ex | Elixir | lib/scp_to_anywhere/password.ex | daph/scp_to_anywhere | dacdc1b273af694e3c5cba263e88118eb87fd964 | [
"BSD-3-Clause"
] | null | null | null | lib/scp_to_anywhere/password.ex | daph/scp_to_anywhere | dacdc1b273af694e3c5cba263e88118eb87fd964 | [
"BSD-3-Clause"
] | null | null | null | lib/scp_to_anywhere/password.ex | daph/scp_to_anywhere | dacdc1b273af694e3c5cba263e88118eb87fd964 | [
"BSD-3-Clause"
] | null | null | null | defmodule ScpToAnywhere.Password do
require Logger
def auth(user, pass, _info, _state) do
user = List.to_string(user)
pass = List.to_string(pass)
Logger.info("#{user} #{pass}")
case get_password(user) do
{:ok, user_pass} ->
pass == user_pass
{:error, :no_password} ->
false
end
end
def get_password(user) do
{_, pass, _} =
Application.get_env(:scp_to_anywhere, :users)
|> Enum.find(fn({u, _, _}) -> u == user end)
if pass do
{:ok, pass}
else
{:error, :no_password}
end
end
end
| 20.642857 | 51 | 0.579585 |
9e05367eb5e8b3805988a5307d654b5fbd6a9e35 | 2,938 | exs | Elixir | test/stripe_app_web/controllers/getplan_controller_test.exs | hotpyn/stripe_demo | 2a0ac3ab34a616ffcd6d7e979c25517b5f1636b5 | [
"MIT"
] | null | null | null | test/stripe_app_web/controllers/getplan_controller_test.exs | hotpyn/stripe_demo | 2a0ac3ab34a616ffcd6d7e979c25517b5f1636b5 | [
"MIT"
] | null | null | null | test/stripe_app_web/controllers/getplan_controller_test.exs | hotpyn/stripe_demo | 2a0ac3ab34a616ffcd6d7e979c25517b5f1636b5 | [
"MIT"
] | null | null | null | defmodule StripeAppWeb.GetplanControllerTest do
use StripeAppWeb.ConnCase
alias StripeApp.Products
@create_attrs %{plan_id: 42, price: 120.5, status: 42, stripe_sub_id: "some stripe_sub_id", user_id: 42}
@update_attrs %{plan_id: 43, price: 456.7, status: 43, stripe_sub_id: "some updated stripe_sub_id", user_id: 43}
@invalid_attrs %{plan_id: nil, price: nil, status: nil, stripe_sub_id: nil, user_id: nil}
def fixture(:getplan) do
{:ok, getplan} = Products.create_getplan(@create_attrs)
getplan
end
describe "index" do
test "lists all getplans", %{conn: conn} do
conn = get conn, getplan_path(conn, :index)
assert html_response(conn, 200) =~ "Listing Getplans"
end
end
describe "new getplan" do
test "renders form", %{conn: conn} do
conn = get conn, getplan_path(conn, :new)
assert html_response(conn, 200) =~ "New Getplan"
end
end
describe "create getplan" do
test "redirects to show when data is valid", %{conn: conn} do
conn = post conn, getplan_path(conn, :create), getplan: @create_attrs
assert %{id: id} = redirected_params(conn)
assert redirected_to(conn) == getplan_path(conn, :show, id)
conn = get conn, getplan_path(conn, :show, id)
assert html_response(conn, 200) =~ "Show Getplan"
end
test "renders errors when data is invalid", %{conn: conn} do
conn = post conn, getplan_path(conn, :create), getplan: @invalid_attrs
assert html_response(conn, 200) =~ "New Getplan"
end
end
describe "edit getplan" do
setup [:create_getplan]
test "renders form for editing chosen getplan", %{conn: conn, getplan: getplan} do
conn = get conn, getplan_path(conn, :edit, getplan)
assert html_response(conn, 200) =~ "Edit Getplan"
end
end
describe "update getplan" do
setup [:create_getplan]
test "redirects when data is valid", %{conn: conn, getplan: getplan} do
conn = put conn, getplan_path(conn, :update, getplan), getplan: @update_attrs
assert redirected_to(conn) == getplan_path(conn, :show, getplan)
conn = get conn, getplan_path(conn, :show, getplan)
assert html_response(conn, 200) =~ "some updated stripe_sub_id"
end
test "renders errors when data is invalid", %{conn: conn, getplan: getplan} do
conn = put conn, getplan_path(conn, :update, getplan), getplan: @invalid_attrs
assert html_response(conn, 200) =~ "Edit Getplan"
end
end
describe "delete getplan" do
setup [:create_getplan]
test "deletes chosen getplan", %{conn: conn, getplan: getplan} do
conn = delete conn, getplan_path(conn, :delete, getplan)
assert redirected_to(conn) == getplan_path(conn, :index)
assert_error_sent 404, fn ->
get conn, getplan_path(conn, :show, getplan)
end
end
end
defp create_getplan(_) do
getplan = fixture(:getplan)
{:ok, getplan: getplan}
end
end
| 33.011236 | 114 | 0.674609 |
9e05453770afe37a1975c9a9c1aa7addc44208e1 | 1,250 | ex | Elixir | lib/vutuv_web/endpoint.ex | hendri-tobing/vutuv | 50a3095e236fe96739a79954157b74b4c4025921 | [
"MIT"
] | null | null | null | lib/vutuv_web/endpoint.ex | hendri-tobing/vutuv | 50a3095e236fe96739a79954157b74b4c4025921 | [
"MIT"
] | null | null | null | lib/vutuv_web/endpoint.ex | hendri-tobing/vutuv | 50a3095e236fe96739a79954157b74b4c4025921 | [
"MIT"
] | null | null | null | defmodule VutuvWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :vutuv
socket "/socket", VutuvWeb.UserSocket,
websocket: true,
longpoll: false
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :vutuv,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Logger
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug Plug.Session,
store: :cookie,
key: "_vutuv_key",
signing_salt: "VWnNQeFe"
plug VutuvWeb.Router
end
| 26.595745 | 69 | 0.7064 |
9e0575d106d5f008d102684010f2c325b39d8699 | 637 | ex | Elixir | web/controllers/public_post_controller.ex | allen-garvey/artour | fce27b234d11a3e434c897b5fa3178b7c126245f | [
"MIT"
] | 4 | 2019-10-04T16:11:15.000Z | 2021-08-18T21:00:13.000Z | apps/artour/web/controllers/public_post_controller.ex | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | 5 | 2020-03-16T23:52:25.000Z | 2021-09-03T16:52:17.000Z | apps/artour/web/controllers/public_post_controller.ex | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | null | null | null | defmodule Artour.PublicPostController do
use Artour.Web, :controller
alias Artour.Public
@doc """
Used on public site to show all posts in reverse chronological order
"""
def index(conn, _params) do
posts = Public.list_posts()
render conn, "index.html", page_title: "Posts", posts: posts
end
@doc """
Used on public site to show individual post
"""
def show(conn, %{"slug" => slug}) do
post = Public.get_post_by_slug!(slug)
render conn, "show.html", page_title: Artour.PostView.display_name(post), post: post, is_nsfw: post.is_nsfw, javascript: true, facebook_image: post.cover_image
end
end
| 26.541667 | 163 | 0.703297 |
9e0578e0537de361076ed3257e578799b5741d99 | 2,662 | ex | Elixir | clients/books/lib/google_api/books/v1/model/review.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/books/lib/google_api/books/v1/model/review.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/books/lib/google_api/books/v1/model/review.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Books.V1.Model.Review do
@moduledoc """
## Attributes
- author (ReviewAuthor): Defaults to: `null`.
- content (String.t): Review text. Defaults to: `null`.
- date (String.t): Date of this review. Defaults to: `null`.
- fullTextUrl (String.t): URL for the full review text, for reviews gathered from the web. Defaults to: `null`.
- kind (String.t): Resource type for a review. Defaults to: `null`.
- rating (String.t): Star rating for this review. Possible values are ONE, TWO, THREE, FOUR, FIVE or NOT_RATED. Defaults to: `null`.
- source (ReviewSource): Defaults to: `null`.
- title (String.t): Title for this review. Defaults to: `null`.
- type (String.t): Source type for this review. Possible values are EDITORIAL, WEB_USER or GOOGLE_USER. Defaults to: `null`.
- volumeId (String.t): Volume that this review is for. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:author => GoogleApi.Books.V1.Model.ReviewAuthor.t(),
:content => any(),
:date => any(),
:fullTextUrl => any(),
:kind => any(),
:rating => any(),
:source => GoogleApi.Books.V1.Model.ReviewSource.t(),
:title => any(),
:type => any(),
:volumeId => any()
}
field(:author, as: GoogleApi.Books.V1.Model.ReviewAuthor)
field(:content)
field(:date)
field(:fullTextUrl)
field(:kind)
field(:rating)
field(:source, as: GoogleApi.Books.V1.Model.ReviewSource)
field(:title)
field(:type)
field(:volumeId)
end
defimpl Poison.Decoder, for: GoogleApi.Books.V1.Model.Review do
def decode(value, options) do
GoogleApi.Books.V1.Model.Review.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Books.V1.Model.Review do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.493333 | 134 | 0.684448 |
9e059bd36c21ce298dcff1dc07739113285825b7 | 6,720 | ex | Elixir | host_core/lib/host_core/host.ex | adobe-platform/wasmcloud-otp | bcfcdf9814bc529e67c954eacabdc9a05c772cfa | [
"Apache-2.0"
] | null | null | null | host_core/lib/host_core/host.ex | adobe-platform/wasmcloud-otp | bcfcdf9814bc529e67c954eacabdc9a05c772cfa | [
"Apache-2.0"
] | null | null | null | host_core/lib/host_core/host.ex | adobe-platform/wasmcloud-otp | bcfcdf9814bc529e67c954eacabdc9a05c772cfa | [
"Apache-2.0"
] | null | null | null | defmodule HostCore.Host do
@moduledoc false
use GenServer, restart: :transient
alias HostCore.CloudEvent
require Logger
# To set this value in a release, edit the `env.sh` file that is generated
# by a mix release.
defmodule State do
@moduledoc false
defstruct [:host_key, :lattice_prefix]
end
@doc """
Starts the host server
"""
def start_link(opts) do
GenServer.start_link(__MODULE__, opts, name: __MODULE__)
end
@doc """
NATS is used for all lattice communications, which includes communication between actors and capability providers,
whether those capability providers are local or remote.
The following is an outline of the important subject spaces required for providers, the host, and RPC listeners. All
subscriptions are not in a queue group unless otherwise specified.
* `wasmbus.rpc.{prefix}.{public_key}` - Send invocations to an actor Invocation->InvocationResponse
* `wasmbus.rpc.{prefix}.{public_key}.{link_name}` - Send invocations (from actors only) to Providers Invocation->InvocationResponse
* `wasmbus.rpc.{prefix}.{public_key}.{link_name}.linkdefs.put` - Publish link definition (e.g. bind to an actor)
* `wasmbus.rpc.{prefix}.{public_key}.{link_name}.linkdefs.get` - Query all link defs for this provider. (queue subscribed)
* `wasmbus.rpc.{prefix}.{public_key}.{link_name}.linkdefs.del` - Remove a link def.
* `wasmbus.rpc.{prefix}.claims.put` - Publish discovered claims
* `wasmbus.rpc.{prefix}.claims.get` - Query all claims (queue subscribed by hosts)
* `wasmbus.rpc.{prefix}.refmaps.put` - Publish a reference map, e.g. OCI ref -> PK, call alias -> PK
* `wasmbus.rpc.{prefix}.refmaps.get` - Query all reference maps (queue subscribed by hosts)
"""
@impl true
def init(opts) do
configure_ets()
:ets.insert(:config_table, {:config, opts})
Logger.info("Host #{opts.host_key} started.")
Logger.info("Valid cluster signers: #{opts.cluster_issuers}")
if opts.cluster_adhoc do
warning = """
WARNING. You are using an ad hoc generated cluster seed.
For any other host or CLI tool to communicate with this host,
you MUST copy the following seed key and use it as the value
of the WASMCLOUD_CLUSTER_SEED environment variable:
#{opts.cluster_seed}
You must also ensure the following cluster signer is in the list of valid
signers for any new host you start:
#{opts.cluster_issuers |> Enum.at(0)}
"""
Logger.warn(warning)
end
publish_host_started()
{:ok,
%State{
host_key: opts.host_key,
lattice_prefix: opts.lattice_prefix
}}
end
defp get_env_host_labels() do
keys =
System.get_env() |> Map.keys() |> Enum.filter(fn k -> String.starts_with?(k, "HOST_") end)
Map.new(keys, fn k ->
{String.slice(k, 5..999) |> String.downcase(), System.get_env(k, "")}
end)
end
@impl true
def handle_call(:get_labels, _from, state) do
labels = get_env_host_labels()
labels = Map.merge(labels, HostCore.WasmCloud.Native.detect_core_host_labels())
{:reply, labels, state}
end
@impl true
def handle_info({:do_stop, _timeout_ms}, state) do
# TODO: incorporate timeout into graceful shutdown
purge()
publish_host_stopped()
# Give a little bit of time for the event to get sent before shutting down
:timer.sleep(300)
:init.stop()
{:noreply, state}
end
defp publish_host_stopped() do
prefix = HostCore.Host.lattice_prefix()
msg =
%{}
|> CloudEvent.new("host_stopped")
topic = "wasmbus.evt.#{prefix}"
Gnat.pub(:control_nats, topic, msg)
end
defp publish_host_started() do
prefix = HostCore.Host.lattice_prefix()
msg =
%{}
|> CloudEvent.new("host_started")
topic = "wasmbus.evt.#{prefix}"
Gnat.pub(:control_nats, topic, msg)
end
defp configure_ets() do
:ets.new(:provider_table, [:named_table, :set, :public])
:ets.new(:linkdef_table, [:named_table, :set, :public])
:ets.new(:claims_table, [:named_table, :set, :public])
:ets.new(:refmap_table, [:named_table, :set, :public])
:ets.new(:callalias_table, [:named_table, :set, :public])
:ets.new(:config_table, [:named_table, :set, :public])
end
def lattice_prefix() do
case :ets.lookup(:config_table, :config) do
[config: config_map] -> config_map[:lattice_prefix]
_ -> "default"
end
end
def host_key() do
case :ets.lookup(:config_table, :config) do
[config: config_map] -> config_map[:host_key]
_ -> ""
end
end
def seed() do
case :ets.lookup(:config_table, :config) do
[config: config_map] -> config_map[:host_seed]
_ -> ""
end
end
def cluster_seed() do
case :ets.lookup(:config_table, :config) do
[config: config_map] -> config_map[:cluster_seed]
_ -> ""
end
end
def provider_shutdown_delay() do
case :ets.lookup(:config_table, :config) do
[config: config_map] -> config_map[:provider_delay]
_ -> 300
end
end
def rpc_timeout() do
case :ets.lookup(:config_table, :config) do
[config: config_map] -> config_map[:rpc_timeout]
_ -> 2_000
end
end
def cluster_issuers() do
case :ets.lookup(:config_table, :config) do
[config: config_map] -> config_map[:cluster_issuers]
_ -> []
end
end
def host_labels() do
GenServer.call(__MODULE__, :get_labels)
end
def purge() do
Logger.info("Host purge requested")
HostCore.Actors.ActorSupervisor.terminate_all()
HostCore.Providers.ProviderSupervisor.terminate_all()
end
def generate_hostinfo_for(provider_key, link_name, instance_id) do
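    # Builds the JSON host-info payload handed to a capability provider at start-up:
    # NATS RPC connection settings, the lattice prefix, any matching link definitions
    # for this provider/link name, and the cluster issuers/seed used for invocations.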
{url, jwt, seed, tls} =
case :ets.lookup(:config_table, :config) do
[config: config_map] ->
{"#{config_map[:prov_rpc_host]}:#{config_map[:prov_rpc_port]}",
config_map[:prov_rpc_jwt], config_map[:prov_rpc_seed], config_map[:prov_rpc_tls]}
_ ->
{"127.0.0.1:4222", "", ""}
end
lds =
HostCore.Linkdefs.Manager.get_link_definitions()
|> Enum.filter(fn %{link_name: ln, provider_id: prov} ->
ln == link_name && prov == provider_key
end)
%{
host_id: host_key(),
lattice_rpc_prefix: lattice_prefix(),
link_name: link_name,
lattice_rpc_user_jwt: jwt,
lattice_rpc_user_seed: seed,
lattice_rpc_url: url,
lattice_rpc_tls: tls,
instance_id: instance_id,
provider_key: provider_key,
link_definitions: lds,
# TODO
env_values: %{},
cluster_issuers: cluster_issuers(),
invocation_seed: cluster_seed()
}
|> Jason.encode!()
end
end
| 28.595745 | 134 | 0.661905 |
9e05a3842e4a1fd40e5308b2227621c8b03fcd64 | 452 | ex | Elixir | apps/resource_manager/lib/identities/client_applications.ex | dcdourado/watcher_ex | ce80df81610a6e9b77612911aac2a6d6cf4de8d5 | [
"Apache-2.0"
] | 9 | 2020-10-13T14:11:37.000Z | 2021-08-12T18:40:08.000Z | apps/resource_manager/lib/identities/client_applications.ex | dcdourado/watcher_ex | ce80df81610a6e9b77612911aac2a6d6cf4de8d5 | [
"Apache-2.0"
] | 28 | 2020-10-04T14:43:48.000Z | 2021-12-07T16:54:22.000Z | apps/resource_manager/lib/identities/client_applications.ex | dcdourado/watcher_ex | ce80df81610a6e9b77612911aac2a6d6cf4de8d5 | [
"Apache-2.0"
] | 3 | 2020-11-25T20:59:47.000Z | 2021-08-30T10:36:58.000Z | defmodule ResourceManager.Identities.ClientApplications do
@moduledoc """
  Client applications are subject identities that are not impersonated by a person.
  A client application is allowed to perform certain actions once it has successfully
  authenticated with the authentication provider.
  What a client application can do is defined by its scopes.
"""
use ResourceManager.Domain, schema_model: ResourceManager.Identities.Schemas.ClientApplication
end
| 34.769231 | 96 | 0.807522 |
9e05ec25e8605b4aa857fb1a183921b955710ed7 | 1,545 | ex | Elixir | lib/hive/vehicle/vehicle_supervisor.ex | hive-fleet/hive-state | 12478d8540cc93863237f6456ade0de68c56501e | [
"Apache-2.0"
] | 5 | 2020-05-05T17:10:44.000Z | 2021-03-24T20:37:48.000Z | lib/hive/vehicle/vehicle_supervisor.ex | hive-fleet/hive-state | 12478d8540cc93863237f6456ade0de68c56501e | [
"Apache-2.0"
] | 10 | 2020-05-09T22:06:07.000Z | 2020-05-09T22:10:14.000Z | lib/hive/vehicle/vehicle_supervisor.ex | hive-fleet/hive-state | 12478d8540cc93863237f6456ade0de68c56501e | [
"Apache-2.0"
] | 2 | 2020-05-06T14:47:33.000Z | 2021-06-11T21:12:29.000Z | defmodule Hive.VehicleSupervisor do
@moduledoc false
use DynamicSupervisor
require Logger
use Hive.Base
import Hive.Vehicle.Helpers
# Client
def start_link(arg) do
DynamicSupervisor.start_link(@mod, arg, name: @mod)
end
@doc """
  Infleet a `%Vehicle{}` and supervise it, so that additional information about the
  `%Vehicle{}` can be kept while it is part of the fleet.
  Returns `{:error, {:already_started, pid}}` if the vehicle is already infleeted.
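  For example (the vehicle id is a placeholder):

      {:ok, _pid} = Hive.VehicleSupervisor.infleet(%Vehicle{id: "vehicle-123"})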
"""
def infleet(%Vehicle{} = vehicle) do
DynamicSupervisor.start_child(@mod, {VehicleWorker, vehicle})
end
@doc """
  Defleet a vehicle by stopping its worker process. If no process exists for the
  vehicle, `{:error, :not_found}` is returned.
"""
def defleet(%Vehicle{} = vehicle) do
proc_name = make_name(vehicle)
case Registry.lookup(@registry, proc_name) do
[{pid, _}] ->
Logger.info("Stopping process name=#{proc_name} with pid=#{inspect(pid)}")
GenServer.stop(pid)
{:ok, pid}
_ ->
Logger.info("Process not found name=#{proc_name}")
{:error, :not_found}
end
end
@doc """
Check if `VehicleWorker` is running.
"""
def alive?(proc_name) do
case Registry.lookup(@registry, proc_name) do
[{_pid, _}] -> true
_ -> false
end
end
@doc """
Get state of a given vehicle
"""
def get_vehicle(vehicle_id) do
if alive?(make_name(%Vehicle{id: vehicle_id})) do
VehicleWorker.get_vehicle(vehicle_id)
else
nil
end
end
# Server
def init(_arg) do
DynamicSupervisor.init(strategy: :one_for_one)
end
end
| 22.720588 | 82 | 0.644013 |
9e060941ecf270b79c4a9aa4c26882ce1e9acf95 | 896 | ex | Elixir | clients/health_care/lib/google_api/health_care/v1beta1/metadata.ex | MShaffar19/elixir-google-api | aac92cd85f423e6d08c9571ee97cf21545df163f | [
"Apache-2.0"
] | null | null | null | clients/health_care/lib/google_api/health_care/v1beta1/metadata.ex | MShaffar19/elixir-google-api | aac92cd85f423e6d08c9571ee97cf21545df163f | [
"Apache-2.0"
] | null | null | null | clients/health_care/lib/google_api/health_care/v1beta1/metadata.ex | MShaffar19/elixir-google-api | aac92cd85f423e6d08c9571ee97cf21545df163f | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HealthCare.V1beta1 do
@moduledoc """
API client metadata for GoogleApi.HealthCare.V1beta1.
"""
@discovery_revision "20200917"
def discovery_revision(), do: @discovery_revision
end
| 33.185185 | 74 | 0.762277 |
9e064f6c7aa1ae8fae6505ca4aa1428bd25ac34e | 1,838 | ex | Elixir | clients/android_management/lib/google_api/android_management/v1/model/managed_configuration_template.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/android_management/lib/google_api/android_management/v1/model/managed_configuration_template.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/android_management/lib/google_api/android_management/v1/model/managed_configuration_template.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.AndroidManagement.V1.Model.ManagedConfigurationTemplate do
@moduledoc """
The managed configurations template for the app, saved from the managed configurations iframe.
## Attributes
* `configurationVariables` (*type:* `map()`, *default:* `nil`) - Optional, a map containing <key, value> configuration variables defined for the configuration.
* `templateId` (*type:* `String.t`, *default:* `nil`) - The ID of the managed configurations template.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:configurationVariables => map(),
:templateId => String.t()
}
field(:configurationVariables, type: :map)
field(:templateId)
end
defimpl Poison.Decoder, for: GoogleApi.AndroidManagement.V1.Model.ManagedConfigurationTemplate do
def decode(value, options) do
GoogleApi.AndroidManagement.V1.Model.ManagedConfigurationTemplate.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AndroidManagement.V1.Model.ManagedConfigurationTemplate do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
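# Illustrative sketch (not part of the generated client): building the model
# struct above directly. In practice the client decodes API JSON responses
# into it; the field values here are made up.
defmodule ManagedConfigurationTemplateExample do
  alias GoogleApi.AndroidManagement.V1.Model.ManagedConfigurationTemplate, as: Template

  @spec sample() :: Template.t()
  def sample do
    %Template{
      templateId: "template-123",
      configurationVariables: %{"%USER%" => "alice@example.com"}
    }
  end
end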
| 36.76 | 163 | 0.749184 |
9e06986c0bf8080bfc0706cb779c617105861957 | 990 | ex | Elixir | lib/ikki/endpoint.ex | ulissesalmeida/ikki | a6f3104acea76a0eac917e98f2c8df910c668fdc | [
"MIT"
] | 15 | 2015-09-01T03:38:30.000Z | 2019-01-29T16:16:12.000Z | lib/ikki/endpoint.ex | ulissesalmeida/ikki | a6f3104acea76a0eac917e98f2c8df910c668fdc | [
"MIT"
] | null | null | null | lib/ikki/endpoint.ex | ulissesalmeida/ikki | a6f3104acea76a0eac917e98f2c8df910c668fdc | [
"MIT"
] | 3 | 2015-09-18T18:42:29.000Z | 2018-09-25T21:45:30.000Z | defmodule Ikki.Endpoint do
use Phoenix.Endpoint, otp_app: :ikki
socket "/socket", Ikki.UserSocket
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phoenix.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/", from: :ikki, gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Logger
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Poison
plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session,
store: :cookie,
key: "_ikki_key",
signing_salt: "y6Epglhm"
plug Ikki.Router
end
| 24.75 | 69 | 0.70404 |
9e06be6ce6f024357e0be2aff71481fbacb4f076 | 1,737 | ex | Elixir | lib/sentinel/web/controllers/html/account_controller.ex | suranyami/sentinel | aeb421e2e61a4bc14abe89b4a92cb1943a5965fb | [
"MIT"
] | null | null | null | lib/sentinel/web/controllers/html/account_controller.ex | suranyami/sentinel | aeb421e2e61a4bc14abe89b4a92cb1943a5965fb | [
"MIT"
] | null | null | null | lib/sentinel/web/controllers/html/account_controller.ex | suranyami/sentinel | aeb421e2e61a4bc14abe89b4a92cb1943a5965fb | [
"MIT"
] | null | null | null | defmodule Sentinel.Controllers.Html.AccountController do
@moduledoc """
Handles the account show and update actions
"""
use Phoenix.Controller
alias Sentinel.{Config, Update}
plug :put_layout, {Config.layout_view, Config.layout}
plug Sentinel.AuthenticatedPipeline
@doc """
Get the account data for the current user
"""
def edit(conn, _params) do
current_user = Sentinel.Guardian.Plug.current_resource(conn)
changeset = Config.user_model.changeset(current_user, %{})
render(conn, Config.views.user, "edit.html", %{conn: conn, user: current_user, changeset: changeset})
end
@doc """
Update email address or user params of the current user.
  If the email address is to be updated, the user will receive a confirmation email at their new address.
  The stored email address will only be updated after the user clicks the link in that message.
"""
def update(conn, %{"account" => params}) do
current_user = Sentinel.Guardian.Plug.current_resource(conn)
case Update.update(current_user, params) do
{:ok, %{user: updated_user, auth: _auth, confirmation_token: confirmation_token}} ->
Update.maybe_send_new_email_address_confirmation_email(updated_user, confirmation_token)
new_changeset = Config.user_model.changeset(updated_user, %{})
conn
|> put_flash(:info, "Successfully updated user account")
|> render(Config.views.user, "edit.html", %{conn: conn, user: updated_user, changeset: new_changeset})
{:error, changeset} ->
conn
|> put_status(422)
|> put_flash(:error, "Failed to update user account")
|> render(Config.views.user, "edit.html", %{conn: conn, user: current_user, changeset: changeset})
end
end
end
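# Illustrative sketch (not part of Sentinel): the update/2 action above expects
# an "account" payload; the keys below are hypothetical and only show the
# expected shape of the params map.
defmodule SentinelAccountParamsExample do
  def example_params do
    %{
      "account" => %{
        "email" => "new-address@example.com",
        "password" => "a sufficiently long new password"
      }
    }
  end
end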
| 37.76087 | 110 | 0.70639 |
9e06c8fc80bf626a4ebd35f2ba4cb2e73ebf40dd | 6,635 | ex | Elixir | apps/omg_watcher/lib/omg_watcher/exit_processor/exit_info.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/lib/omg_watcher/exit_processor/exit_info.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/lib/omg_watcher/exit_processor/exit_info.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019-2020 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Watcher.ExitProcessor.ExitInfo do
@moduledoc """
Represents the bulk of information about a tracked exit.
Internal stuff of `OMG.Watcher.ExitProcessor`
"""
alias OMG.Crypto
alias OMG.State.Transaction
alias OMG.Utxo
alias OMG.Watcher.Event
require Utxo
@enforce_keys [
:amount,
:currency,
:owner,
:exit_id,
:exiting_txbytes,
:is_active,
:eth_height,
:root_chain_txhash,
:scheduled_finalization_time,
:block_timestamp,
:spending_txhash
]
defstruct @enforce_keys
@type t :: %__MODULE__{
amount: non_neg_integer(),
currency: Crypto.address_t(),
owner: Crypto.address_t(),
exit_id: non_neg_integer(),
# the transaction creating the exiting output
exiting_txbytes: Transaction.tx_bytes(),
          # this means the exit was active when first seen. If false, it won't be considered harmful
is_active: boolean(),
eth_height: pos_integer(),
root_chain_txhash: Transaction.tx_hash() | nil,
scheduled_finalization_time: pos_integer() | nil,
block_timestamp: pos_integer() | nil,
spending_txhash: Transaction.tx_hash() | nil
}
@spec new(map(), map()) :: t()
def new(
contract_status,
%{
eth_height: eth_height,
call_data: %{output_tx: txbytes},
exit_id: exit_id,
root_chain_txhash: root_chain_txhash,
scheduled_finalization_time: scheduled_finalization_time,
block_timestamp: block_timestamp
} = exit_event
) do
Utxo.position(_, _, oindex) = utxo_pos_for(exit_event)
{:ok, raw_tx} = Transaction.decode(txbytes)
%{amount: amount, currency: currency, owner: owner} = raw_tx |> Transaction.get_outputs() |> Enum.at(oindex)
do_new(contract_status,
amount: amount,
currency: currency,
owner: owner,
exit_id: exit_id,
exiting_txbytes: txbytes,
eth_height: eth_height,
root_chain_txhash: root_chain_txhash,
scheduled_finalization_time: scheduled_finalization_time,
block_timestamp: block_timestamp,
spending_txhash: nil
)
end
def new_key(_contract_status, exit_info),
do: utxo_pos_for(exit_info)
defp utxo_pos_for(%{call_data: %{utxo_pos: utxo_pos_enc}} = _exit_info),
do: Utxo.Position.decode!(utxo_pos_enc)
@spec do_new(map(), list(keyword())) :: t()
defp do_new(contract_status, fields) do
fields = Keyword.put_new(fields, :is_active, parse_contract_exit_status(contract_status))
struct!(__MODULE__, fields)
end
@spec make_event_data(Event.module_t(), Utxo.Position.t(), t()) :: struct()
def make_event_data(type, position, exit_info) do
struct(
type,
exit_info |> Map.from_struct() |> Map.put(:utxo_pos, Utxo.Position.encode(position))
)
end
# NOTE: we have no migrations, so we handle data compatibility here (make_db_update/1 and from_db_kv/1), OMG-421
@spec make_db_update({Utxo.Position.t(), t()}) :: {:put, :exit_info, {Utxo.Position.db_t(), map()}}
def make_db_update({position, exit_info}) do
value = %{
amount: exit_info.amount,
currency: exit_info.currency,
owner: exit_info.owner,
exit_id: exit_info.exit_id,
exiting_txbytes: exit_info.exiting_txbytes,
is_active: exit_info.is_active,
eth_height: exit_info.eth_height,
root_chain_txhash: exit_info.root_chain_txhash,
scheduled_finalization_time: exit_info.scheduled_finalization_time,
block_timestamp: exit_info.block_timestamp
}
{:put, :exit_info, {Utxo.Position.to_db_key(position), value}}
end
@spec from_db_kv({Utxo.Position.db_t(), map()}) :: {Utxo.Position.t(), t()}
def from_db_kv({db_utxo_pos, exit_info}) do
# mapping is used in case of changes in data structure
value = %{
amount: exit_info.amount,
currency: exit_info.currency,
owner: exit_info.owner,
exit_id: exit_info.exit_id,
exiting_txbytes: exit_info.exiting_txbytes,
is_active: exit_info.is_active,
eth_height: exit_info.eth_height,
spending_txhash: nil,
# defaults value to nil if non-existent in the DB.
root_chain_txhash: Map.get(exit_info, :root_chain_txhash),
scheduled_finalization_time: Map.get(exit_info, :scheduled_finalization_time),
block_timestamp: Map.get(exit_info, :block_timestamp)
}
{Utxo.Position.from_db_key(db_utxo_pos), struct!(__MODULE__, value)}
end
# processes the return value of `Eth.get_standard_exit_structs(exit_ids)`
# `exitable` will be `false` if the exit was challenged
  # `exitable` will be `false` ALONG WITH the whole tuple holding zeroes, if the exit was processed successfully
  # **NOTE** one can only rely on whether this data is zero or non-zero, since for processed exits this data will be all zeros
defp parse_contract_exit_status({exitable, _, _, _, _, _}), do: exitable
  # Based on the block number, determines whether the UTXO was created by a deposit.
defguardp is_deposit(blknum, child_block_interval) when rem(blknum, child_block_interval) != 0
@doc """
Calculates the time at which an exit can be processed and released if not challenged successfully.
See https://docs.omg.network/challenge-period for calculation logic.
"""
@spec calculate_sft(
blknum :: pos_integer(),
exit_block_timestamp :: pos_integer(),
utxo_creation_timestamp :: pos_integer(),
min_exit_period :: pos_integer(),
child_block_interval :: pos_integer()
) ::
{:ok, pos_integer()}
def calculate_sft(blknum, exit_block_timestamp, utxo_creation_timestamp, min_exit_period, child_block_interval) do
case is_deposit(blknum, child_block_interval) do
true ->
{:ok, max(exit_block_timestamp + min_exit_period, utxo_creation_timestamp + min_exit_period)}
false ->
{:ok, max(exit_block_timestamp + min_exit_period, utxo_creation_timestamp + 2 * min_exit_period)}
end
end
end
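# Illustrative sketch (not part of the Watcher sources): calling calculate_sft/5
# with made-up timestamps. A deposit UTXO gets one minimum exit period after its
# creation; a plasma-block UTXO gets two.
defmodule ExitInfoSftExample do
  alias OMG.Watcher.ExitProcessor.ExitInfo

  def demo do
    min_exit_period = 7 * 24 * 60 * 60
    child_block_interval = 1_000

    # blknum 1_001 is not a multiple of the interval, so it counts as a deposit
    {:ok, deposit_sft} =
      ExitInfo.calculate_sft(1_001, 1_600_000_000, 1_599_000_000, min_exit_period, child_block_interval)

    # blknum 2_000 is a multiple of the interval, so it is a plasma block
    {:ok, block_sft} =
      ExitInfo.calculate_sft(2_000, 1_600_000_000, 1_599_000_000, min_exit_period, child_block_interval)

    {deposit_sft, block_sft}
  end
end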
| 36.456044 | 118 | 0.695855 |
9e06dec6c9f1b6b8d0601e5d48cc51068102429b | 403 | exs | Elixir | Getting-Started-with-Phoenix/rsvp_umbrella/mix.exs | timothydang/elixir-phoenix-training | f21ca5b4b48bf8c7188897d09b9d6565f7a46f0e | [
"Apache-2.0"
] | null | null | null | Getting-Started-with-Phoenix/rsvp_umbrella/mix.exs | timothydang/elixir-phoenix-training | f21ca5b4b48bf8c7188897d09b9d6565f7a46f0e | [
"Apache-2.0"
] | 3 | 2020-07-17T15:52:56.000Z | 2021-05-09T22:33:57.000Z | Getting-Started-with-Phoenix/rsvp_umbrella/mix.exs | timothydang/elixir-phoenix-training | f21ca5b4b48bf8c7188897d09b9d6565f7a46f0e | [
"Apache-2.0"
] | null | null | null | defmodule RsvpUmbrella.MixProject do
use Mix.Project
def project do
[
apps_path: "apps",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Dependencies listed here are available only for this
# project and cannot be accessed from applications inside
# the apps folder.
#
# Run "mix help deps" for examples and options.
defp deps do
[]
end
end
| 19.190476 | 59 | 0.655087 |
9e06f5330a74fa13110b691acca0d914725c6dad | 617 | ex | Elixir | lib/sanbase_web/admin/model/contract_address.ex | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | 81 | 2017-11-20T01:20:22.000Z | 2022-03-05T12:04:25.000Z | lib/sanbase_web/admin/model/contract_address.ex | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 359 | 2017-10-15T14:40:53.000Z | 2022-01-25T13:34:20.000Z | lib/sanbase_web/admin/model/contract_address.ex | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | 16 | 2017-11-19T13:57:40.000Z | 2022-02-07T08:13:02.000Z | defmodule SanbaseWeb.ExAdmin.Model.Project.ContractAddress do
use ExAdmin.Register
import Ecto.Query
register_resource Sanbase.Model.Project.ContractAddress do
controller do
after_filter(:set_defaults, only: [:new])
end
end
def set_defaults(conn, params, resource, :new) do
{conn, params, resource |> set_project_default(params)}
end
defp set_project_default(%{project_id: nil} = contract_address, params) do
Map.get(params, :project_id, nil)
|> case do
nil -> contract_address
project_id -> Map.put(contract_address, :project_id, project_id)
end
end
end
| 25.708333 | 76 | 0.721232 |
9e070ac13150823dfbd146b8affa3e4ff9d4176d | 557 | exs | Elixir | exercises/concept/chessboard/test/chessboard_test.exs | SaschaMann/elixir | 2489747bba72a0ba5efa27e7e00441a428fdf987 | [
"MIT"
] | 1 | 2021-06-09T06:57:02.000Z | 2021-06-09T06:57:02.000Z | exercises/concept/chessboard/test/chessboard_test.exs | SaschaMann/elixir | 2489747bba72a0ba5efa27e7e00441a428fdf987 | [
"MIT"
] | 6 | 2022-03-04T13:05:25.000Z | 2022-03-30T18:36:49.000Z | exercises/concept/chessboard/test/chessboard_test.exs | SaschaMann/elixir | 2489747bba72a0ba5efa27e7e00441a428fdf987 | [
"MIT"
] | null | null | null | defmodule ChessboardTest do
use ExUnit.Case
@task_id 1
test "rank_range is a range from 1 to 8" do
assert Chessboard.rank_range() == 1..8
end
@task_id 2
test "file_range is a range from ?A to ?H" do
assert Chessboard.file_range() == ?A..?H
end
@task_id 3
test "ranks is a list of integers from 1 to 8" do
assert Chessboard.ranks() == [1, 2, 3, 4, 5, 6, 7, 8]
end
@task_id 4
test "files is a list of letters (strings) from A to H" do
assert Chessboard.files() == ["A", "B", "C", "D", "E", "F", "G", "H"]
end
end
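# Illustrative sketch (not the official exercise solution): the tests above
# spell out the expected API; a minimal implementation satisfying them could
# look like this.
defmodule ChessboardSketch do
  def rank_range, do: 1..8
  def file_range, do: ?A..?H

  def ranks, do: Enum.to_list(rank_range())
  def files, do: Enum.map(file_range(), &<<&1>>)
end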
| 23.208333 | 73 | 0.612208 |
9e071abc85c503c26994d96c92c4be50d9522f3c | 2,624 | exs | Elixir | test/delivery/packages_test.exs | Simon-Initiative/proving-ground | ab6ba03051e55edbaa09535f11d9192776c8d901 | [
"MIT"
] | null | null | null | test/delivery/packages_test.exs | Simon-Initiative/proving-ground | ab6ba03051e55edbaa09535f11d9192776c8d901 | [
"MIT"
] | 4 | 2021-03-01T20:51:23.000Z | 2021-09-20T21:37:12.000Z | test/delivery/packages_test.exs | Simon-Initiative/proving-ground | ab6ba03051e55edbaa09535f11d9192776c8d901 | [
"MIT"
] | null | null | null | defmodule Delivery.PackagesTest do
use Delivery.DataCase
alias Delivery.Packages
describe "packages" do
alias Delivery.Packages.Package
@valid_attrs %{description: "some description", friendly: "some friendly", title: "some title", version: "some version"}
@update_attrs %{description: "some updated description", friendly: "some updated friendly", title: "some updated title", version: "some updated version"}
@invalid_attrs %{description: nil, friendly: nil, title: nil, version: nil}
def package_fixture(attrs \\ %{}) do
{:ok, package} =
attrs
|> Enum.into(@valid_attrs)
|> Packages.create_package()
package
end
test "list_packages/0 returns all packages" do
package = package_fixture()
assert Packages.list_packages() == [package]
end
test "get_package!/1 returns the package with given id" do
package = package_fixture()
assert Packages.get_package!(package.id) == package
end
test "create_package/1 with valid data creates a package" do
assert {:ok, %Package{} = package} = Packages.create_package(@valid_attrs)
assert package.description == "some description"
assert package.friendly == "some friendly"
assert package.title == "some title"
assert package.version == "some version"
end
test "create_package/1 with invalid data returns error changeset" do
assert {:error, %Ecto.Changeset{}} = Packages.create_package(@invalid_attrs)
end
test "update_package/2 with valid data updates the package" do
package = package_fixture()
assert {:ok, %Package{} = package} = Packages.update_package(package, @update_attrs)
assert package.description == "some updated description"
assert package.friendly == "some updated friendly"
assert package.title == "some updated title"
assert package.version == "some updated version"
end
test "update_package/2 with invalid data returns error changeset" do
package = package_fixture()
assert {:error, %Ecto.Changeset{}} = Packages.update_package(package, @invalid_attrs)
assert package == Packages.get_package!(package.id)
end
test "delete_package/1 deletes the package" do
package = package_fixture()
assert {:ok, %Package{}} = Packages.delete_package(package)
assert_raise Ecto.NoResultsError, fn -> Packages.get_package!(package.id) end
end
test "change_package/1 returns a package changeset" do
package = package_fixture()
assert %Ecto.Changeset{} = Packages.change_package(package)
end
end
end
| 36.957746 | 157 | 0.691692 |
9e07387e04f1e57ea7ae53bdb12409e72e50ffb3 | 1,294 | ex | Elixir | stats.ex | dcarneiro/etudes-for-elixir | 807d21942747817d84fc2890806582b59921bd59 | [
"MIT"
] | null | null | null | stats.ex | dcarneiro/etudes-for-elixir | 807d21942747817d84fc2890806582b59921bd59 | [
"MIT"
] | null | null | null | stats.ex | dcarneiro/etudes-for-elixir | 807d21942747817d84fc2890806582b59921bd59 | [
"MIT"
] | null | null | null | defmodule Stats do
def minimum(list) do
try do
[head|tail] = list
minimum(tail, head)
rescue
err -> err
end
end
defp minimum([], result) do
result
end
defp minimum([head|tail], result) when result < head do
minimum(tail, result)
end
defp minimum([head|tail], _result) do
minimum(tail, head)
end
def maximum(list) do
[head|tail] = list
maximum(tail, head)
end
defp maximum([], result) do
result
end
defp maximum([head|tail], result) when result > head do
maximum(tail, result)
end
defp maximum([head|tail], _result) do
maximum(tail, head)
end
def range(list) do
[minimum(list), maximum(list)]
end
@doc """
Returns the mean of a list of numbers.
"""
def mean(list) do
try do
sum = List.foldl(list, 0, fn (x, acc) -> x + acc end)
sum / Enum.count(list)
rescue
err -> err
end
end
@doc """
Returns the standard deviation of the list
"""
def stdv(list) do
try do
sum = List.foldl(list, 0, fn (x, acc) -> x + acc end)
sum_of_squares = List.foldl(list, 0, fn (x, acc) -> x * x + acc end)
n = Enum.count(list)
:math.sqrt((n * sum_of_squares - sum * sum)/(n * (n - 1)))
rescue
err -> err
end
end
end
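# Illustrative usage sketch (not part of the étude); expected results are shown
# as comments, and stdv/1 here is the sample standard deviation.
defmodule StatsExample do
  def demo do
    data = [7, 2, 9, 4]

    [
      minimum: Stats.minimum(data), # => 2
      maximum: Stats.maximum(data), # => 9
      range: Stats.range(data),     # => [2, 9]
      mean: Stats.mean(data),       # => 5.5
      stdv: Stats.stdv(data)        # => 3.109...
    ]
  end
end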
| 18.485714 | 74 | 0.578053 |
9e073ad408bd1a845d55c3bd263b542eec2e27eb | 1,995 | exs | Elixir | test/ai_web/controllers/api/v1/user_controller_test.exs | mirai-audio/- | 365c0fba614543bf40ebaae55de47bc541bd473f | [
"MIT"
] | null | null | null | test/ai_web/controllers/api/v1/user_controller_test.exs | mirai-audio/- | 365c0fba614543bf40ebaae55de47bc541bd473f | [
"MIT"
] | null | null | null | test/ai_web/controllers/api/v1/user_controller_test.exs | mirai-audio/- | 365c0fba614543bf40ebaae55de47bc541bd473f | [
"MIT"
] | null | null | null | defmodule AiWeb.API.V1.UserControllerTest do
use AiWeb.ConnCase
alias Ai.Accounts.Credential
@valid_attrs %{
email: "[email protected]",
password: "aaabbbcccddd",
"password-confirmation": "aaabbbcccddd"
}
@invalid_attrs %{}
setup %{conn: conn} do
conn =
conn
|> put_req_header("accept", "application/vnd.api+json")
|> put_req_header("content-type", "application/vnd.api+json")
{:ok, conn: conn}
end
test "creates and renders resource when data is valid", %{conn: conn} do
conn =
post(conn, user_path(conn, :create), %{
data: %{
type: "users",
attributes: @valid_attrs
}
})
assert json_response(conn, 201)["data"]["id"]
assert Repo.get_by(Credential, %{provider_uid: @valid_attrs[:email]})
end
test "does not create resource and renders errors when data is empty", %{conn: conn} do
assert_error_sent(400, fn ->
_conn =
post(conn, user_path(conn, :create), %{
data: %{type: "users", attributes: @invalid_attrs}
})
end)
end
# [email protected]
test "does not create resource and renders errors when email is too short: [email protected]", %{conn: conn} do
assert_error_sent(400, fn ->
_conn =
post(conn, user_path(conn, :create), %{
data: %{
type: "users",
attributes: %{
email: "[email protected]",
password: "aaabbbcccddd",
password_confirmation: "aaabbbcccddd"
}
}
})
end)
end
  # aXbb.cc
test "does not create resource and renders errors when email is invalid: aXbb.cc", %{conn: conn} do
assert_error_sent(400, fn ->
_conn =
post(conn, user_path(conn, :create), %{
data: %{
type: "users",
attributes: %{
email: "aXbb.cc",
password: "aaabbbcccddd",
password_confirmation: "aaabbbcccddd"
}
}
})
end)
end
end
| 25.909091 | 101 | 0.556391 |
9e074116fe6785d6257043396d05e5b6b82a8f2e | 167 | ex | Elixir | apps/discovery_api/lib/discovery_api_web/views/tableau_view.ex | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | 26 | 2019-09-20T23:54:45.000Z | 2020-08-20T14:23:32.000Z | apps/discovery_api/lib/discovery_api_web/views/tableau_view.ex | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | 757 | 2019-08-15T18:15:07.000Z | 2020-09-18T20:55:31.000Z | apps/discovery_api/lib/discovery_api_web/views/tableau_view.ex | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | 9 | 2019-11-12T16:43:46.000Z | 2020-03-25T16:23:16.000Z | defmodule DiscoveryApiWeb.TableauView do
use DiscoveryApiWeb, :view
def render("fetch_table_info.json", %{table_infos: table_infos}) do
table_infos
end
end
| 20.875 | 69 | 0.772455 |
9e07567bda332648962121e228c4556271b182d0 | 928 | exs | Elixir | config/config.exs | albydarned/shorten | af4f870dbb0f9c3b0699712cc27370e83ed3a730 | [
"MIT"
] | 3 | 2018-09-14T16:16:11.000Z | 2019-07-12T22:25:50.000Z | config/config.exs | albydarned/shorten | af4f870dbb0f9c3b0699712cc27370e83ed3a730 | [
"MIT"
] | null | null | null | config/config.exs | albydarned/shorten | af4f870dbb0f9c3b0699712cc27370e83ed3a730 | [
"MIT"
] | 1 | 2019-07-12T22:25:53.000Z | 2019-07-12T22:25:53.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# General application configuration
config :shorten,
ecto_repos: [Shorten.Repo]
# Configures the endpoint
config :shorten, ShortenWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "2ol1TIaiNSUvh8sJ4vGn1B3ukw1jGhr1ChGPSiTlb4UNJq/pQaSAcJSbNLVsDYur",
render_errors: [view: ShortenWeb.ErrorView, accepts: ~w(json)],
pubsub: [name: Shorten.PubSub,
adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:user_id]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 33.142857 | 86 | 0.762931 |
9e079d7fb5593f3041c548ec3fe21968e25939d9 | 194 | ex | Elixir | lib/honeydew/please/events/list_made.ex | christian-fei/honeydew | af06f5778de164fd50979ae20e59b6aeb3092485 | [
"MIT"
] | 13 | 2022-02-13T18:43:20.000Z | 2022-03-19T11:53:48.000Z | lib/honeydew/please/events/list_made.ex | christian-fei/honeydew | af06f5778de164fd50979ae20e59b6aeb3092485 | [
"MIT"
] | 1 | 2022-02-23T13:57:19.000Z | 2022-02-23T13:57:19.000Z | lib/honeydew/please/events/list_made.ex | christian-fei/honeydew | af06f5778de164fd50979ae20e59b6aeb3092485 | [
"MIT"
] | 3 | 2022-02-13T19:25:19.000Z | 2022-02-22T17:56:52.000Z | defmodule Honeydew.Please.Events.ListMade do
@moduledoc """
Event that signals a new list was made.
"""
@derive Jason.Encoder
defstruct [
:list_id,
:name,
:notes
]
end
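# Illustrative sketch: constructing the event struct; field values are made up.
defmodule ListMadeExample do
  alias Honeydew.Please.Events.ListMade

  def sample do
    %ListMade{list_id: "list-1", name: "Groceries", notes: "Weekly shopping"}
  end
end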
| 13.857143 | 44 | 0.64433 |
9e07a27516f7b77e609c655189c37b15cc661977 | 673 | ex | Elixir | test/support/net.ex | steveoliver/reactive-interaction-gateway | 59b6dc994fd0f098bed19b7bf1e699513ac87167 | [
"Apache-2.0"
] | 518 | 2017-11-09T13:10:49.000Z | 2022-03-28T14:29:50.000Z | test/support/net.ex | steveoliver/reactive-interaction-gateway | 59b6dc994fd0f098bed19b7bf1e699513ac87167 | [
"Apache-2.0"
] | 270 | 2017-11-10T00:11:34.000Z | 2022-02-27T13:08:16.000Z | test/support/net.ex | steveoliver/reactive-interaction-gateway | 59b6dc994fd0f098bed19b7bf1e699513ac87167 | [
"Apache-2.0"
] | 67 | 2017-12-19T20:16:37.000Z | 2022-03-31T10:43:04.000Z | defmodule RigInboundGatewayWeb.Net do
@moduledoc false
def tcp_port_free?(port_num) do
import Enum
:erlang.ports()
|> map(fn port ->
info =
case :erlang.port_info(port) do
info when is_list(info) -> info
_ -> []
end
{port, info}
end)
|> filter(fn {_port, info} -> info[:name] == 'tcp_inet' end)
|> reduce_while(nil, fn {port, info}, _acc ->
case :inet.port(port) do
{:ok, ^port_num} -> {:halt, info[:connected]}
_ -> {:cont, nil}
end
end)
|> case do
nil ->
true
_pid ->
# Process.exit(pid, :kill)
false
end
end
end
| 19.794118 | 64 | 0.511144 |
9e07add9116a764f65222f6e63dc697d3a4939c2 | 1,729 | ex | Elixir | clients/container_analysis/lib/google_api/container_analysis/v1beta1/model/batch_create_occurrences_response.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/container_analysis/lib/google_api/container_analysis/v1beta1/model/batch_create_occurrences_response.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/container_analysis/lib/google_api/container_analysis/v1beta1/model/batch_create_occurrences_response.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ContainerAnalysis.V1beta1.Model.BatchCreateOccurrencesResponse do
@moduledoc """
Response for creating occurrences in batch.
## Attributes
* `occurrences` (*type:* `list(GoogleApi.ContainerAnalysis.V1beta1.Model.Occurrence.t)`, *default:* `nil`) - The occurrences that were created.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:occurrences => list(GoogleApi.ContainerAnalysis.V1beta1.Model.Occurrence.t())
}
field(:occurrences, as: GoogleApi.ContainerAnalysis.V1beta1.Model.Occurrence, type: :list)
end
defimpl Poison.Decoder,
for: GoogleApi.ContainerAnalysis.V1beta1.Model.BatchCreateOccurrencesResponse do
def decode(value, options) do
GoogleApi.ContainerAnalysis.V1beta1.Model.BatchCreateOccurrencesResponse.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.ContainerAnalysis.V1beta1.Model.BatchCreateOccurrencesResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.25 | 147 | 0.75882 |
9e07d5ed4bbc98daab3cbede513587d3cfa7e17b | 24 | exs | Elixir | HelloWorld/elixir/hello-word.exs | LarisseLima/BabySteps | 882b6f44895092c9b060019b14d9b850c1cc8083 | [
"MIT"
] | 37 | 2020-10-01T03:50:42.000Z | 2021-11-23T00:49:51.000Z | HelloWorld/elixir/hello-word.exs | LarisseLima/BabySteps | 882b6f44895092c9b060019b14d9b850c1cc8083 | [
"MIT"
] | 27 | 2020-10-03T23:16:13.000Z | 2021-11-19T19:53:01.000Z | HelloWorld/elixir/hello-word.exs | LarisseLima/BabySteps | 882b6f44895092c9b060019b14d9b850c1cc8083 | [
"MIT"
] | 97 | 2020-10-01T11:39:01.000Z | 2021-11-01T00:30:53.000Z | IO.puts "hello, world!"
| 12 | 23 | 0.666667 |
9e07d6a6f613469bc5f84c938b6a20c9df3374c1 | 521 | ex | Elixir | lib/changelog/regexp.ex | PsOverflow/changelog.com | 53f4ecfc39b021c6b8cfcc0fa11f29aff8038a7f | [
"MIT"
] | 2,599 | 2016-10-25T15:02:53.000Z | 2022-03-26T02:34:42.000Z | lib/changelog/regexp.ex | type1fool/changelog.com | fbec3528cc3f5adfdc75b008bb92b17efc4f248f | [
"MIT"
] | 253 | 2016-10-25T20:29:24.000Z | 2022-03-29T21:52:36.000Z | lib/changelog/regexp.ex | type1fool/changelog.com | fbec3528cc3f5adfdc75b008bb92b17efc4f248f | [
"MIT"
] | 298 | 2016-10-25T15:18:31.000Z | 2022-01-18T21:25:52.000Z | defmodule Changelog.Regexp do
def email, do: ~r/^\S+@\S+\.\S+$/
def http, do: ~r/^https?:\/\//
def http_message, do: "must include http(s)://"
def tag, do: ~r/(?<open><\/?)(?<tag>.*?)(?<close>>)/
def tag(name), do: ~r/(?<open><\/?)#{name}(?<close>>)/
def social, do: ~r/\A[a-z|A-Z|0-9|_|-]+\z/
def social_message, do: "just the username, plz"
def slug, do: ~r/\A[a-z|0-9|_|-]+\z/
def slug_message, do: "valid chars: a-z, 0-9, -, _"
def timestamp, do: ~r/(\d\d:)?(\d\d?:)(\d\d)(\.\d\d?)?/
end
| 23.681818 | 57 | 0.514395 |
9e07f0963114a8ddb7910a02db47353f18440b25 | 4,643 | exs | Elixir | test/scenic_new_nerves_test.exs | grahamhay/scenic_new | 23bb1559878ed5e1cd66866b758a685306cbc1a8 | [
"Apache-2.0"
] | null | null | null | test/scenic_new_nerves_test.exs | grahamhay/scenic_new | 23bb1559878ed5e1cd66866b758a685306cbc1a8 | [
"Apache-2.0"
] | null | null | null | test/scenic_new_nerves_test.exs | grahamhay/scenic_new | 23bb1559878ed5e1cd66866b758a685306cbc1a8 | [
"Apache-2.0"
] | null | null | null | Code.require_file("mix_helper.exs", __DIR__)
defmodule Mix.Tasks.Scenic.New.NervesTest do
use ExUnit.Case, async: false
import ScenicNew.MixHelper
import ExUnit.CaptureIO
@app_name "scenic_demo"
@module_name "ScenicDemo"
test "new.example with defaults" do
in_tmp("new with defaults", fn ->
assert capture_io(fn ->
Mix.Tasks.Scenic.New.Nerves.run([@app_name])
assert_file("#{@app_name}/README.md")
assert_file("#{@app_name}/.formatter.exs")
assert_file("#{@app_name}/.gitignore")
assert_file("#{@app_name}/config/config.exs", fn file ->
assert file =~ "config :nerves, :firmware, rootfs_overlay: \"rootfs_overlay\""
end)
assert_file("#{@app_name}/config/config.host.exs", fn file ->
assert file =~ "config :#{@app_name}, :viewport"
assert file =~ "size: {800, 480}"
assert file =~ "opts: [scale: 1.0]"
assert file =~ "default_scene: {#{@module_name}.Scene.SysInfo, nil}"
assert file =~ "title: \"MIX_TARGET=host, app = :#{@app_name}\""
end)
assert_file("#{@app_name}/config/config.rpi3.exs", fn file ->
assert file =~ "config :#{@app_name}, :viewport"
assert file =~ "size: {800, 480}"
assert file =~ "opts: [scale: 1.0]"
assert file =~ "default_scene: {#{@module_name}.Scene.SysInfo, nil}"
assert file =~ "device: \"FT5406 memory based driver\""
end)
assert_file("#{@app_name}/lib/#{@app_name}.ex", fn file ->
assert file =~ "defmodule #{@module_name}.Application do"
assert file =~ "Application.get_env(:#{@app_name}, :viewport)"
end)
assert_file("#{@app_name}/lib/scenes/crosshair.ex")
assert_file("#{@app_name}/lib/scenes/sys_info.ex")
assert_file("#{@app_name}/mix.exs", fn file ->
assert file =~ "{:scenic, \"~> 0.9\"}"
assert file =~ "{:scenic_driver_glfw, \"~> 0.9\"}"
assert file =~ "{:scenic_driver_nerves_touch, \"~> 0.9\"}"
end)
end) =~ "Your Scenic project was created successfully."
end)
end
test "new with invalid args" do
assert_raise Mix.Error, ~r"Application name cannot be scenic", fn ->
Mix.Tasks.Scenic.New.Nerves.run(["scenic"])
end
assert_raise Mix.Error, ~r"Application name cannot be scenic", fn ->
Mix.Tasks.Scenic.New.Nerves.run(["folder/scenic"])
end
assert_raise Mix.Error, ~r"Application name must start with a lowercase ASCII letter,", fn ->
Mix.Tasks.Scenic.New.Nerves.run(["007invalid"])
end
assert_raise Mix.Error, ~r"Application name must start with a lowercase ASCII letter, ", fn ->
Mix.Tasks.Scenic.New.Nerves.run(["valid", "--app", "007invalid"])
end
assert_raise Mix.Error, ~r"Module name must be a valid Elixir alias", fn ->
Mix.Tasks.Scenic.New.Nerves.run(["valid", "--module", "not.valid"])
end
assert_raise Mix.Error, ~r"Module name \w+ is already taken", fn ->
Mix.Tasks.Scenic.New.Nerves.run(["string"])
end
assert_raise Mix.Error, ~r"Module name \w+ is already taken", fn ->
Mix.Tasks.Scenic.New.Nerves.run(["string", "chars"])
end
assert_raise Mix.Error, ~r"Module name \w+ is already taken", fn ->
Mix.Tasks.Scenic.New.Nerves.run(["valid", "--app", "mix"])
end
assert_raise Mix.Error, ~r"Module name \w+ is already taken", fn ->
Mix.Tasks.Scenic.New.Nerves.run(["valid", "--module", "String"])
end
end
test "new without args" do
assert capture_io(fn -> Mix.Tasks.Scenic.New.Nerves.run([]) end) =~
"Generates a starter Scenic application."
end
test "new check for directory existence" do
shell = Mix.shell()
in_tmp("check for directory existence", fn ->
File.mkdir!(@app_name)
# Send Mix messages to the current process instead of performing IO
Mix.shell(Mix.Shell.Process)
msg = "The directory \"scenic_demo\" already exists. Are you sure you want to continue?"
assert_raise Mix.Error, ~r"Please select another directory for installation", fn ->
        # The shell asks if we want to continue. We will say no.
send(self(), {:mix_shell_input, :yes?, false})
Mix.Tasks.Scenic.New.Nerves.run([@app_name])
assert_received {:mix_shell, :yes?, [^msg]}
end
end)
Mix.shell(shell)
end
end
| 36.849206 | 98 | 0.587767 |
9e07fa26ca94dfb023dd60b28248dc4b84e49f2d | 3,281 | exs | Elixir | test/lib/code_corps/model/project_user_test.exs | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 275 | 2015-06-23T00:20:51.000Z | 2021-08-19T16:17:37.000Z | test/lib/code_corps/model/project_user_test.exs | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 1,304 | 2015-06-26T02:11:54.000Z | 2019-12-12T21:08:00.000Z | test/lib/code_corps/model/project_user_test.exs | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 140 | 2016-01-01T18:19:47.000Z | 2020-11-22T06:24:47.000Z | defmodule CodeCorps.ProjectUserTest do
use CodeCorps.ModelCase
alias CodeCorps.{ProjectUser, Repo}
describe "update_changeset role validation" do
test "includes pending" do
attrs = %{role: "pending"}
changeset = ProjectUser.update_changeset(%ProjectUser{}, attrs)
assert changeset.valid?
end
test "includes contributor" do
attrs = %{role: "contributor"}
changeset = ProjectUser.update_changeset(%ProjectUser{}, attrs)
assert changeset.valid?
end
test "includes admin" do
attrs = %{role: "admin"}
changeset = ProjectUser.update_changeset(%ProjectUser{}, attrs)
assert changeset.valid?
end
test "includes owner" do
attrs = %{role: "owner"}
changeset = ProjectUser.update_changeset(%ProjectUser{}, attrs)
assert changeset.valid?
end
test "does not include invalid values" do
attrs = %{role: "invalid"}
changeset = ProjectUser.update_changeset(%ProjectUser{}, attrs)
refute changeset.valid?
end
end
describe "create_owner_changeset/2" do
@attributes ~w(project_id user_id role)
test "casts #{@attributes}, with role cast to 'owner'" do
attrs = %{foo: "bar", project_id: 1, user_id: 2}
changeset = ProjectUser.create_owner_changeset(%ProjectUser{}, attrs)
assert changeset.changes == %{project_id: 1, user_id: 2, role: "owner"}
end
test "ensures user record exists" do
project = insert(:project)
attrs = %{project_id: project.id, user_id: -1}
changeset = ProjectUser.create_owner_changeset(%ProjectUser{}, attrs)
{:error, invalid_changeset} = changeset |> Repo.insert
refute invalid_changeset.valid?
assert assoc_constraint_triggered?(invalid_changeset, :user)
end
test "ensures project record exists" do
user = insert(:user)
attrs = %{project_id: -1, user_id: user.id}
changeset = ProjectUser.create_owner_changeset(%ProjectUser{}, attrs)
{:error, invalid_changeset} = changeset |> Repo.insert
refute invalid_changeset.valid?
assert assoc_constraint_triggered?(invalid_changeset, :project)
end
end
describe "create_changeset/2" do
@attributes ~w(project_id user_id role)
test "casts #{@attributes}, with role cast to 'pending'" do
attrs = %{foo: "bar", project_id: 1, user_id: 2}
changeset = ProjectUser.create_changeset(%ProjectUser{}, attrs)
assert changeset.changes == %{project_id: 1, user_id: 2, role: "pending"}
end
test "ensures user record exists" do
project = insert(:project)
attrs = %{project_id: project.id, user_id: -1}
changeset = ProjectUser.create_changeset(%ProjectUser{}, attrs)
{:error, invalid_changeset} = changeset |> Repo.insert
refute invalid_changeset.valid?
assert assoc_constraint_triggered?(invalid_changeset, :user)
end
test "ensures project record exists" do
user = insert(:user)
attrs = %{project_id: -1, user_id: user.id}
changeset = ProjectUser.create_changeset(%ProjectUser{}, attrs)
{:error, invalid_changeset} = changeset |> Repo.insert
refute invalid_changeset.valid?
assert assoc_constraint_triggered?(invalid_changeset, :project)
end
end
end
| 32.166667 | 79 | 0.679976 |
9e082c7c5eb2743973a50e1a511b18cdd0117a6a | 3,909 | ex | Elixir | implementations/elixir/ockam/ockam/lib/ockam/messaging/ordering/strict/confirm_pipe.ex | Szymongib/ockam | b6d8c62ec2516c8e0dac243a02c1d1aaf9094622 | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam/lib/ockam/messaging/ordering/strict/confirm_pipe.ex | Szymongib/ockam | b6d8c62ec2516c8e0dac243a02c1d1aaf9094622 | [
"Apache-2.0"
] | 108 | 2021-10-30T14:46:14.000Z | 2021-10-30T16:00:08.000Z | implementations/elixir/ockam/ockam/lib/ockam/messaging/ordering/strict/confirm_pipe.ex | frazar/ockam | 2229a22cb95fc40485304fac9b2236384b544ad4 | [
"Apache-2.0"
] | null | null | null | defmodule Ockam.Messaging.Ordering.Strict.ConfirmPipe do
@moduledoc """
Ockam pipe with receive queue and confirmations.
Next message is processed only after previous one was confirmed.
  NOTE: Confirm pipe should go over a backtraceable route
  NOTE: Confirm pipe doesn't handle message loss. It will get stuck on a missing confirm
  NOTE: Confirm pipe does not deduplicate messages
"""
## TODO: experiment with call-style waiting for confirm
@behaviour Ockam.Messaging.Pipe
def sender() do
Ockam.Messaging.Ordering.Strict.ConfirmPipe.Sender
end
def receiver() do
Ockam.Messaging.Ordering.Strict.ConfirmPipe.Receiver
end
end
defmodule Ockam.Messaging.Ordering.Strict.ConfirmPipe.Sender do
@moduledoc """
Confirm pipe sender.
  Started with a receiver route.
  When a message is forwarded, the worker waits for a confirmation.
  Additional messages received before the confirmation are put in the receive queue.
  After a confirmation is received, the next message from the queue is sent.
  Confirmations are received on the INNER address.
"""
use Ockam.AsymmetricWorker
alias Ockam.Message
@impl true
def inner_setup(options, state) do
receiver_route = Keyword.fetch!(options, :receiver_route)
{:ok, Map.merge(state, %{receiver_route: receiver_route, waiting_confirm: false, queue: []})}
end
@impl true
def handle_inner_message(message, state) do
case is_confirm?(message) do
true ->
confirm(state)
false ->
{:error, {:unknown_inner_message, message}}
end
end
@impl true
def handle_outer_message(message, state) do
case waiting_confirm?(state) do
true ->
queue_message(message, state)
false ->
send_message(message, state)
end
end
def is_confirm?(message) do
## TODO: do we need some payload here?
## Revisit when we have message types
Message.payload(message) == ""
end
def waiting_confirm?(state) do
Map.get(state, :waiting_confirm, false)
end
def queue_message(message, state) do
queue = Map.get(state, :queue, [])
{:ok, Map.put(state, :queue, queue ++ [message])}
end
def send_message(message, state) do
## TODO: do we need to wrap the message?
receiver_route = Map.get(state, :receiver_route)
[_me | onward_route] = Message.onward_route(message)
forwarded_message = %{
onward_route: onward_route,
return_route: Message.return_route(message),
payload: Message.payload(message)
}
{:ok, wrapped_message} = Ockam.Wire.encode(forwarded_message)
Ockam.Router.route(%{
onward_route: receiver_route,
return_route: [state.inner_address],
payload: wrapped_message
})
{:ok, Map.put(state, :waiting_confirm, true)}
end
def confirm(state) do
queue = Map.get(state, :queue, [])
case queue do
[message | rest] ->
send_message(message, Map.put(state, :queue, rest))
[] ->
{:ok, Map.put(state, :waiting_confirm, false)}
end
end
end
defmodule Ockam.Messaging.Ordering.Strict.ConfirmPipe.Receiver do
@moduledoc """
Confirm receiver sends a confirm message for every message received
"""
use Ockam.Worker
alias Ockam.Message
alias Ockam.Router
require Logger
@impl true
def handle_message(message, state) do
return_route = Message.return_route(message)
wrapped_message = Message.payload(message)
case Ockam.Wire.decode(wrapped_message) do
{:ok, message} ->
Router.route(message)
send_confirm(return_route, state)
{:ok, state}
{:error, err} ->
Logger.error("Error unwrapping message: #{inspect(err)}")
{:error, err}
end
end
def send_confirm(return_route, state) do
Router.route(%{
onward_route: return_route,
return_route: [state.address],
## TODO: see `Sender.is_confirm?`
payload: ""
})
end
end
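## Illustrative sketch (not part of the Ockam sources): typical wiring for the
## pair above: start a receiver, start a sender pointing at the receiver's
## route, then route application messages through the sender. The startup
## options and route values below are assumptions for illustration only.
defmodule ConfirmPipeUsageSketch do
  alias Ockam.Messaging.Ordering.Strict.ConfirmPipe

  def demo do
    {:ok, receiver_address} = ConfirmPipe.receiver().create([])
    {:ok, sender_address} = ConfirmPipe.sender().create(receiver_route: [receiver_address])

    Ockam.Router.route(%{
      onward_route: [sender_address, "destination"],
      return_route: [],
      payload: "hello"
    })
  end
end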
| 25.383117 | 97 | 0.686109 |
9e0872cfe7e53f8330a5896c5fdda35c50fc1db0 | 93 | exs | Elixir | test/surface_bulma/form/multiple_select_test.exs | justin-m-morgan/surface_bulma | c31faebc818c39d06250574b913096504bd6eeec | [
"MIT"
] | 30 | 2021-02-05T18:50:38.000Z | 2022-03-12T22:42:29.000Z | test/surface_bulma/form/multiple_select_test.exs | justin-m-morgan/surface_bulma | c31faebc818c39d06250574b913096504bd6eeec | [
"MIT"
] | 19 | 2021-01-15T19:14:24.000Z | 2022-02-05T14:57:18.000Z | test/surface_bulma/form/multiple_select_test.exs | justin-m-morgan/surface_bulma | c31faebc818c39d06250574b913096504bd6eeec | [
"MIT"
] | 17 | 2021-02-01T20:57:51.000Z | 2022-03-20T17:06:57.000Z | defmodule SurfaceBulma.Components.Form.MultipleSelectTest do
use SurfaceBulma.ConnCase
end
| 23.25 | 60 | 0.870968 |
9e08aa580512086bce307daa1c6a80c9984ba3fa | 61 | ex | Elixir | test/support/mocks.ex | boonious/hui_mon | c88008c92c4a0977333c94f109ab6451af20cc9a | [
"Apache-2.0"
] | null | null | null | test/support/mocks.ex | boonious/hui_mon | c88008c92c4a0977333c94f109ab6451af20cc9a | [
"Apache-2.0"
] | 3 | 2021-04-22T10:10:04.000Z | 2021-05-14T15:21:13.000Z | test/support/mocks.ex | boonious/hui_mon | c88008c92c4a0977333c94f109ab6451af20cc9a | [
"Apache-2.0"
] | null | null | null | Mox.defmock(HuiMon.Source.SolrMock, for: HuiMon.Source.Solr)
| 30.5 | 60 | 0.803279 |
9e08cbfc765bbbb77fef19867e8f988ebd70a55a | 1,123 | exs | Elixir | config/config.exs | jbowtie/kowhai-ex | 7d81b4443f73f5b89ae88a963a964fa77c5829e8 | [
"Apache-2.0"
] | null | null | null | config/config.exs | jbowtie/kowhai-ex | 7d81b4443f73f5b89ae88a963a964fa77c5829e8 | [
"Apache-2.0"
] | null | null | null | config/config.exs | jbowtie/kowhai-ex | 7d81b4443f73f5b89ae88a963a964fa77c5829e8 | [
"Apache-2.0"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :parsers, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:parsers, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.225806 | 73 | 0.751558 |
9e0912a4f6922015c834f31a704f2e36386b4a67 | 121 | ex | Elixir | programming/elixir-course/discuss/lib/discuss_web/controllers/topic_controller.ex | filippo-fonseca/learning | 426c7e3a904e2dcd389f1c4ecd5b1484571f937d | [
"MIT"
] | 1 | 2021-09-24T22:47:33.000Z | 2021-09-24T22:47:33.000Z | programming/elixir-course/discuss/lib/discuss_web/controllers/topic_controller.ex | filippo-fonseca/learning | 426c7e3a904e2dcd389f1c4ecd5b1484571f937d | [
"MIT"
] | null | null | null | programming/elixir-course/discuss/lib/discuss_web/controllers/topic_controller.ex | filippo-fonseca/learning | 426c7e3a904e2dcd389f1c4ecd5b1484571f937d | [
"MIT"
] | 1 | 2020-08-28T22:52:08.000Z | 2020-08-28T22:52:08.000Z | defmodule DiscussWeb.TopicController do
use DiscussWeb, :controller
  # Placeholder action: the params are not used yet, so they are underscored to
  # avoid compiler warnings; rendering a template is still to be implemented.
  def new(conn, _params) do
    conn
  end
end | 20.166667 | 39 | 0.68595 |
9e093a4f7cb9d635bc7bcd913a6601c1e253865b | 471 | ex | Elixir | test/support/components/investigation_note_form.ex | geometricservices/epi-viewpoin | ecb5316ea0f3f7299d5ff63e2de588539005ac1c | [
"Apache-2.0"
] | 5 | 2021-02-25T18:43:09.000Z | 2021-02-27T06:00:35.000Z | test/support/components/investigation_note_form.ex | geometricservices/epi-viewpoint | ecb5316ea0f3f7299d5ff63e2de588539005ac1c | [
"Apache-2.0"
] | 3 | 2021-12-13T17:52:47.000Z | 2021-12-17T01:35:31.000Z | test/support/components/investigation_note_form.ex | geometricservices/epi-viewpoint | ecb5316ea0f3f7299d5ff63e2de588539005ac1c | [
"Apache-2.0"
] | 1 | 2022-01-27T23:26:38.000Z | 2022-01-27T23:26:38.000Z | defmodule EpicenterWeb.Test.Components.InvestigationNoteForm do
import Phoenix.LiveViewTest
alias Phoenix.LiveViewTest.View
def change_note(%View{} = view, text) do
view
|> element("form[data-role=note-form]")
|> render_change(%{"form_field_data" => %{"text" => text}})
end
def submit_new_note(%View{} = view, text) do
view
|> element("form[data-role=note-form]")
|> render_submit(%{"form_field_data" => %{"text" => text}})
end
end
| 26.166667 | 63 | 0.666667 |
9e094394dd69278799c2d0750dcbe959c120b1a9 | 1,205 | ex | Elixir | lib/api_elixir_films_web/controllers/movie_controller.ex | brandaoplaster/api_elixir_films | 9594761bb0d2463132b904c5adbed6d9664bb7ff | [
"MIT"
] | null | null | null | lib/api_elixir_films_web/controllers/movie_controller.ex | brandaoplaster/api_elixir_films | 9594761bb0d2463132b904c5adbed6d9664bb7ff | [
"MIT"
] | null | null | null | lib/api_elixir_films_web/controllers/movie_controller.ex | brandaoplaster/api_elixir_films | 9594761bb0d2463132b904c5adbed6d9664bb7ff | [
"MIT"
] | null | null | null | defmodule ApiElixirFilmsWeb.MovieController do
use ApiElixirFilmsWeb, :controller
alias ApiElixirFilms.Collection
alias ApiElixirFilms.Collection.Movie
action_fallback ApiElixirFilmsWeb.FallbackController
def index(conn, _params) do
movies = Collection.list_movies()
render(conn, "index.json", movies: movies)
end
def create(conn, %{"movie" => movie_params}) do
with {:ok, %Movie{} = movie} <- Collection.create_movie(movie_params) do
conn
|> put_status(:created)
|> put_resp_header("location", Routes.movie_path(conn, :show, movie))
|> render("show.json", movie: movie)
end
end
def show(conn, %{"id" => id}) do
movie = Collection.get_movie!(id)
render(conn, "show.json", movie: movie)
end
def update(conn, %{"id" => id, "movie" => movie_params}) do
movie = Collection.get_movie!(id)
with {:ok, %Movie{} = movie} <- Collection.update_movie(movie, movie_params) do
render(conn, "show.json", movie: movie)
end
end
def delete(conn, %{"id" => id}) do
movie = Collection.get_movie!(id)
with {:ok, %Movie{}} <- Collection.delete_movie(movie) do
send_resp(conn, :no_content, "")
end
end
end
| 27.386364 | 83 | 0.663071 |
9e0944425487310b9bf9d39977b5ae5865cd69a7 | 2,989 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/instance_groups_list_instances.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/compute/lib/google_api/compute/v1/model/instance_groups_list_instances.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/instance_groups_list_instances.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.InstanceGroupsListInstances do
@moduledoc """
## Attributes
* `id` (*type:* `String.t`, *default:* `nil`) - [Output Only] Unique identifier for the resource; defined by the server.
* `items` (*type:* `list(GoogleApi.Compute.V1.Model.InstanceWithNamedPorts.t)`, *default:* `nil`) - A list of InstanceWithNamedPorts resources.
* `kind` (*type:* `String.t`, *default:* `compute#instanceGroupsListInstances`) - [Output Only] The resource type, which is always compute#instanceGroupsListInstances for the list of instances in the specified instance group.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - [Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results.
* `selfLink` (*type:* `String.t`, *default:* `nil`) - [Output Only] Server-defined URL for this resource.
* `warning` (*type:* `GoogleApi.Compute.V1.Model.InstanceGroupsListInstancesWarning.t`, *default:* `nil`) - [Output Only] Informational warning message.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:id => String.t(),
:items => list(GoogleApi.Compute.V1.Model.InstanceWithNamedPorts.t()),
:kind => String.t(),
:nextPageToken => String.t(),
:selfLink => String.t(),
:warning => GoogleApi.Compute.V1.Model.InstanceGroupsListInstancesWarning.t()
}
field(:id)
field(:items, as: GoogleApi.Compute.V1.Model.InstanceWithNamedPorts, type: :list)
field(:kind)
field(:nextPageToken)
field(:selfLink)
field(:warning, as: GoogleApi.Compute.V1.Model.InstanceGroupsListInstancesWarning)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.InstanceGroupsListInstances do
def decode(value, options) do
GoogleApi.Compute.V1.Model.InstanceGroupsListInstances.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.InstanceGroupsListInstances do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 48.209677 | 393 | 0.731683 |
9e0945a1fd6e94c5763c6fdaaa8be7edbdf1d480 | 2,667 | ex | Elixir | lib/oli_web/live/delivery/table_model.ex | ehilfer/oli-torus | c2babe03047bfca2d3fe59ae3b7604597e34001e | [
"MIT"
] | null | null | null | lib/oli_web/live/delivery/table_model.ex | ehilfer/oli-torus | c2babe03047bfca2d3fe59ae3b7604597e34001e | [
"MIT"
] | null | null | null | lib/oli_web/live/delivery/table_model.ex | ehilfer/oli-torus | c2babe03047bfca2d3fe59ae3b7604597e34001e | [
"MIT"
] | null | null | null | defmodule OliWeb.Delivery.SelectSource.TableModel do
alias OliWeb.Common.Table.{ColumnSpec, SortableTableModel}
alias OliWeb.Router.Helpers, as: Routes
use Surface.LiveComponent
defp is_product?(item) do
Map.has_key?(item, :type) and Map.get(item, :type) == :blueprint
end
def new(products) do
SortableTableModel.new(
rows: products,
column_specs: [
%ColumnSpec{
name: :action,
label: "",
render_fn: &__MODULE__.render_action_column/3
},
%ColumnSpec{
name: :title,
label: "Title",
render_fn: &__MODULE__.render_title_column/3,
sort_fn: &__MODULE__.sort_title_column/2
},
%ColumnSpec{
name: :type,
label: "Type",
render_fn: &__MODULE__.render_type_column/3
},
%ColumnSpec{
name: :requires_payment,
label: "Requires Payment",
render_fn: &__MODULE__.render_payment_column/3
},
%ColumnSpec{
name: :inserted_at,
label: "Created",
render_fn: &SortableTableModel.render_inserted_at_column/3
}
],
event_suffix: "",
id_field: [:unique_id]
)
end
def render_payment_column(_, item, _) do
case is_product?(item) do
true ->
if item.requires_payment do
case Money.to_string(item.amount) do
{:ok, m} -> m
_ -> "Yes"
end
else
"None"
end
_ ->
"None"
end
end
def render_title_column(assigns, item, _) do
case is_product?(item) do
true ->
route_path = Routes.live_path(OliWeb.Endpoint, OliWeb.Products.DetailsView, item.slug)
SortableTableModel.render_link_column(assigns, item.title, route_path)
_ ->
route_path = Routes.project_path(OliWeb.Endpoint, :overview, item.project.slug)
SortableTableModel.render_link_column(assigns, item.project.title, route_path)
end
end
def sort_title_column(sort_order, sort_spec),
do: {& &1.project, ColumnSpec.default_sort_fn(sort_order, sort_spec)}
def render_action_column(assigns, item, _) do
id =
case is_product?(item) do
true ->
"product:#{item.id}"
_ ->
"publication:#{item.id}"
end
~F"""
<button class="btn btn-primary" phx-click="selected" phx-value-id={id}>Select</button>
"""
end
def render_type_column(_, item, _) do
case is_product?(item) do
true -> "Product"
_ -> "Course Project"
end
end
def render(assigns) do
~F"""
<div>nothing</div>
"""
end
end
| 25.160377 | 94 | 0.592426 |
9e094e74cb68da808487be05796d17025aa91438 | 1,087 | ex | Elixir | chapter_7/todo_worker_pool/lib/load_test.ex | librity/elixir_in_action | d2df441ceb7e6a0d3f18bc3ab3c59570125fcdec | [
"MIT"
] | 3 | 2021-04-22T11:55:58.000Z | 2021-08-22T13:19:56.000Z | chapter_7/todo_persistent/lib/load_test.ex | librity/elixir_in_action | d2df441ceb7e6a0d3f18bc3ab3c59570125fcdec | [
"MIT"
] | null | null | null | chapter_7/todo_persistent/lib/load_test.ex | librity/elixir_in_action | d2df441ceb7e6a0d3f18bc3ab3c59570125fcdec | [
"MIT"
] | 3 | 2021-04-22T21:19:45.000Z | 2021-08-22T13:20:03.000Z | # Very quick, inconclusive load test
#
# Start from command line with:
# elixir --erl "+P 2000000" -S mix run -e LoadTest.run
#
# Note: the +P 2000000 sets maximum number of processes to 2 millions
defmodule LoadTest do
@total_processes 1_000_000
@interval_size 100_000
def run do
{:ok, cache} = Todo.Cache.Client.start()
interval_count = round(@total_processes / @interval_size)
Enum.each(0..(interval_count - 1), &run_interval(cache, make_interval(&1)))
end
defp make_interval(n) do
start = n * @interval_size
start..(start + @interval_size - 1)
end
defp run_interval(cache, interval) do
{time, _} =
:timer.tc(fn ->
interval
|> Enum.each(&Todo.Cache.Client.server_process(cache, "cache_#{&1}"))
end)
IO.puts("#{inspect(interval)}: average put #{time / @interval_size} μs")
{time, _} =
:timer.tc(fn ->
interval
|> Enum.each(&Todo.Cache.Client.server_process(cache, "cache_#{&1}"))
end)
IO.puts("#{inspect(interval)}: average get #{time / @interval_size} μs\n")
end
end
| 26.512195 | 79 | 0.642134 |
9e0963ead8f54b7c5499f63f0df1ca5413081380 | 755 | exs | Elixir | mix.exs | spreedly/gentry | 3acf32549f539ba5d2cdb011fcccd95ea0a605dd | [
"MIT"
] | 3 | 2018-01-18T16:02:30.000Z | 2019-04-04T16:57:09.000Z | mix.exs | spreedly/gentry | 3acf32549f539ba5d2cdb011fcccd95ea0a605dd | [
"MIT"
] | null | null | null | mix.exs | spreedly/gentry | 3acf32549f539ba5d2cdb011fcccd95ea0a605dd | [
"MIT"
] | null | null | null | defmodule Gentry.Mixfile do
use Mix.Project
def project do
[
app: :gentry,
version: "0.1.1",
elixir: "~> 1.6",
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
deps: deps(),
description: "Generic retries with exponential backoff",
name: "Gentry",
source_url: "https://github.com/spreedly/gentry",
package: package()
]
end
def application do
[extra_applications: [:logger]]
end
defp deps do
[{:ex_doc, ">= 0.0.0", only: :dev}]
end
defp package do
[
name: :gentry,
licenses: ["MIT License"],
maintainers: ["Kevin Lewis", "Spreedly"],
links: %{"GitHub" => "https://github.com/spreedly/gentry"}
]
end
end
| 20.972222 | 64 | 0.572185 |
9e097ce9bba2927d716d9ec8d6744add8556b5a4 | 1,134 | exs | Elixir | mix.exs | elixir-nx/scidata | 86ced1cbbb74710f7cbf3986e5c1b5468a0d175e | [
"Apache-2.0"
] | 107 | 2021-04-16T16:07:38.000Z | 2022-03-06T19:43:12.000Z | mix.exs | elixir-nx/scidata | 86ced1cbbb74710f7cbf3986e5c1b5468a0d175e | [
"Apache-2.0"
] | 19 | 2021-04-16T17:32:14.000Z | 2022-01-19T15:05:53.000Z | mix.exs | elixir-nx/scidata | 86ced1cbbb74710f7cbf3986e5c1b5468a0d175e | [
"Apache-2.0"
] | 9 | 2021-04-17T20:49:55.000Z | 2022-01-19T04:00:34.000Z | defmodule Scidata.MixProject do
use Mix.Project
@version "0.1.4"
@repo_url "https://github.com/elixir-nx/scidata"
def project do
[
app: :scidata,
version: @version,
elixir: "~> 1.11",
start_permanent: Mix.env() == :prod,
deps: deps(),
# Hex
package: package(),
description: "Datasets for science",
# Docs
name: "Scidata",
docs: docs()
]
end
def application do
[
extra_applications: [:logger, :ssl, :inets]
]
end
defp deps do
[
{:ex_doc, ">= 0.24.0", only: :dev, runtime: false},
{:nimble_csv, "~> 1.1"},
{:jason, "~> 1.0"}
]
end
defp package do
[
licenses: ["Apache-2.0"],
links: %{"GitHub" => @repo_url}
]
end
defp docs do
[
source_ref: "v#{@version}",
source_url: @repo_url,
groups_for_modules: [
"Text": [Scidata.IMDBReviews, Scidata.Squad, Scidata.YelpFullReviews, Scidata.YelpPolarityReviews],
"Vision": [Scidata.CIFAR10, Scidata.CIFAR100, Scidata.FashionMNIST, Scidata.KuzushijiMNIST, Scidata.MNIST]
]
]
end
end
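# A minimal usage sketch for the package this file builds; Scidata.MNIST is one
# of the modules grouped under "Vision" above, and download/0 is assumed to be
# its entry point:
#
#     {images, labels} = Scidata.MNIST.download()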
| 19.894737 | 114 | 0.564374 |
9e09d024809f121a8011f46a4ac12f3cfbdb928d | 2,146 | exs | Elixir | test/graphql/resolvers/collaborator_test.exs | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | ["BSD-3-Clause"] | 806 | 2018-04-07T20:40:33.000Z | 2022-03-30T01:39:57.000Z | test/graphql/resolvers/collaborator_test.exs | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | ["BSD-3-Clause"] | 194 | 2018-04-07T13:49:37.000Z | 2022-03-30T19:58:45.000Z | test/graphql/resolvers/collaborator_test.exs | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | ["BSD-3-Clause"] | 89 | 2018-04-09T13:55:49.000Z | 2022-03-24T07:09:31.000Z |
defmodule AccentTest.GraphQL.Resolvers.Collaborator do
use Accent.RepoCase
use Oban.Testing, repo: Accent.Repo
alias Accent.GraphQL.Resolvers.Collaborator, as: Resolver
alias Accent.{
Collaborator,
Project,
Repo,
User
}
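# Minimal stand-in for %Plug.Conn{}; the tests below only populate :assigns
# with a current_user.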
defmodule PlugConn do
defstruct [:assigns]
end
@user %User{email: "[email protected]"}
setup do
user = Repo.insert!(@user)
project = %Project{main_color: "#f00", name: "My project"} |> Repo.insert!()
{:ok, [user: user, project: project]}
end
test "create", %{project: project, user: user} do
context = %{context: %{conn: %PlugConn{assigns: %{current_user: user}}}}
{:ok, result} = Resolver.create(project, %{email: "[email protected]", role: "admin"}, context)
assert_enqueued(
worker: Accent.Hook.Outbounds.Mock,
args: %{
"event" => "create_collaborator",
"payload" => %{"collaborator" => %{"email" => "[email protected]"}},
"project_id" => project.id,
"user_id" => user.id
}
)
assert get_in(result, [:errors]) == nil
assert get_in(Repo.all(Collaborator), [Access.all(), Access.key(:email)]) == ["[email protected]"]
assert get_in(Repo.all(Collaborator), [Access.all(), Access.key(:role)]) == ["admin"]
end
test "update", %{project: project, user: user} do
context = %{context: %{conn: %PlugConn{assigns: %{current_user: user}}}}
collaborator = %Collaborator{email: "[email protected]", role: "reviewer", project_id: project.id} |> Repo.insert!()
{:ok, result} = Resolver.update(collaborator, %{role: "owner"}, context)
assert get_in(result, [:errors]) == nil
assert get_in(result, [:collaborator, Access.key(:role)]) == "owner"
end
test "delete", %{project: project, user: user} do
context = %{context: %{conn: %PlugConn{assigns: %{current_user: user}}}}
collaborator = %Collaborator{email: "[email protected]", role: "reviewer", project_id: project.id} |> Repo.insert!()
{:ok, result} = Resolver.delete(collaborator, %{}, context)
assert get_in(result, [:errors]) == nil
assert get_in(result, [:collaborator]) == collaborator
end
end
| 32.029851 | 119 | 0.637465 |
9e09d114664f0633fbd8c7739346d87dd880993a | 501 | ex | Elixir | lib/slack_inviter_web/views/error_view.ex | syracuseio/slack_inviter | a2bdf4ffc45b12db95aa74bf7a167ef4996a9b81 | ["MIT"] | null | null | null | lib/slack_inviter_web/views/error_view.ex | syracuseio/slack_inviter | a2bdf4ffc45b12db95aa74bf7a167ef4996a9b81 | ["MIT"] | 3 | 2021-03-08T19:58:55.000Z | 2021-09-01T03:46:56.000Z | lib/slack_inviter_web/views/error_view.ex | syracuseio/slack_inviter | a2bdf4ffc45b12db95aa74bf7a167ef4996a9b81 | ["MIT"] | 1 | 2018-08-15T01:52:40.000Z | 2018-08-15T01:52:40.000Z |
defmodule SlackInviterWeb.ErrorView do
use SlackInviterWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
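# The same pattern can cover other status codes, e.g. a sketch for a custom 404:
# def render("404.html", _assigns) do
#   "Page not found"
# end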
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
def template_not_found(template, _assigns) do
Phoenix.Controller.status_message_from_template(template)
end
end
| 29.470588 | 61 | 0.740519 |
9e09dd170ee38e18a55338fccc262c00f9f470d4 | 2,341 | exs | Elixir | implementations/elixir/ockam/ockam/mix.exs | plaxi0s/ockam | 2cc911dff720228bca950d83faf095ea84717981 | ["Apache-2.0"] | 1,912 | 2019-01-10T14:17:00.000Z | 2022-03-30T19:16:44.000Z | implementations/elixir/ockam/ockam/mix.exs | plaxi0s/ockam | 2cc911dff720228bca950d83faf095ea84717981 | ["Apache-2.0"] | 1,473 | 2019-01-16T15:14:47.000Z | 2022-03-31T23:44:50.000Z | implementations/elixir/ockam/ockam/mix.exs | plaxi0s/ockam | 2cc911dff720228bca950d83faf095ea84717981 | ["Apache-2.0"] | 219 | 2019-01-11T03:35:13.000Z | 2022-03-31T10:25:56.000Z |
defmodule Ockam.MixProject do
use Mix.Project
@version "0.1.0"
@elixir_requirement "~> 1.10"
@ockam_github_repo "https://github.com/ockam-network/ockam"
@ockam_github_repo_path "implementations/elixir/ockam/ockam"
def project do
[
app: :ockam,
version: @version,
elixir: @elixir_requirement,
consolidate_protocols: Mix.env() != :test,
elixirc_options: [warnings_as_errors: true],
deps: deps(),
aliases: aliases(),
# lint
dialyzer: [
flags: [:error_handling],
plt_add_apps: [:ranch, :telemetry, :ockam_vault_software]
],
# test
test_coverage: [output: "_build/cover"],
preferred_cli_env: ["test.cover": :test],
elixirc_paths: elixirc_paths(Mix.env()),
# hex
description: "A collection of tools for building connected systems that you can trust.",
package: package(),
# docs
name: "Ockam",
docs: docs()
]
end
# mix help compile.app for more
def application do
[
mod: {Ockam, []},
extra_applications: [:logger]
]
end
defp deps do
[
{:bare, "~> 0.1.1"},
{:gen_state_machine, "~> 3.0"},
{:ockam_vault_software, path: "../ockam_vault_software", optional: true},
{:telemetry, "~> 0.4.2", optional: true},
{:ranch, "~> 1.8", optional: true},
{:ex_doc, "~> 0.24.0", only: :dev, runtime: false},
{:credo, "~> 1.5", only: [:dev, :test], runtime: false},
{:dialyxir, "~> 1.1", only: [:dev], runtime: false}
]
end
# used by hex
defp package do
[
links: %{"GitHub" => @ockam_github_repo},
licenses: ["Apache-2.0"]
]
end
# used by ex_doc
defp docs do
[
main: "Ockam",
source_url_pattern:
"#{@ockam_github_repo}/blob/v#{@version}/#{@ockam_github_repo_path}/%{path}#L%{line}"
]
end
defp elixirc_paths(:test), do: ["lib", "test/ockam/helpers"]
defp elixirc_paths(_), do: ["lib"]
defp aliases do
[
credo: "credo --strict",
docs: "docs --output _build/docs --formatter html",
"test.cover": "test --no-start --cover",
"lint.format": "format --check-formatted",
"lint.credo": "credo --strict",
"lint.dialyzer": "dialyzer --format dialyxir",
lint: ["lint.format", "lint.credo"]
]
end
end
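# Illustrative invocations enabled by the aliases and settings above:
#
#     mix lint          # lint.format + lint.credo
#     mix test.cover    # test --no-start --cover, report under _build/cover
#     mix docs          # HTML docs written to _build/docs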
| 24.642105 | 94 | 0.578812 |