hexsha (stringlengths 40-40) | size (int64 2-991k) | ext (stringclasses 2 values) | lang (stringclasses 1 value) | max_stars_repo_path (stringlengths 4-208) | max_stars_repo_name (stringlengths 6-106) | max_stars_repo_head_hexsha (stringlengths 40-40) | max_stars_repo_licenses (sequence) | max_stars_count (int64 1-33.5k, ⌀) | max_stars_repo_stars_event_min_datetime (stringlengths 24-24, ⌀) | max_stars_repo_stars_event_max_datetime (stringlengths 24-24, ⌀) | max_issues_repo_path (stringlengths 4-208) | max_issues_repo_name (stringlengths 6-106) | max_issues_repo_head_hexsha (stringlengths 40-40) | max_issues_repo_licenses (sequence) | max_issues_count (int64 1-16.3k, ⌀) | max_issues_repo_issues_event_min_datetime (stringlengths 24-24, ⌀) | max_issues_repo_issues_event_max_datetime (stringlengths 24-24, ⌀) | max_forks_repo_path (stringlengths 4-208) | max_forks_repo_name (stringlengths 6-106) | max_forks_repo_head_hexsha (stringlengths 40-40) | max_forks_repo_licenses (sequence) | max_forks_count (int64 1-6.91k, ⌀) | max_forks_repo_forks_event_min_datetime (stringlengths 24-24, ⌀) | max_forks_repo_forks_event_max_datetime (stringlengths 24-24, ⌀) | content (stringlengths 2-991k) | avg_line_length (float64 1-36k) | max_line_length (int64 1-977k) | alphanum_fraction (float64 0-1)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
03452ba7e267539f66cc4c735047fa124d88ddc0 | 885 | ex | Elixir | lib/pummpcomm/history/cal_bg_for_ph.ex | infinity-aps/pummpcomm | 7380585ecd110ab1c19d2aea3880e51e3f433050 | [
"MIT"
] | 15 | 2017-08-31T00:58:47.000Z | 2020-01-12T03:53:13.000Z | lib/pummpcomm/history/cal_bg_for_ph.ex | vladhj38/pummpcomm | 7380585ecd110ab1c19d2aea3880e51e3f433050 | [
"MIT"
] | 1 | 2017-09-15T02:09:31.000Z | 2017-09-15T02:09:31.000Z | lib/pummpcomm/history/cal_bg_for_ph.ex | vladhj38/pummpcomm | 7380585ecd110ab1c19d2aea3880e51e3f433050 | [
"MIT"
] | 3 | 2017-09-10T17:24:59.000Z | 2019-09-10T19:41:49.000Z | defmodule Pummpcomm.History.CalBGForPH do
@moduledoc """
Calibration blood glucose for pump history.
"""
use Bitwise
alias Pummpcomm.{BloodGlucose, DateDecoder}
@behaviour Pummpcomm.History.Decoder
# Functions
## Pummpcomm.History.Decoder callbacks
@doc """
Blood glucose `amount`
"""
@impl Pummpcomm.History.Decoder
@spec decode(binary, Pummpcomm.PumpModel.pump_options()) :: %{
amount: BloodGlucose.blood_glucose(),
timestamp: NaiveDateTime.t()
}
def decode(body, pump_options)
def decode(<<amount::8, timestamp::binary-size(5)>>, _) do
<<_::size(16), amount_high_bit::size(1), _::size(15), amount_medium_bit::size(1), _::size(7)>> =
timestamp
%{
amount: (amount_high_bit <<< 9) + (amount_medium_bit <<< 8) + amount,
timestamp: DateDecoder.decode_history_timestamp(timestamp)
}
end
end
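# Illustrative note (not part of the original file): the glucose amount is a
# 10-bit value whose two high bits are tucked into the timestamp bytes. For
# example, with a low byte of 44 and both extension bits set, the decoded
# amount is:
#
#     iex> use Bitwise
#     iex> (1 <<< 9) + (1 <<< 8) + 44
#     812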
| 25.285714 | 100 | 0.667797 |
034561a479eaa4740ef246d0a131c25513b028d5 | 2,294 | ex | Elixir | lib/stripe/core_resources/file_upload.ex | erhlee-bird/stripity_stripe | 8c4c5712f391bf76e0a168125882c85048d3192f | [
"BSD-3-Clause"
] | 555 | 2016-11-29T05:02:27.000Z | 2022-03-30T00:47:59.000Z | lib/stripe/core_resources/file_upload.ex | erhlee-bird/stripity_stripe | 8c4c5712f391bf76e0a168125882c85048d3192f | [
"BSD-3-Clause"
] | 532 | 2016-11-28T18:22:25.000Z | 2022-03-30T17:04:32.000Z | lib/stripe/core_resources/file_upload.ex | erhlee-bird/stripity_stripe | 8c4c5712f391bf76e0a168125882c85048d3192f | [
"BSD-3-Clause"
] | 296 | 2016-12-05T14:04:09.000Z | 2022-03-28T20:39:37.000Z | defmodule Stripe.FileUpload do
@moduledoc """
Work with Stripe file_upload objects.
You can:
- Create a file
- Retrieve a file
- List all files
Stripe API reference: https://stripe.com/docs/api/files
"""
use Stripe.Entity
import Stripe.Request
@type t :: %__MODULE__{
id: Stripe.id(),
object: String.t(),
created: Stripe.timestamp(),
expires_at: Stripe.timestamp(),
filename: String.t() | nil,
links: Stripe.List.t(Stripe.FileLink.t()),
purpose: String.t(),
size: integer,
title: String.t() | nil,
type: String.t() | nil,
url: String.t() | nil
}
defstruct [
:id,
:object,
:created,
:expires_at,
:filename,
:links,
:purpose,
:size,
:title,
:type,
:url
]
@plural_endpoint "files"
@doc """
Create a file according to Stripe's file_upload rules.
Takes the filepath and the purpose.
"""
@spec create(map, Keyword.t()) :: {:ok, t} | {:error, Stripe.Error.t()}
def create(%{file: _, purpose: _} = params, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint)
|> put_method(:post)
|> put_params(params)
|> make_file_upload_request()
end
@doc """
Retrieve a file_upload.
"""
@spec retrieve(Stripe.id() | t, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
def retrieve(id, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
|> put_method(:get)
|> make_request()
end
@doc """
List all file uploads, going back up to 30 days.
"""
@spec list(params, Stripe.options()) :: {:ok, Stripe.List.t(t)} | {:error, Stripe.Error.t()}
when params:
%{
optional(:ending_before) => t | Stripe.id(),
optional(:limit) => 1..100,
optional(:purpose) => String.t(),
optional(:starting_after) => t | Stripe.id()
}
| %{}
def list(params \\ %{}, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint)
|> put_method(:get)
|> put_params(params)
|> cast_to_id([:ending_before, :starting_after, :limit, :purpose])
|> make_request()
end
end
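# Illustrative usage sketch (assumes a configured Stripe API key; the file path
# and purpose below are examples, not taken from this file):
#
#     {:ok, upload} = Stripe.FileUpload.create(%{file: "/tmp/evidence.pdf", purpose: "dispute_evidence"})
#     {:ok, _upload} = Stripe.FileUpload.retrieve(upload.id)
#     {:ok, %Stripe.List{data: _uploads}} = Stripe.FileUpload.list(%{limit: 10})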
| 24.666667 | 94 | 0.553182 |
03456dbf908132c27d4218f0db80364cd4b7c9a3 | 3,712 | exs | Elixir | test/ex_alsa_test.exs | eleanor-daw/ex_alsa | e52c8d2fa5b4a9c5da619b16be4f9dbbd3def7bb | [
"MIT"
] | 1 | 2021-12-18T21:42:38.000Z | 2021-12-18T21:42:38.000Z | test/ex_alsa_test.exs | eleanor-daw/ex_alsa | e52c8d2fa5b4a9c5da619b16be4f9dbbd3def7bb | [
"MIT"
] | null | null | null | test/ex_alsa_test.exs | eleanor-daw/ex_alsa | e52c8d2fa5b4a9c5da619b16be4f9dbbd3def7bb | [
"MIT"
] | null | null | null | defmodule ExAlsaTest do
use ExUnit.Case
doctest ExAlsa
describe "ExAlsa" do
test "open_handle/1 returns handle for default device" do
{:ok, _handle} = ExAlsa.open_handle("default")
end
test "open_handle/1 raises error when device does not exist" do
assert_raise ArgumentError, fn ->
ExAlsa.open_handle("device-does-not-exist")
end
end
test "set_params/2 raises error when device does not exist" do
{:ok, handle} = ExAlsa.open_handle("default")
"""
options = %{
sample_rate: 44100,
channels: 1,
buffer_time: 500000,
period_time: 2,
stop_threshold:
}
{:ok, {sample_rate, channels, buffer_size, period_size, stop_threshold}} = ExAlsa.set_params(handle, 1, 44100, 512, 2) |> IO.inspect
"""
end
test "performance test" do
{:ok, handle} = ExAlsa.open_handle("default")
{:ok, %{
buffer_size: buffer_size,
periods: periods,
period_size: period_size,
rate: rate,
start_threshold: start_threshold,
stop_threshold: stop_threshold,
}} =
ExAlsa.set_params(handle, %{
channels: 1,
rate: 44100,
periods: 2,
period_size: 2000,
start_threshold: 100
})
seconds = 3
frames = Enum.take(sin_freq(220, seconds), 44100 * seconds)
seconds_per_frame = 1.0 / 44100.0
frames = get_frames(seconds / 3)
send_frame(frames, handle, 940)
end
end
@doc """
This is one implementation of a proper way to stream continuous playback. It's necessary
to read the available buffer frames to know how many more frames to send. You cannot
  rely on sending a constant number of frames every time without experiencing xruns (see documentation).
Introduce a sleep which will decrease the calls to write but increase the number of
frames sent. There is a natural maximum for this depending on your configuration
in order to avoid underruns. (see documentation for calculating this).
"""
defp send_frame(frame, handle, n) do
unless n == 0 do
:timer.sleep(0)
case ExAlsa.write(handle, Enum.take(frame, n)) do
{:error, requested} ->
send_frame(frame, handle, requested)
{:ok, sent, requested} ->
frame = Enum.drop(frame, sent)
send_frame(frame, handle, min(Enum.count(frame), requested))
end
end
end
defp transform_freq(list) do
Enum.reduce(list, 1, &(&1 * &2))
end
defp mix(list1, list2) do
Enum.sum(list1) / Enum.count(list1)
end
defp get_frames(seconds) do
pitch = 240.0
freq = [
[sin_freq(100, seconds), sin_freq(110, seconds), sin_freq(55, seconds)],
[sin_freq(220, seconds), sin_freq(110, seconds), sin_freq(55, seconds)],
[sin_freq(420, seconds), sin_freq(110, seconds), sin_freq(55, seconds)],
[sin_freq(120, seconds), sin_freq(220, seconds), sin_freq(55, seconds)],
[sin_freq(100, seconds), sin_freq(110, seconds), sin_freq(55, seconds)],
[sin_freq(220, seconds), sin_freq(110, seconds), sin_freq(55, seconds)],
[sin_freq(420, seconds), sin_freq(220, seconds), sin_freq(55, seconds)],
[sin_freq(120, seconds), sin_freq(110, seconds), sin_freq(55, seconds)]
]
freq
|> Enum.flat_map(fn freqs -> Enum.zip_with(freqs, &transform_freq/1) end)
end
defp pause(time) do
Enum.map(0..floor(44100 * time), fn _ -> 0 end)
end
defp sin_freq(pitch, time) do
radians_per_second = pitch * 2.0 * :math.pi()
seconds_per_frame = 1.0 / 44100.0
Enum.map(0..floor(44100 * time), fn i ->
:math.sin(radians_per_second * i * seconds_per_frame)
end)
end
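  # Quick sanity check on the formula above (illustrative): the phase advances
  # by radians_per_second * seconds_per_frame = 2 * pi * 220 / 44_100 per
  # frame for a 220 Hz pitch, so one full cycle spans roughly
  # 44_100 / 220 ≈ 200 frames.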
end
| 30.677686 | 138 | 0.636315 |
03457eec7c1b2a62f1a924bea104eefb137a9cc2 | 1,091 | exs | Elixir | apps/counter/mix.exs | twDuke/myopic | 74b2322eb638fc3a21d0d7f5578bf16f142a711b | [
"MIT"
] | null | null | null | apps/counter/mix.exs | twDuke/myopic | 74b2322eb638fc3a21d0d7f5578bf16f142a711b | [
"MIT"
] | null | null | null | apps/counter/mix.exs | twDuke/myopic | 74b2322eb638fc3a21d0d7f5578bf16f142a711b | [
"MIT"
] | null | null | null | defmodule Counter.Mixfile do
use Mix.Project
def project do
[app: :counter,
version: "0.1.0",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.4",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
# Specify extra applications you'll use from Erlang/Elixir
[extra_applications: [:logger],
mod: {Counter.Application, []}]
end
# Dependencies can be Hex packages:
#
# {:my_dep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:my_dep, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
#
# To depend on another app inside the umbrella:
#
# {:my_app, in_umbrella: true}
#
# Type "mix help deps" for more examples and options
defp deps do
[
{:commands, in_umbrella: true},
{:events, in_umbrella: true},
]
end
end
| 23.717391 | 79 | 0.601283 |
0345841a4f7ded561e35b96a6077dcd38cdc4ac4 | 529 | exs | Elixir | apps/exred_ui/priv/repo/migrations/20180105230904_create_nodes.exs | exredorg/exred | 0ece8e6680747ba8f30b4413ede598a45495aa7c | [
"MIT"
] | null | null | null | apps/exred_ui/priv/repo/migrations/20180105230904_create_nodes.exs | exredorg/exred | 0ece8e6680747ba8f30b4413ede598a45495aa7c | [
"MIT"
] | null | null | null | apps/exred_ui/priv/repo/migrations/20180105230904_create_nodes.exs | exredorg/exred | 0ece8e6680747ba8f30b4413ede598a45495aa7c | [
"MIT"
] | null | null | null | defmodule ExredUI.Repo.Migrations.CreateNodes do
use Ecto.Migration
def change do
create table(:nodes, primary_key: false) do
add :id, :uuid, primary_key: true
add :type, :string
add :name, :string
add :category, :string
add :module, :string
add :config, :map
add :info, :string
add :is_prototype, :boolean, default: false
add :flow_id, references(:flows, on_delete: :nothing, type: :uuid)
timestamps()
end
create index(:nodes, [:flow_id])
end
end
| 24.045455 | 72 | 0.635161 |
0345c879fbc031bf41237eaa1b612a12ebf5cc00 | 918 | ex | Elixir | test/support/test_consumer.ex | zmstone/kastlex | 3478bc230f08bde99e768067787b0ef3f8e1c026 | [
"Apache-2.0"
] | 38 | 2016-10-21T08:26:15.000Z | 2021-03-19T23:10:40.000Z | test/support/test_consumer.ex | zmstone/kastlex | 3478bc230f08bde99e768067787b0ef3f8e1c026 | [
"Apache-2.0"
] | 32 | 2017-02-19T08:57:35.000Z | 2020-02-05T13:15:11.000Z | test/support/test_consumer.ex | zmstone/kastlex | 3478bc230f08bde99e768067787b0ef3f8e1c026 | [
"Apache-2.0"
] | 6 | 2017-11-24T16:02:20.000Z | 2022-02-22T07:44:36.000Z | defmodule Kastlex.TestConsumer do
require Logger
import Record, only: [defrecord: 2, extract: 2]
defrecord :kafka_message, extract(:kafka_message, from_lib: "brod/include/brod.hrl")
def init(_group_id, state) do
send state.parent, :init
{:ok, state}
end
def handle_message(_topic, _partition, message, state) do
kafka_message(key: key, value: value, offset: offset) = message
Logger.debug("Got message #{key}:#{value}@#{offset}")
send state.parent, {key, value, offset}
{:ok, :ack, state}
end
def start(client_id, topic, group_id) do
group_config = [offset_commit_policy: :commit_to_kafka_v2,
offset_commit_interval_seconds: 1
]
consumer_config = [begin_offset: :latest]
:brod.start_link_group_subscriber(client_id, group_id, [topic],
group_config, consumer_config, __MODULE__, %{:parent => Kernel.self()})
end
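  # Illustrative usage sketch (client id, topic and group names are made up):
  #
  #     {:ok, _subscriber} = Kastlex.TestConsumer.start(:test_client, "test-topic", "test-group")
  #
  # The calling process then receives :init once the group subscriber is
  # initialized, followed by one {key, value, offset} tuple per consumed message.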
end
| 32.785714 | 86 | 0.683007 |
0345ebc737d9dc815e4940197b31a33659cdfd0a | 3,072 | ex | Elixir | lib/mix/lib/mix/cli.ex | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/cli.ex | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/cli.ex | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | null | null | null | defmodule Mix.CLI do
@moduledoc false
@doc """
Runs Mix according to the command line arguments.
"""
def main(args \\ System.argv) do
Mix.Local.append_archives
Mix.Local.append_paths
case check_for_shortcuts(args) do
:help ->
proceed(["help"])
:version ->
display_version()
nil ->
proceed(args)
end
end
defp proceed(args) do
_ = Mix.Tasks.Local.Hex.ensure_updated?()
load_dot_config()
args = load_mixfile(args)
{task, args} = get_task(args)
change_env(task)
run_task(task, args)
end
defp load_mixfile(args) do
file = System.get_env("MIX_EXS") || "mix.exs"
_ = if File.regular?(file) do
Code.load_file(file)
end
args
end
defp get_task(["-" <> _|_]) do
Mix.shell.error "** (Mix) Cannot implicitly pass flags to default mix task, " <>
"please invoke instead: mix #{Mix.Project.config[:default_task]}"
exit({:shutdown, 1})
end
defp get_task([h|t]) do
{h, t}
end
defp get_task([]) do
{Mix.Project.config[:default_task], []}
end
defp run_task(name, args) do
try do
Mix.Task.run "loadconfig"
# If the task is not available, let's try to
# compile the repository and then run it again.
cond do
Mix.Task.get(name) ->
Mix.Task.run(name, args)
Mix.Project.get ->
Mix.Task.run("compile")
Mix.Task.run(name, args)
true ->
# Raise no task error
Mix.Task.get!(name)
end
rescue
# We only rescue exceptions in the mix namespace, all
# others pass through and will explode on the users face
exception ->
stacktrace = System.stacktrace
if Map.get(exception, :mix) do
mod = exception.__struct__ |> Module.split() |> Enum.at(0, "Mix")
Mix.shell.error "** (#{mod}) #{Exception.message(exception)}"
exit({:shutdown, 1})
else
reraise exception, stacktrace
end
end
end
defp change_env(task) do
if nil?(System.get_env("MIX_ENV")) &&
(env = preferred_cli_env(task)) do
Mix.env(env)
if project = Mix.Project.pop do
%{name: name, file: file} = project
Mix.Project.push name, file
end
end
end
defp preferred_cli_env(task) do
task = String.to_atom(task)
Mix.Project.config[:preferred_cli_env][task] || default_cli_env(task)
end
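  # For example (illustrative), a project can force a task to always run in the
  # :test environment by adding to the project/0 keyword list in its mix.exs:
  #
  #     preferred_cli_env: ["test.watch": :test]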
defp default_cli_env(:test), do: :test
defp default_cli_env(_), do: nil
defp load_dot_config do
path = Path.join(Mix.Utils.mix_home, "config.exs")
if File.regular?(path) do
Mix.Task.run "loadconfig", [path]
end
end
defp display_version() do
IO.puts "Elixir #{System.version}"
end
# Check for --help or --version in the args
defp check_for_shortcuts([first_arg|_]) when first_arg in
["--help", "-h", "-help"], do: :help
defp check_for_shortcuts([first_arg|_]) when first_arg in
["--version", "-v"], do: :version
defp check_for_shortcuts(_), do: nil
end
| 24.97561 | 85 | 0.604492 |
034619779132948e2e14a52269eea9e80628a05e | 5,669 | ex | Elixir | lib/membrane/rtp/vad.ex | simoexpo/membrane_rtp_plugin | 925053eb6ad0befbfe79ab1dad51e40f3b68ae69 | [
"Apache-2.0"
] | null | null | null | lib/membrane/rtp/vad.ex | simoexpo/membrane_rtp_plugin | 925053eb6ad0befbfe79ab1dad51e40f3b68ae69 | [
"Apache-2.0"
] | null | null | null | lib/membrane/rtp/vad.ex | simoexpo/membrane_rtp_plugin | 925053eb6ad0befbfe79ab1dad51e40f3b68ae69 | [
"Apache-2.0"
] | null | null | null | defmodule Membrane.RTP.VAD do
@moduledoc """
  Simple VAD (voice activity detection) based on the audio level sent in the RTP header extension.
  To make this module work, an appropriate RTP header extension has to be set in the SDP offer/answer.
  If the average audio level of the packets received within `time_window` exceeds `vad_threshold`,
  it emits the notification `t:speech_notification_t/0`.
  When the average falls below `vad_threshold` and doesn't exceed it again within the next
  `vad_silence_time`, it emits the notification `t:silence_notification_t/0`.
"""
use Membrane.Filter
def_input_pad :input,
availability: :always,
caps: :any,
demand_unit: :buffers
def_output_pad :output,
availability: :always,
caps: :any
def_options time_window: [
spec: pos_integer(),
default: 2_000_000_000,
description: "Time window (in `ns`) in which avg audio level is measured."
],
min_packet_num: [
spec: pos_integer(),
default: 50,
description: """
Minimal number of packets to count avg audio level from.
Speech won't be detected until there are enough packets.
"""
],
vad_threshold: [
spec: -127..0,
default: -50,
description: """
Audio level in dBov representing vad threshold.
Values above are considered to represent voice activity.
Value -127 represents digital silence.
"""
],
vad_silence_time: [
spec: pos_integer(),
default: 300,
description: """
Time to wait before emitting notification `t:silence_notification_t/0` after audio track is
no longer considered to represent speech.
If at this time audio track is considered to represent speech again the notification will not be sent.
"""
]
@typedoc """
Notification sent after detecting speech activity.
"""
@type speech_notification_t() :: {:vad, :speech}
@typedoc """
Notification sent after detecting silence activity.
"""
@type silence_notification_t() :: {:vad, :silence}
@impl true
def handle_init(opts) do
state = %{
audio_levels: Qex.new(),
vad: :silence,
vad_silence_timestamp: 0,
current_timestamp: 0,
time_window: opts.time_window,
min_packet_num: opts.min_packet_num,
vad_threshold: opts.vad_threshold,
vad_silence_time: opts.vad_silence_time,
audio_levels_sum: 0,
audio_levels_count: 0
}
{:ok, state}
end
@impl true
def handle_demand(:output, size, :buffers, _ctx, state) do
{{:ok, demand: {:input, size}}, state}
end
@impl true
def handle_process(:input, %Membrane.Buffer{} = buffer, _ctx, state) do
<<_id::4, _len::4, _v::1, level::7, _rest::binary-size(2)>> =
buffer.metadata.rtp.extension.data
state = %{state | current_timestamp: buffer.metadata.timestamp}
state = filter_old_audio_levels(state)
state = add_new_audio_level(state, level)
audio_levels_vad = get_audio_levels_vad(state)
actions = [buffer: {:output, buffer}] ++ maybe_notify(audio_levels_vad, state)
state = update_vad_state(audio_levels_vad, state)
{{:ok, actions}, state}
end
defp filter_old_audio_levels(state) do
Enum.reduce_while(state.audio_levels, state, fn {level, timestamp}, state ->
if state.current_timestamp - timestamp > state.time_window do
{_level, audio_levels} = Qex.pop(state.audio_levels)
state = %{
state
| audio_levels_sum: state.audio_levels_sum - level,
audio_levels_count: state.audio_levels_count - 1,
audio_levels: audio_levels
}
{:cont, state}
else
{:halt, state}
end
end)
end
defp add_new_audio_level(state, level) do
audio_levels = Qex.push(state.audio_levels, {-level, state.current_timestamp})
state = %{state | audio_levels: audio_levels}
state = %{state | audio_levels_sum: state.audio_levels_sum + -level}
%{state | audio_levels_count: state.audio_levels_count + 1}
end
defp get_audio_levels_vad(state) do
if state.audio_levels_count >= state.min_packet_num and avg(state) >= state.vad_threshold,
do: :speech,
else: :silence
end
defp avg(state), do: state.audio_levels_sum / state.audio_levels_count
defp maybe_notify(audio_levels_vad, state) do
if vad_silence?(audio_levels_vad, state) or vad_speech?(audio_levels_vad, state) do
[notify: {:vad, audio_levels_vad}]
else
[]
end
end
defp update_vad_state(audio_levels_vad, state) do
cond do
vad_maybe_silence?(audio_levels_vad, state) ->
Map.merge(state, %{vad: :maybe_silence, vad_silence_timestamp: state.current_timestamp})
vad_silence?(audio_levels_vad, state) or vad_speech?(audio_levels_vad, state) ->
Map.merge(state, %{vad: audio_levels_vad})
true ->
state
end
end
defp vad_silence?(audio_levels_vad, state),
do: state.vad == :maybe_silence and audio_levels_vad == :silence and timer_expired?(state)
defp vad_speech?(audio_levels_vad, state) do
(state.vad == :maybe_silence and audio_levels_vad == :speech) or
(state.vad == :silence and audio_levels_vad == :speech)
end
defp vad_maybe_silence?(audio_levels_vad, state),
do: state.vad == :speech and audio_levels_vad == :silence
defp timer_expired?(state),
do: state.current_timestamp - state.vad_silence_timestamp > state.vad_silence_time
end
| 32.959302 | 118 | 0.644382 |
034622ac19f58cc605dd155ce407565cc8761ced | 1,438 | ex | Elixir | exrack_ui/lib/exrack_ui_web/router.ex | jirimakarius/exrack | e499ea62f61000463360adb6b2b7a9ce9695467f | [
"MIT"
] | null | null | null | exrack_ui/lib/exrack_ui_web/router.ex | jirimakarius/exrack | e499ea62f61000463360adb6b2b7a9ce9695467f | [
"MIT"
] | null | null | null | exrack_ui/lib/exrack_ui_web/router.ex | jirimakarius/exrack | e499ea62f61000463360adb6b2b7a9ce9695467f | [
"MIT"
] | null | null | null | defmodule ExRackUIWeb.Router do
use ExRackUIWeb, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_live_flash
plug :put_root_layout, {ExRackUIWeb.LayoutView, :root}
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", ExRackUIWeb do
pipe_through :browser
live "/", TestView
end
# Other scopes may use custom stacks.
# scope "/api", ExRackUIWeb do
# pipe_through :api
# end
# Enables LiveDashboard only for development
#
# If you want to use the LiveDashboard in production, you should put
# it behind authentication and allow only admins to access it.
# If your application does not have an admins-only section yet,
# you can use Plug.BasicAuth to set up some basic authentication
# as long as you are also using SSL (which you should anyway).
if Mix.env() in [:dev, :test] do
import Phoenix.LiveDashboard.Router
scope "/" do
pipe_through :browser
live_dashboard "/dashboard", metrics: ExRackUIWeb.Telemetry
end
end
# Enables the Swoosh mailbox preview in development.
#
# Note that preview only shows emails that were sent by the same
# node running the Phoenix server.
if Mix.env() == :dev do
scope "/dev" do
pipe_through :browser
forward "/mailbox", Plug.Swoosh.MailboxPreview
end
end
end
| 25.22807 | 70 | 0.696801 |
0346649d8bc5d3b33a5b6c8657500d3c851167d7 | 256 | exs | Elixir | priv/repo/migrations/20180111204242_edit_daily_quotes_unique_index.exs | allen-garvey/block-quote-phoenix | 5c0f5d16daf6bb515a8f1846c3e4311b368a7bdb | [
"MIT"
] | 4 | 2019-10-04T16:11:15.000Z | 2021-08-18T21:00:13.000Z | apps/blockquote/priv/repo/migrations/20180111204242_edit_daily_quotes_unique_index.exs | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | 5 | 2020-03-16T23:52:25.000Z | 2021-09-03T16:52:17.000Z | apps/blockquote/priv/repo/migrations/20180111204242_edit_daily_quotes_unique_index.exs | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | null | null | null | defmodule Blockquote.Repo.Migrations.EditDailyQuotesUniqueIndex do
use Ecto.Migration
def change do
drop index(:daily_quotes, [:quote_id, :date_used], name: :daily_quote_unique_index)
create unique_index(:daily_quotes, [:date_used])
end
end
| 28.444444 | 87 | 0.777344 |
03467b50bc6b029820392673e03aa81d24611027 | 71 | ex | Elixir | phx_csvchart/lib/phx_csvchart_web/views/layout_view.ex | NervesJP/fukuokaex11 | 8bd3e5bf5f4c0a2f08a2a3e16a94a56468eb1aa3 | [
"Apache-2.0"
] | 13 | 2018-06-22T10:41:17.000Z | 2018-09-01T02:34:33.000Z | phx_csvchart/lib/phx_csvchart_web/views/layout_view.ex | takasehideki/fukuokaex11 | 8bd3e5bf5f4c0a2f08a2a3e16a94a56468eb1aa3 | [
"Apache-2.0"
] | null | null | null | phx_csvchart/lib/phx_csvchart_web/views/layout_view.ex | takasehideki/fukuokaex11 | 8bd3e5bf5f4c0a2f08a2a3e16a94a56468eb1aa3 | [
"Apache-2.0"
] | 1 | 2022-01-20T13:12:18.000Z | 2022-01-20T13:12:18.000Z | defmodule PhxCsvchartWeb.LayoutView do
use PhxCsvchartWeb, :view
end
| 17.75 | 38 | 0.830986 |
0346921b33f98eb4fe8ed0ec16728f2442c46243 | 2,711 | ex | Elixir | lib/google_api/you_tube/v3/model/playlist.ex | jesteracer/ytb | 67e3cab899e4f69e586383f7be2c3855c6beea49 | [
"Apache-2.0"
] | null | null | null | lib/google_api/you_tube/v3/model/playlist.ex | jesteracer/ytb | 67e3cab899e4f69e586383f7be2c3855c6beea49 | [
"Apache-2.0"
] | null | null | null | lib/google_api/you_tube/v3/model/playlist.ex | jesteracer/ytb | 67e3cab899e4f69e586383f7be2c3855c6beea49 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.Playlist do
@moduledoc """
A playlist resource represents a YouTube playlist. A playlist is a collection of videos that can be viewed sequentially and shared with other users. A playlist can contain up to 200 videos, and YouTube does not limit the number of playlists that each user creates. By default, playlists are publicly visible to other users, but playlists can be public or private. YouTube also uses playlists to identify special collections of videos for a channel, such as: - uploaded videos - favorite videos - positively rated (liked) videos - watch history - watch later To be more specific, these lists are associated with a channel, which is a collection of a person, group, or company's videos, playlists, and other YouTube information. You can retrieve the playlist IDs for each of these lists from the channel resource for a given channel. You can then use the playlistItems.list method to retrieve any of those lists. You can also add or remove items from those lists by calling the playlistItems.insert and playlistItems.delete methods.
"""
@derive [Poison.Encoder]
defstruct [
:"contentDetails",
:"etag",
:"id",
:"kind",
:"localizations",
:"player",
:"snippet",
:"status"
]
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.Playlist do
import GoogleApi.YouTube.V3.Deserializer
def decode(value, options) do
value
|> deserialize(:"contentDetails", :struct, GoogleApi.YouTube.V3.Model.PlaylistContentDetails, options)
|> deserialize(:"localizations", :map, GoogleApi.YouTube.V3.Model.PlaylistLocalization, options)
|> deserialize(:"player", :struct, GoogleApi.YouTube.V3.Model.PlaylistPlayer, options)
|> deserialize(:"snippet", :struct, GoogleApi.YouTube.V3.Model.PlaylistSnippet, options)
|> deserialize(:"status", :struct, GoogleApi.YouTube.V3.Model.PlaylistStatus, options)
end
end
| 54.22 | 1,052 | 0.752859 |
0346a2d55b665cc094b9fed44470429c2fa96fe0 | 865 | exs | Elixir | .travis/test.exs | jgsmith/ex_venture | 546adaa8fe80d45a72fde6de8d8d6906902c12d4 | [
"MIT"
] | 1 | 2019-02-10T10:22:39.000Z | 2019-02-10T10:22:39.000Z | .travis/test.exs | jgsmith/ex_venture | 546adaa8fe80d45a72fde6de8d8d6906902c12d4 | [
"MIT"
] | null | null | null | .travis/test.exs | jgsmith/ex_venture | 546adaa8fe80d45a72fde6de8d8d6906902c12d4 | [
"MIT"
] | null | null | null | use Mix.Config
config :logger, level: :error
config :ex_venture, Data.Repo,
database: "ex_venture_test",
hostname: "localhost",
username: "exventure",
password: "password",
port: 5433,
pool: Ecto.Adapters.SQL.Sandbox
config :ex_venture, :networking,
host: "localhost",
port: 5555,
server: false,
socket_module: Test.Networking.Socket
config :ex_venture, :game,
world: false,
npc: Test.Game.NPC,
zone: Test.Game.Zone,
room: Test.Game.Room,
environment: Test.Game.Environment,
shop: Test.Game.Shop,
zone: Test.Game.Zone,
rand: Test.ChanceSuccess,
report_players: false,
continue_wait: 10,
random_effect_range: 0..0
config :ex_venture, :npc, reaction_time_ms: 0
config :bcrypt_elixir, :log_rounds, 4
config :ex_venture, :mailer, from: "[email protected]"
config :ex_venture, ExVenture.Mailer, adapter: Bamboo.TestAdapter
| 22.763158 | 65 | 0.728324 |
0346c1956fc5b486096f979e0048775c66b585d2 | 1,543 | ex | Elixir | lib/akkad_web/views/error_helpers.ex | thelastinuit/akkad | 08df3f51daeada737c53d07663c166a5e6cc297e | [
"MIT"
] | 1 | 2022-03-05T00:05:26.000Z | 2022-03-05T00:05:26.000Z | lib/akkad_web/views/error_helpers.ex | thelastinuit/akkad | 08df3f51daeada737c53d07663c166a5e6cc297e | [
"MIT"
] | null | null | null | lib/akkad_web/views/error_helpers.ex | thelastinuit/akkad | 08df3f51daeada737c53d07663c166a5e6cc297e | [
"MIT"
] | null | null | null | defmodule AkkadWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
Enum.map(Keyword.get_values(form.errors, field), fn error ->
content_tag(:span, translate_error(error),
class: "invalid-feedback",
phx_feedback_for: input_name(form, field)
)
end)
end
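  # Illustrative template usage (form and field names are made up):
  #
  #     <%= error_tag f, :email %>
  #
  # renders one <span class="invalid-feedback"> per error on :email.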
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# When using gettext, we typically pass the strings we want
# to translate as a static argument:
#
# # Translate "is invalid" in the "errors" domain
# dgettext("errors", "is invalid")
#
# # Translate the number of files with plural rules
# dngettext("errors", "1 file", "%{count} files", count)
#
# Because the error messages we show in our forms and APIs
# are defined inside Ecto, we need to translate them dynamically.
# This requires us to call the Gettext module passing our gettext
# backend as first argument.
#
# Note we use the "errors" domain, which means translations
# should be written to the errors.po file. The :count option is
# set by Ecto and indicates we should also apply plural rules.
if count = opts[:count] do
Gettext.dngettext(AkkadWeb.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(AkkadWeb.Gettext, "errors", msg, opts)
end
end
end
| 32.145833 | 74 | 0.66429 |
0346d2734578c3b448dd4eaea449ce6a0a09f3d7 | 1,514 | ex | Elixir | lib/composers/ota_read/request.ex | ChannexIO/ex_open_travel | 51a1101f55bc2d12a093237bb9ef64ef8a4d3091 | [
"Apache-2.0"
] | null | null | null | lib/composers/ota_read/request.ex | ChannexIO/ex_open_travel | 51a1101f55bc2d12a093237bb9ef64ef8a4d3091 | [
"Apache-2.0"
] | null | null | null | lib/composers/ota_read/request.ex | ChannexIO/ex_open_travel | 51a1101f55bc2d12a093237bb9ef64ef8a4d3091 | [
"Apache-2.0"
] | null | null | null | defmodule ExOpenTravel.Composers.OtaRead.Request do
alias ExOpenTravel.{Meta, Request}
alias ExOpenTravel.Request.PCIProxies.{ChannexPCI, PCIBooking}
alias ExOpenTravel.Request.Document
@action "OTA_Read"
@type credentials :: %{user: String.t(), password: String.t(), endpoint: String.t()}
@type options :: keyword() | any()
@doc """
  This method is used to send an `OTA_Read` request for the given hotel.
"""
@spec execute(%{hotel_code: String.t()}, credentials, Meta.t(), options) ::
{:ok, struct(), Meta.t()} | {:error, any(), Meta.t()}
def execute(%{hotel_code: _} = params, %{pci_proxy: :pci_booking} = credentials, meta, opts) do
params
|> build_read(meta)
|> Document.build(@action, credentials)
|> PCIBooking.proxy_send(credentials, opts)
end
def execute(%{hotel_code: _} = params, %{pci_proxy: :channex_pci} = credentials, meta, opts) do
params
|> build_read(meta)
|> Document.build(@action, credentials)
|> ChannexPCI.proxy_send(credentials, opts)
end
def execute(%{hotel_code: _} = params, credentials, meta, opts) do
params
|> build_read(Map.put(meta, :method, @action))
|> Document.build(@action, credentials)
|> Request.send(credentials, opts)
end
@spec build_read(%{hotel_code: String.t()}, Meta.t()) ::
{{atom(), map | nil, list | nil}, Meta.t()}
def build_read(%{hotel_code: hotel_code}, meta) do
{{:"ns1:ReadRequests", nil, [{:"ns1:HotelReadRequest", %{HotelCode: "#{hotel_code}"}, nil}]},
meta}
end
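  # Illustrative example of the request tuple produced above (the hotel code is
  # made up; `meta` is returned unchanged):
  #
  #     build_read(%{hotel_code: "HOTEL1"}, meta)
  #     #=> {{:"ns1:ReadRequests", nil,
  #     #     [{:"ns1:HotelReadRequest", %{HotelCode: "HOTEL1"}, nil}]}, meta}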
end
| 35.209302 | 97 | 0.653897 |
0346dbf79380f041672fe15570c8da5ec93f9d16 | 995 | ex | Elixir | elixir/prime-factors/lib/prime_factors.ex | paulfioravanti/exercism | b3bf15814b89b0ef50b9bf062b3c800e80e37e75 | [
"MIT"
] | 6 | 2019-06-19T15:43:20.000Z | 2020-07-17T19:46:09.000Z | elixir/prime-factors/lib/prime_factors.ex | paulfioravanti/exercism | b3bf15814b89b0ef50b9bf062b3c800e80e37e75 | [
"MIT"
] | 10 | 2021-05-10T21:02:55.000Z | 2021-05-11T20:29:41.000Z | elixir/prime-factors/lib/prime_factors.ex | paulfioravanti/exercism | b3bf15814b89b0ef50b9bf062b3c800e80e37e75 | [
"MIT"
] | 1 | 2019-06-25T10:42:14.000Z | 2019-06-25T10:42:14.000Z | defmodule PrimeFactors do
@minimum_prime 2
@final_factor 1
defguardp no_remainder?(number, quotient) when rem(number, quotient) == 0
defguardp final_factor?(number, quotient)
when @final_factor in [number, quotient]
@doc """
Compute the prime factors for 'number'.
The prime factors are prime numbers that when multiplied give the desired
number.
The prime factors of 'number' will be ordered lowest to highest.
"""
@spec factors_for(pos_integer) :: [pos_integer]
def factors_for(1), do: []
def factors_for(number), do: factors_for(number, @minimum_prime, [])
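  # Illustrative example (not part of the original file):
  #
  #     iex> PrimeFactors.factors_for(60)
  #     [2, 2, 3, 5]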
defp factors_for(number, quotient, acc)
when final_factor?(number, quotient) do
Enum.reverse(acc)
end
defp factors_for(number, quotient, acc)
when no_remainder?(number, quotient) do
number
|> div(quotient)
|> factors_for(quotient, [quotient | acc])
end
defp factors_for(number, quotient, acc) do
factors_for(number, quotient + 1, acc)
end
end
| 26.184211 | 75 | 0.700503 |
0346ffb37be7cfa489fdad2c1f4e31cb89bbc7b5 | 969 | ex | Elixir | lib/d01/challenge.ex | Meldanor/AdventOfCode2021 | c0e625865a0b717f73b712c846a2f8f0729ad82b | [
"MIT"
] | null | null | null | lib/d01/challenge.ex | Meldanor/AdventOfCode2021 | c0e625865a0b717f73b712c846a2f8f0729ad82b | [
"MIT"
] | null | null | null | lib/d01/challenge.ex | Meldanor/AdventOfCode2021 | c0e625865a0b717f73b712c846a2f8f0729ad82b | [
"MIT"
] | null | null | null | defmodule D01.Challenge do
@moduledoc false
require Logger
def run(1) do
result =
Utils.read_input(1, &String.to_integer/1)
|> count_increases()
Logger.info("#{result.increased}x increased")
end
def run(2) do
result =
Utils.read_input(1, &String.to_integer/1)
|> sum_windows()
|> count_increases()
Logger.info("3-Sums #{result.increased}x increased")
end
defp sum_windows(values) do
values
|> Enum.chunk_every(3, 1, :discard)
|> Enum.map(&Enum.sum/1)
|> Enum.reject(fn i -> i == 0 end)
end
defp count_increases(values) do
values
|> Enum.reduce(
%{increased: 0, last: Integer.pow(2, 31) - 1},
fn e,
%{
increased: increased,
last: last
} ->
%{increased: increased + increased_scalar(e, last), last: e}
end
)
end
defp increased_scalar(a, b) when a > b, do: 1
defp increased_scalar(_a, _b), do: 0
end
| 20.617021 | 68 | 0.586171 |
034710cba91991776ee5b44bc131f44cf84b4af0 | 6,051 | ex | Elixir | lib/codes/codes_x74.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_x74.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_x74.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_X74 do
alias IcdCode.ICDCode
def _X7401XA do
%ICDCode{full_code: "X7401XA",
category_code: "X74",
short_code: "01XA",
full_name: "Intentional self-harm by airgun, initial encounter",
short_name: "Intentional self-harm by airgun, initial encounter",
category_name: "Intentional self-harm by airgun, initial encounter"
}
end
def _X7401XD do
%ICDCode{full_code: "X7401XD",
category_code: "X74",
short_code: "01XD",
full_name: "Intentional self-harm by airgun, subsequent encounter",
short_name: "Intentional self-harm by airgun, subsequent encounter",
category_name: "Intentional self-harm by airgun, subsequent encounter"
}
end
def _X7401XS do
%ICDCode{full_code: "X7401XS",
category_code: "X74",
short_code: "01XS",
full_name: "Intentional self-harm by airgun, sequela",
short_name: "Intentional self-harm by airgun, sequela",
category_name: "Intentional self-harm by airgun, sequela"
}
end
def _X7402XA do
%ICDCode{full_code: "X7402XA",
category_code: "X74",
short_code: "02XA",
full_name: "Intentional self-harm by paintball gun, initial encounter",
short_name: "Intentional self-harm by paintball gun, initial encounter",
category_name: "Intentional self-harm by paintball gun, initial encounter"
}
end
def _X7402XD do
%ICDCode{full_code: "X7402XD",
category_code: "X74",
short_code: "02XD",
full_name: "Intentional self-harm by paintball gun, subsequent encounter",
short_name: "Intentional self-harm by paintball gun, subsequent encounter",
category_name: "Intentional self-harm by paintball gun, subsequent encounter"
}
end
def _X7402XS do
%ICDCode{full_code: "X7402XS",
category_code: "X74",
short_code: "02XS",
full_name: "Intentional self-harm by paintball gun, sequela",
short_name: "Intentional self-harm by paintball gun, sequela",
category_name: "Intentional self-harm by paintball gun, sequela"
}
end
def _X7409XA do
%ICDCode{full_code: "X7409XA",
category_code: "X74",
short_code: "09XA",
full_name: "Intentional self-harm by other gas, air or spring-operated gun, initial encounter",
short_name: "Intentional self-harm by other gas, air or spring-operated gun, initial encounter",
category_name: "Intentional self-harm by other gas, air or spring-operated gun, initial encounter"
}
end
def _X7409XD do
%ICDCode{full_code: "X7409XD",
category_code: "X74",
short_code: "09XD",
full_name: "Intentional self-harm by other gas, air or spring-operated gun, subsequent encounter",
short_name: "Intentional self-harm by other gas, air or spring-operated gun, subsequent encounter",
category_name: "Intentional self-harm by other gas, air or spring-operated gun, subsequent encounter"
}
end
def _X7409XS do
%ICDCode{full_code: "X7409XS",
category_code: "X74",
short_code: "09XS",
full_name: "Intentional self-harm by other gas, air or spring-operated gun, sequela",
short_name: "Intentional self-harm by other gas, air or spring-operated gun, sequela",
category_name: "Intentional self-harm by other gas, air or spring-operated gun, sequela"
}
end
def _X748XXA do
%ICDCode{full_code: "X748XXA",
category_code: "X74",
short_code: "8XXA",
full_name: "Intentional self-harm by other firearm discharge, initial encounter",
short_name: "Intentional self-harm by other firearm discharge, initial encounter",
category_name: "Intentional self-harm by other firearm discharge, initial encounter"
}
end
def _X748XXD do
%ICDCode{full_code: "X748XXD",
category_code: "X74",
short_code: "8XXD",
full_name: "Intentional self-harm by other firearm discharge, subsequent encounter",
short_name: "Intentional self-harm by other firearm discharge, subsequent encounter",
category_name: "Intentional self-harm by other firearm discharge, subsequent encounter"
}
end
def _X748XXS do
%ICDCode{full_code: "X748XXS",
category_code: "X74",
short_code: "8XXS",
full_name: "Intentional self-harm by other firearm discharge, sequela",
short_name: "Intentional self-harm by other firearm discharge, sequela",
category_name: "Intentional self-harm by other firearm discharge, sequela"
}
end
def _X749XXA do
%ICDCode{full_code: "X749XXA",
category_code: "X74",
short_code: "9XXA",
full_name: "Intentional self-harm by unspecified firearm discharge, initial encounter",
short_name: "Intentional self-harm by unspecified firearm discharge, initial encounter",
category_name: "Intentional self-harm by unspecified firearm discharge, initial encounter"
}
end
def _X749XXD do
%ICDCode{full_code: "X749XXD",
category_code: "X74",
short_code: "9XXD",
full_name: "Intentional self-harm by unspecified firearm discharge, subsequent encounter",
short_name: "Intentional self-harm by unspecified firearm discharge, subsequent encounter",
category_name: "Intentional self-harm by unspecified firearm discharge, subsequent encounter"
}
end
def _X749XXS do
%ICDCode{full_code: "X749XXS",
category_code: "X74",
short_code: "9XXS",
full_name: "Intentional self-harm by unspecified firearm discharge, sequela",
short_name: "Intentional self-harm by unspecified firearm discharge, sequela",
category_name: "Intentional self-harm by unspecified firearm discharge, sequela"
}
end
end
| 42.612676 | 111 | 0.6627 |
03471de89b779d1df307c8761aa8f41a03187e20 | 1,801 | ex | Elixir | clients/app_engine/lib/google_api/app_engine/v1/model/resource_record.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/app_engine/lib/google_api/app_engine/v1/model/resource_record.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/app_engine/lib/google_api/app_engine/v1/model/resource_record.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AppEngine.V1.Model.ResourceRecord do
@moduledoc """
A DNS resource record.
## Attributes
* `name` (*type:* `String.t`, *default:* `nil`) - Relative name of the object affected by this record. Only applicable for CNAME records. Example: 'www'.
* `rrdata` (*type:* `String.t`, *default:* `nil`) - Data for this record. Values vary by record type, as defined in RFC 1035 (section 5) and RFC 1034 (section 3.6.1).
* `type` (*type:* `String.t`, *default:* `nil`) - Resource record type. Example: AAAA.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:name => String.t(),
:rrdata => String.t(),
:type => String.t()
}
field(:name)
field(:rrdata)
field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.AppEngine.V1.Model.ResourceRecord do
def decode(value, options) do
GoogleApi.AppEngine.V1.Model.ResourceRecord.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AppEngine.V1.Model.ResourceRecord do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.981132 | 170 | 0.704609 |
03472eba31e89a319c86f3d2c4ec0af468ab2287 | 8,458 | ex | Elixir | lib/suomidev/submissions.ex | yliaho/suomi.dev | 4e1946adbeb67cebdc1b7e2009ddecf66497a9c7 | [
"MIT"
] | 19 | 2020-08-24T02:50:26.000Z | 2021-05-31T20:54:30.000Z | lib/suomidev/submissions.ex | yliaho/suomi.dev | 4e1946adbeb67cebdc1b7e2009ddecf66497a9c7 | [
"MIT"
] | 2 | 2020-08-24T06:59:46.000Z | 2020-08-26T09:51:40.000Z | lib/suomidev/submissions.ex | yliaho/suomi.dev | 4e1946adbeb67cebdc1b7e2009ddecf66497a9c7 | [
"MIT"
] | 6 | 2020-08-26T02:52:48.000Z | 2022-03-08T12:55:51.000Z | defmodule Suomidev.Submissions do
@moduledoc """
The Submissions context.
"""
import Ecto.Query, warn: false
alias Suomidev.Repo
alias Suomidev.Accounts.User
alias Suomidev.Submissions.Submission
@pagination_limit 2
defdelegate authorize(action, user, params), to: Suomidev.Submissions.Policy
def list_submissions do
Repo.all(Submission)
end
def list_user_submissions(:posts, %User{id: user_id}) do
Repo.all(
from submission in Submission,
where: submission.user_id == ^user_id and submission.type == "post",
limit: 5,
order_by: submission.inserted_at
)
|> Repo.preload(:user)
end
def list_user_submissions(:comments, %User{id: user_id}) do
Repo.all(
from submission in Submission,
where: submission.user_id == ^user_id and submission.type == "comment",
limit: 5,
order_by: submission.inserted_at
)
|> Repo.preload(:user)
end
def available_submissions do
from submission in Submission,
where: is_nil(submission.flag)
end
def available_posts() do
from submission in subquery(available_submissions()),
where: submission.type == "post"
end
def available_comments() do
from submission in subquery(available_submissions()),
where: submission.type == "comment"
end
def submission_type_as_current_user(user_id, submission_type) do
query = fn ->
case submission_type do
"post" ->
available_posts()
"comment" ->
available_comments()
"all" ->
available_submissions()
end
end
from submission in subquery(query.()),
left_join: like in assoc(submission, :likes),
on: like.user_id == ^user_id,
select:
merge(submission, %{
current_user_liked: fragment("CASE WHEN ? IS NOT NULL THEN true ELSE false END", like)
})
end
def list_posts do
Repo.all(from(submission in subquery(available_posts())))
end
def paginate_posts(current_user, opts) do
limit = (opts[:limit] || @pagination_limit) + 1
offset = limit * (opts[:page] - 1)
current_user_id = if current_user, do: current_user.id, else: -1
results =
Repo.all(
from submission in subquery(submission_type_as_current_user(current_user_id, "post")),
order_by:
fragment(
"""
(COALESCE(?, 0) + 1) / POW(((EXTRACT(EPOCH FROM NOW()) - EXTRACT(EPOCH FROM ?)) / 3600) + 2, 1.8) DESC
""",
submission.cache_like_count,
submission.inserted_at
),
preload: [:user],
limit: ^limit,
offset: ^offset
)
%{
results: results,
has_more: length(results) > opts[:limit],
next: opts[:page] + 1,
prev: opts[:page] - 1
}
end
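  # Illustrative arithmetic for the ranking fragment above (numbers made up):
  # a post with 10 likes scores (10 + 1) / (1 + 2)^1.8 ≈ 1.52 one hour after
  # posting, but only (10 + 1) / (10 + 2)^1.8 ≈ 0.13 after ten hours, so a
  # post's score decays with age unless it keeps collecting likes.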
def get_submission!(id), do: Repo.get!(Submission, id) |> Repo.preload(:user)
def get_submission(id), do: Repo.get(Submission, id)
def get_submission_as_current_user!(id, %User{} = user) do
Repo.one!(
from submission in subquery(submission_type_as_current_user(user.id, "all")),
where: submission.id == ^id,
preload: [:user]
)
end
def get_submission_as_current_user!(id, nil) do
get_submission!(id)
end
def paginate_comments_for_post(post_id, user, opts \\ [limit: 5, page: 1]) do
limit = (opts[:limit] || @pagination_limit) + 1
offset = limit * (opts[:page] - 1)
depth = String.split(post_id, ".") |> length()
max_depth = depth + 6
user_id = if user, do: user.id, else: -1
results =
Repo.all(
from comment in Submission,
left_join: like in assoc(comment, :likes),
on: like.user_id == ^user_id,
where:
fragment("? <@ text2ltree(?)", comment.path, ^post_id) and
fragment(
"nlevel(?) <= ?",
comment.path,
^max_depth
),
preload: [:user],
order_by:
fragment(
"""
nlevel(?),
(COALESCE(?, 0) + 1) / POW(((EXTRACT(EPOCH FROM NOW()) - EXTRACT(EPOCH FROM ?)) / 3600) + 2, 1.8) DESC
""",
comment.path,
comment.cache_like_count,
comment.inserted_at
),
limit: ^limit,
offset: ^offset,
select:
merge(comment, %{
current_user_liked:
fragment("CASE WHEN ? IS NOT NULL THEN true ELSE false END", like),
score:
fragment(
"""
(COALESCE(?, 0) + 1) / POW(((EXTRACT(EPOCH FROM NOW()) - EXTRACT(EPOCH FROM ?)) / 3600) + 2, 1.8)
""",
comment.cache_like_count,
comment.inserted_at
),
more_comment:
fragment(
"""
(SELECT
COUNT(*)
FROM
submissions child
WHERE
? = child.parent_id)
""",
comment.id
)
})
)
|> gen_comment_tree(String.split(post_id, ".") |> List.last() |> String.to_integer())
%{
results: results,
has_more: length(results) > opts[:limit],
next: opts[:page] + 1,
prev: opts[:page] - 1
}
end
@doc """
  Creates a post submission.
"""
def create_submission(attrs) do
%Submission{}
|> Submission.changeset(
Map.put(
attrs,
"content_html",
Suomidev.Markdown.as_safe_html(attrs["content_md"])
)
)
|> Repo.insert()
end
@doc """
  Creates a comment submission.
"""
def create_submission(parent_id, attrs) do
if parent = get_submission(parent_id) do
path = if parent.path, do: "#{parent.path}.#{parent.id}", else: "#{parent.id}"
case %Submission{}
|> Submission.changeset(
Map.merge(attrs, %{
"content_html" => Suomidev.Markdown.as_safe_html(attrs["content_md"] || ""),
"path" => path,
"parent_id" => parent_id
})
)
|> Repo.insert() do
{:ok, struct} ->
spawn(fn ->
inc_cache_count(struct.path.labels, :cache_comment_count, 1)
end)
{:ok, struct}
{:error, changeset} ->
{:error, changeset}
end
else
{:no_parent, "parent submissions is unavailable"}
end
end
def update_submission(%Submission{} = submission, attrs) do
submission
|> Submission.changeset(
Map.merge(attrs, %{
"content_html" => Suomidev.Markdown.as_safe_html(attrs["content_md"] || "")
})
)
|> Repo.update()
end
def update_post_submisison(%Submission{} = submission, attrs) do
submission
|> Submission.edit_post_changeset(
Map.merge(attrs, %{
"content_html" => Suomidev.Markdown.as_safe_html(attrs["content_md"] || "")
})
)
|> Repo.update()
end
def delete_submission(%Submission{} = submission) do
Repo.delete(submission)
end
def change_submission(%Submission{} = submission, attrs \\ %{}) do
Submission.changeset(submission, attrs)
end
def change_post_submission(%Submission{} = submission, attrs \\ %{}) do
Submission.edit_post_changeset(submission, attrs)
end
def gen_comment_tree(comments, parent_id \\ 0) do
comments
|> Enum.sort(&(&1.score >= &2.score))
|> Enum.filter(fn comment ->
String.to_integer(List.last(comment.path.labels)) == parent_id
end)
|> Enum.map(fn comment ->
comment
|> Map.merge(%{
children: gen_comment_tree(comments, comment.id)
})
end)
end
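  # Illustrative sketch of the tree shape (ids made up): for a post with id 1,
  # a comment with path "1" (id 5) and a reply with path "1.5" (id 9),
  # gen_comment_tree(comments, 1) returns the comment structs nested roughly as
  #
  #     [%{id: 5, children: [%{id: 9, children: []}]}]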
def inc_cache_count(id, :cache_like_count, value) do
Repo.update_all(
from(submission in Submission,
where: submission.id == ^id
),
inc: [cache_like_count: value]
)
end
def inc_cache_count(path_labels, :cache_comment_count, value) do
post_id = List.first(path_labels)
parent_id = if post_id == List.last(path_labels), do: "-1", else: List.last(path_labels)
Repo.update_all(
from(submission in Submission,
where: submission.id == ^post_id or submission.id == ^parent_id
),
inc: [cache_comment_count: value]
)
end
end
| 27.372168 | 118 | 0.564081 |
034760c6161a22f7052e670650bb259eed7c9f88 | 844 | ex | Elixir | test/support/model_case.ex | Rabsztok/HexWorlds | 6d4b00824a6bf09ab84eca4efd14722cf3326616 | [
"MIT"
] | 7 | 2017-08-11T12:08:58.000Z | 2020-08-17T14:31:49.000Z | test/support/model_case.ex | Rabsztok/HexWorlds | 6d4b00824a6bf09ab84eca4efd14722cf3326616 | [
"MIT"
] | 1 | 2020-06-20T17:54:12.000Z | 2020-06-20T17:54:12.000Z | test/support/model_case.ex | Rabsztok/HexWorlds | 6d4b00824a6bf09ab84eca4efd14722cf3326616 | [
"MIT"
] | null | null | null | defmodule Game.ModelCase do
@moduledoc """
This module defines the test case to be used by
model tests.
You may define functions here to be used as helpers in
your model tests. See `errors_on/2`'s definition as reference.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias Game.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Game.ModelCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Game.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Game.Repo, {:shared, self()})
end
:ok
end
end
| 22.210526 | 66 | 0.691943 |
034765c88e9ae776e169c2100b16ff6e41fdbb37 | 3,142 | ex | Elixir | clients/playable_locations/lib/google_api/playable_locations/v3/model/google_maps_playablelocations_v3_sample_spacing_options.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/playable_locations/lib/google_api/playable_locations/v3/model/google_maps_playablelocations_v3_sample_spacing_options.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/playable_locations/lib/google_api/playable_locations/v3/model/google_maps_playablelocations_v3_sample_spacing_options.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.PlayableLocations.V3.Model.GoogleMapsPlayablelocationsV3SampleSpacingOptions do
@moduledoc """
A set of options that specifies the separation between playable locations.
## Attributes
* `minSpacingMeters` (*type:* `float()`, *default:* `nil`) - Required. The minimum spacing between any two playable locations, measured in meters. The minimum value is 30. The maximum value is 1000. Inputs will be rounded up to the next 10 meter interval. The default value is 200m. Set this field to remove tight clusters of playable locations. Note: The spacing is a greedy algorithm. It optimizes for selecting the highest ranking locations first, not to maximize the number of locations selected. Consider the following scenario: * Rank: A: 2, B: 1, C: 3. * Distance: A--200m--B--200m--C If spacing=250, it will pick the highest ranked location [B], not [A, C]. Note: Spacing works within the game object type itself, as well as the previous ones. Suppose three game object types, each with the following spacing: * X: 400m, Y: undefined, Z: 200m. 1. Add locations for X, within 400m of each other. 2. Add locations for Y, without any spacing. 3. Finally, add locations for Z within 200m of each other as well X and Y. The distance diagram between those locations end up as: * From->To. * X->X: 400m * Y->X, Y->Y: unspecified. * Z->X, Z->Y, Z->Z: 200m.
* `pointType` (*type:* `String.t`, *default:* `nil`) - Specifies whether the minimum spacing constraint applies to the center-point or to the snapped point of playable locations. The default value is `CENTER_POINT`. If a snapped point is not available for a playable location, its center-point is used instead. Set this to the point type used in your game.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:minSpacingMeters => float() | nil,
:pointType => String.t() | nil
}
field(:minSpacingMeters)
field(:pointType)
end
defimpl Poison.Decoder,
for: GoogleApi.PlayableLocations.V3.Model.GoogleMapsPlayablelocationsV3SampleSpacingOptions do
def decode(value, options) do
GoogleApi.PlayableLocations.V3.Model.GoogleMapsPlayablelocationsV3SampleSpacingOptions.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.PlayableLocations.V3.Model.GoogleMapsPlayablelocationsV3SampleSpacingOptions do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 57.127273 | 1,160 | 0.749204 |
03476bf6b26657a9f0c1c1b91f4a3b28cccdfb18 | 2,778 | ex | Elixir | printer/lib/printer/connection/server.ex | Jwsonic/cliff_swallow | 6ea69bcb10a844e10dd002a309eb4f48d0f0d1b8 | [
"MIT"
] | null | null | null | printer/lib/printer/connection/server.ex | Jwsonic/cliff_swallow | 6ea69bcb10a844e10dd002a309eb4f48d0f0d1b8 | [
"MIT"
] | null | null | null | printer/lib/printer/connection/server.ex | Jwsonic/cliff_swallow | 6ea69bcb10a844e10dd002a309eb4f48d0f0d1b8 | [
"MIT"
] | null | null | null | defmodule Printer.Connection.Server do
@moduledoc """
GenServer for handling connections and their various lifecycle events.
"""
use GenServer
require Logger
defmodule State do
@moduledoc """
State struct for Connection Server
"""
defstruct [:connection, :printer_server]
end
alias Printer.Connection.Protocol, as: ConnectionProtocol
alias Printer.Connection.Server.State
def start_link(args \\ []) do
GenServer.start_link(__MODULE__, args)
end
@impl GenServer
def init(args) do
state = %State{
connection: Keyword.fetch!(args, :connection),
printer_server: Keyword.fetch!(args, :printer_server)
}
{:ok, state, {:continue, :open_connection}}
end
@impl GenServer
def handle_continue(:open_connection, %State{connection: connection} = state) do
case ConnectionProtocol.open(connection) do
{:ok, connection} ->
state = %{state | connection: connection}
send_to_printer(state, :connection_open, connection)
{:noreply, state}
{:error, reason} ->
send_to_printer(state, :connection_open_failed, reason)
{:stop, :normal, state}
end
end
@impl GenServer
def handle_call(:close, _from, %State{connection: connection} = state) do
reply = ConnectionProtocol.close(connection)
{:stop, :normal, reply, state}
end
@impl GenServer
def handle_call({:send, message}, _from, %State{connection: connection} = state) do
reply = ConnectionProtocol.send(connection, message)
{:reply, reply, state}
end
@impl GenServer
def handle_info(message, %State{connection: connection} = state) do
# The message may not be for the protocol, so catch function clause errors here
response =
try do
ConnectionProtocol.handle_message(connection, message)
rescue
FunctionClauseError ->
Logger.info("handle_message/2 unable to handle message: #{inspect(message)}")
:ok
end
case response do
{:ok, connection} ->
{:noreply, %{state | connection: connection}}
{:ok, connection, response} ->
state = %{state | connection: connection}
send_to_printer(state, :connection_data, response)
{:noreply, state}
{:closed, reason} ->
send_to_printer(state, :connection_closed, reason)
        {:stop, :normal, state}
{:error, error, connection} ->
state = %{state | connection: connection}
send_to_printer(state, :connection_error, error)
{:noreply, state}
end
end
# Sends a self() tagged message to the printer server
defp send_to_printer(%State{printer_server: printer_server}, type, data) do
Process.send(printer_server, {type, self(), data}, [])
end
end
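# Editorial sketch: typical usage, assuming a MyConnection struct (not part of
# this file) that implements Printer.Connection.Protocol:
#
#     {:ok, pid} =
#       Printer.Connection.Server.start_link(
#         connection: %MyConnection{},
#         printer_server: self()
#       )
#
#     GenServer.call(pid, {:send, "G28\n"})
#     GenServer.call(pid, :close)
#
# The printer process then receives tagged messages such as
# {:connection_open, pid, connection} and {:connection_data, pid, data}.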
| 25.962617 | 87 | 0.663787 |
034780ad7ba4b319a52abc66c972ca36c90213d9 | 335 | ex | Elixir | lib/mimicry_api/endpoint.ex | mimicry-tech/mimicry | 08a28fb08a7f02fbbb0a2bc2724a08a0b0b41cd9 | [
"MIT"
] | 4 | 2021-06-18T17:47:32.000Z | 2022-01-04T22:41:36.000Z | lib/mimicry_api/endpoint.ex | mimicry-tech/mimicry | 08a28fb08a7f02fbbb0a2bc2724a08a0b0b41cd9 | [
"MIT"
] | 56 | 2021-06-14T19:04:35.000Z | 2022-03-24T04:18:58.000Z | lib/mimicry_api/endpoint.ex | mimicry-tech/mimicry | 08a28fb08a7f02fbbb0a2bc2724a08a0b0b41cd9 | [
"MIT"
] | 1 | 2021-06-13T13:44:35.000Z | 2021-06-13T13:44:35.000Z |
defmodule MimicryApi.Endpoint do
use Phoenix.Endpoint, otp_app: :mimicry
if code_reloading? do
plug(Phoenix.CodeReloader)
end
plug(MimicryApi.ResponseHeaders)
plug(Plug.RequestId)
plug(Plug.Parsers,
parsers: [:json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
)
plug(MimicryApi.Router)
end
| 17.631579 | 41 | 0.701493 |
034791d45415e65cc6baf429e97d005c54474f2e | 1,256 | exs | Elixir | config/dev.exs | shawn-mcginty/phoenix-webpack-skeleton | 311559c860da97157d643d06cd2601aee81c6a3a | [
"MIT"
] | null | null | null | config/dev.exs | shawn-mcginty/phoenix-webpack-skeleton | 311559c860da97157d643d06cd2601aee81c6a3a | [
"MIT"
] | null | null | null | config/dev.exs | shawn-mcginty/phoenix-webpack-skeleton | 311559c860da97157d643d06cd2601aee81c6a3a | [
"MIT"
] | null | null | null |
use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :time_voice, TimeVoice.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [npm: ["run", "watch"], npm: ["run", "watch-sass"]]
# Watch static and templates for browser reloading.
config :time_voice, TimeVoice.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{web/views/.*(ex)$},
~r{web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Configure your database
config :time_voice, TimeVoice.Repo,
adapter: Ecto.Adapters.Postgres,
username: "timevoice",
password: "timevoice1",
database: "time_voice_dev",
hostname: "localhost",
pool_size: 10
| 29.904762 | 68 | 0.707803 |
0347a22c008a0344984de5b8a30a5ead3606f93a | 963 | ex | Elixir | test/support/erp.ex | noizu-labs/SimplePoolAdvanced | 9555071e42917d280c7ce4846c19ef208c77f14e | [
"MIT"
] | null | null | null | test/support/erp.ex | noizu-labs/SimplePoolAdvanced | 9555071e42917d280c7ce4846c19ef208c77f14e | [
"MIT"
] | null | null | null | test/support/erp.ex | noizu-labs/SimplePoolAdvanced | 9555071e42917d280c7ce4846c19ef208c77f14e | [
"MIT"
] | null | null | null |
#-------------------------------------------------------------------------------
# Author: Keith Brings
# Copyright (C) 2018 Noizu Labs, Inc. All rights reserved.
#-------------------------------------------------------------------------------
defimpl Noizu.ERP, for: Atom do
def sref(nil), do: nil
def ref(nil), do: nil
def id(nil), do: nil
def entity(nil, _options \\ nil), do: nil
def entity!(nil, _options \\ nil), do: nil
def record(nil, _options \\ nil), do: nil
def record!(nil, _options \\ nil), do: nil
def id_ok(o) do
r = id(o)
r && {:ok, r} || {:error, o}
end
def ref_ok(o) do
r = ref(o)
r && {:ok, r} || {:error, o}
end
def sref_ok(o) do
r = sref(o)
r && {:ok, r} || {:error, o}
end
def entity_ok(o, options \\ %{}) do
r = entity(o, options)
r && {:ok, r} || {:error, o}
end
def entity_ok!(o, options \\ %{}) do
r = entity!(o, options)
r && {:ok, r} || {:error, o}
end
end
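# Editorial sketch: with this implementation nil acts as an "empty" reference,
# so the *_ok helpers return error tuples:
#
#     Noizu.ERP.ref(nil)       #=> nil
#     Noizu.ERP.ref_ok(nil)    #=> {:error, nil}
#     Noizu.ERP.entity_ok(nil) #=> {:error, nil}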
| 25.342105 | 80 | 0.44756 |
0347c18c0a88a9efd5448ebf9cb0fee461f8defb | 8,555 | ex | Elixir | lib/earmark/html_renderer.ex | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | null | null | null | lib/earmark/html_renderer.ex | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | null | null | null | lib/earmark/html_renderer.ex | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | null | null | null |
defmodule Earmark.HtmlRenderer do
@moduledoc false
alias Earmark.Block
alias Earmark.Context
alias Earmark.Options
import Earmark.Inline, only: [convert: 3]
import Earmark.Helpers, only: [escape: 2]
import Earmark.Helpers.HtmlHelpers
import Earmark.Message, only: [add_messages_from: 2, get_messages: 1, set_messages: 2]
import Earmark.Context, only: [append: 2, set_value: 2]
import Earmark.Options, only: [get_mapper: 1]
@doc false
def render(blocks, context = %Context{options: %Options{}}) do
messages = get_messages(context)
{contexts, html} =
get_mapper(context.options).(
blocks,
&render_block(&1, put_in(context.options.messages, []))
)
|> Enum.unzip()
all_messages =
contexts
|> Enum.reduce(messages, fn ctx, messages1 -> messages1 ++ get_messages(ctx) end)
{put_in(context.options.messages, all_messages), html |> IO.iodata_to_binary()}
end
#############
# Paragraph #
#############
defp render_block(%Block.Para{lnb: lnb, lines: lines, attrs: attrs}, context) do
lines = convert(lines, lnb, context)
add_attrs(lines, "<p>#{lines.value}</p>\n", attrs, [], lnb)
end
########
# Html #
########
defp render_block(%Block.Html{html: html}, context) do
{context, Enum.intersperse(html, ?\n)}
end
defp render_block(%Block.HtmlComment{lines: lines}, context) do
{context, Enum.intersperse(lines, ?\n)}
end
defp render_block(%Block.HtmlOneline{html: html}, context) do
{context, Enum.intersperse(html, ?\n)}
end
#########
# Ruler #
#########
defp render_block(%Block.Ruler{lnb: lnb, type: "-", attrs: attrs}, context) do
add_attrs(context, "<hr />\n", attrs, [{"class", ["thin"]}], lnb)
end
defp render_block(%Block.Ruler{lnb: lnb, type: "_", attrs: attrs}, context) do
add_attrs(context, "<hr />\n", attrs, [{"class", ["medium"]}], lnb)
end
defp render_block(%Block.Ruler{lnb: lnb, type: "*", attrs: attrs}, context) do
add_attrs(context, "<hr />\n", attrs, [{"class", ["thick"]}], lnb)
end
###########
# Heading #
###########
defp render_block(
%Block.Heading{lnb: lnb, level: level, content: content, attrs: attrs},
context
) do
converted = convert(content, lnb, context)
html = "<h#{level}>#{converted.value}</h#{level}>\n"
add_attrs(converted, html, attrs, [], lnb)
end
##############
# Blockquote #
##############
defp render_block(%Block.BlockQuote{lnb: lnb, blocks: blocks, attrs: attrs}, context) do
{context1, body} = render(blocks, context)
html = "<blockquote>#{body}</blockquote>\n"
add_attrs(context1, html, attrs, [], lnb)
end
#########
# Table #
#########
defp render_block(
%Block.Table{lnb: lnb, header: header, rows: rows, alignments: aligns, attrs: attrs},
context
) do
{context1, html} = add_attrs(context, "<table>\n", attrs, [], lnb)
context2 = set_value(context1, html)
context3 =
if header do
append(add_trs(append(context2, "<thead>\n"), [header], "th", aligns, lnb), "</thead>\n")
else
        # Possibly an error in the original: this branch may need append(context, html)
        # instead of returning the context unchanged.
context2
end
context4 = append(add_trs(append(context3, "<tbody>\n"), rows, "td", aligns, lnb), "</tbody>\n")
{context4, [context4.value, "</table>\n"]}
end
########
# Code #
########
defp render_block(
%Block.Code{lnb: lnb, language: language, attrs: attrs} = block,
context = %Context{options: options}
) do
class =
if language, do: ~s{ class="#{code_classes(language, options.code_class_prefix)}"}, else: ""
tag = ~s[<pre><code#{class}>]
lines = options.render_code.(block)
html = ~s[#{tag}#{lines}</code></pre>\n]
add_attrs(context, html, attrs, [], lnb)
end
#########
# Lists #
#########
defp render_block(
%Block.List{lnb: lnb, type: type, blocks: items, attrs: attrs, start: start},
context
) do
{context1, content} = render(items, context)
html = "<#{type}#{start}>\n#{content}</#{type}>\n"
add_attrs(context1, html, attrs, [], lnb)
end
# format a single paragraph list item, and remove the para tags
defp render_block(
%Block.ListItem{lnb: lnb, blocks: blocks, spaced: false, attrs: attrs},
context
)
when length(blocks) == 1 do
{context1, content} = render(blocks, context)
content = Regex.replace(~r{</?p>}, content, "")
html = "<li>#{content}</li>\n"
add_attrs(context1, html, attrs, [], lnb)
end
# format a spaced list item
defp render_block(%Block.ListItem{lnb: lnb, blocks: blocks, attrs: attrs}, context) do
{context1, content} = render(blocks, context)
html = "<li>#{content}</li>\n"
add_attrs(context1, html, attrs, [], lnb)
end
##################
# Footnote Block #
##################
defp render_block(%Block.FnList{blocks: footnotes}, context) do
items =
Enum.map(footnotes, fn note ->
blocks = append_footnote_link(note)
%Block.ListItem{attrs: "#fn:#{note.number}", type: :ol, blocks: blocks}
end)
{context1, html} = render_block(%Block.List{type: :ol, blocks: items}, context)
{context1, Enum.join([~s[<div class="footnotes">], "<hr />", html, "</div>"], "\n")}
end
#######################################
# Isolated IALs are rendered as paras #
#######################################
defp render_block(%Block.Ial{verbatim: verbatim}, context) do
{context, "<p>{:#{verbatim}}</p>\n"}
end
####################
# IDDef is ignored #
####################
defp render_block(%Block.IdDef{}, context), do: {context, ""}
#####################################
# And here are the inline renderers #
#####################################
def br, do: "<br />"
def codespan(text), do: ~s[<code class="inline">#{text}</code>]
def em(text), do: "<em>#{text}</em>"
def strong(text), do: "<strong>#{text}</strong>"
def strikethrough(text), do: "<del>#{text}</del>"
def link(url, text), do: ~s[<a href="#{url}">#{text}</a>]
def link(url, text, nil), do: ~s[<a href="#{url}">#{text}</a>]
def link(url, text, title), do: ~s[<a href="#{url}" title="#{title}">#{text}</a>]
def image(path, alt, nil) do
~s[<img src="#{path}" alt="#{alt}" />]
end
def image(path, alt, title) do
~s[<img src="#{path}" alt="#{alt}" title="#{title}" />]
end
def footnote_link(ref, backref, number),
do: ~s[<a href="##{ref}" id="#{backref}" class="footnote" title="see footnote">#{number}</a>]
# Table rows
defp add_trs(context, rows, tag, aligns, lnb) do
numbered_rows =
rows
|> Enum.zip(Stream.iterate(lnb, &(&1 + 1)))
numbered_rows
|> Enum.reduce(context, fn {row, lnb}, ctx ->
append(add_tds(append(ctx, "<tr>\n"), row, tag, aligns, lnb), "\n</tr>\n")
end)
end
defp add_tds(context, row, tag, aligns, lnb) do
Enum.reduce(1..length(row), context, add_td_fn(row, tag, aligns, lnb))
end
defp add_td_fn(row, tag, aligns, lnb) do
fn n, ctx ->
style =
case Enum.at(aligns, n - 1, :default) do
:default -> ""
align -> " style=\"text-align: #{align}\""
end
col = Enum.at(row, n - 1)
converted = convert(col, lnb, set_messages(ctx, []))
append(add_messages_from(ctx, converted), "<#{tag}#{style}>#{converted.value}</#{tag}>")
end
end
###############################
# Append Footnote Return Link #
###############################
def append_footnote_link(note = %Block.FnDef{}) do
fnlink =
~s[<a href="#fnref:#{note.number}" title="return to article" class="reversefootnote">↩</a>]
[last_block | blocks] = Enum.reverse(note.blocks)
last_block = append_footnote_link(last_block, fnlink)
Enum.reverse([last_block | blocks])
|> List.flatten()
end
def append_footnote_link(block = %Block.Para{lines: lines}, fnlink) do
[last_line | lines] = Enum.reverse(lines)
last_line = "#{last_line} #{fnlink}"
[put_in(block.lines, Enum.reverse([last_line | lines]))]
end
def append_footnote_link(block, fnlink) do
[block, %Block.Para{lines: fnlink}]
end
def render_code(%Block.Code{lines: lines}) do
lines |> Enum.join("\n") |> escape(false)
end
defp code_classes(language, prefix) do
["" | String.split(prefix || "")]
|> Enum.map(fn pfx -> "#{pfx}#{language}" end)
|> Enum.join(" ")
end
end
# SPDX-License-Identifier: Apache-2.0
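# Editorial sketch: the inline renderers above return plain HTML strings, e.g.
#
#     Earmark.HtmlRenderer.codespan("a + b")
#     #=> ~s[<code class="inline">a + b</code>]
#
#     Earmark.HtmlRenderer.link("https://example.com", "example", "a title")
#     #=> ~s[<a href="https://example.com" title="a title">example</a>]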
| 29.808362 | 104 | 0.576622 |
0347c98bd404c3da0ad32d54237d1d240e557703 | 1,115 | exs | Elixir | patterns/pub-sub/elixir/pubSub/config/config.exs | thetonymaster/thetonymaster.github.io | 2e24d46dd377fed6ab6d1609e5afe24b4953a0f2 | [
"MIT"
] | null | null | null | patterns/pub-sub/elixir/pubSub/config/config.exs | thetonymaster/thetonymaster.github.io | 2e24d46dd377fed6ab6d1609e5afe24b4953a0f2 | [
"MIT"
] | null | null | null | patterns/pub-sub/elixir/pubSub/config/config.exs | thetonymaster/thetonymaster.github.io | 2e24d46dd377fed6ab6d1609e5afe24b4953a0f2 | [
"MIT"
] | null | null | null |
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :pubSub, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:pubSub, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 35.967742 | 73 | 0.750673 |
03480c8adc3a542b138c64b8dae1047ee891a985 | 1,126 | ex | Elixir | lib/astarte_flow_web/views/flow_view.ex | matt-mazzucato/astarte_flow | e8644b5a27edf325977f5bced9a919f20e289ee2 | [
"Apache-2.0"
] | null | null | null | lib/astarte_flow_web/views/flow_view.ex | matt-mazzucato/astarte_flow | e8644b5a27edf325977f5bced9a919f20e289ee2 | [
"Apache-2.0"
] | null | null | null | lib/astarte_flow_web/views/flow_view.ex | matt-mazzucato/astarte_flow | e8644b5a27edf325977f5bced9a919f20e289ee2 | [
"Apache-2.0"
] | null | null | null |
#
# This file is part of Astarte.
#
# Copyright 2020 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule Astarte.FlowWeb.FlowView do
use Astarte.FlowWeb, :view
alias Astarte.FlowWeb.FlowView
def render("index.json", %{flows: flows}) do
%{data: render_many(flows, FlowView, "flow_name.json")}
end
def render("show.json", %{flow: flow}) do
%{data: render_one(flow, FlowView, "flow.json")}
end
def render("flow.json", %{flow: flow}) do
%{name: flow.name, pipeline: flow.pipeline, config: flow.config}
end
def render("flow_name.json", %{flow: flow}) do
flow.name
end
end
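# Editorial sketch: "show.json" (via "flow.json") renders a single flow roughly as
#
#     %{data: %{name: "my-flow", pipeline: "my-pipeline", config: %{}}}
#
# (values are illustrative), while "index.json" renders only the list of flow names.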
| 28.871795 | 74 | 0.715808 |
0348630d5329cb062e71675f50e9ed393a9e77f5 | 81 | ex | Elixir | lib/component/definitions/actor_name.ex | doawoo/elixir_rpg | 4dcd0eb717bd1d654b3e6a06be31aba4c3254fb3 | [
"MIT"
] | 23 | 2021-10-24T00:21:13.000Z | 2022-03-13T12:33:38.000Z | lib/component/definitions/actor_name.ex | doawoo/elixir_rpg | 4dcd0eb717bd1d654b3e6a06be31aba4c3254fb3 | [
"MIT"
] | null | null | null | lib/component/definitions/actor_name.ex | doawoo/elixir_rpg | 4dcd0eb717bd1d654b3e6a06be31aba4c3254fb3 | [
"MIT"
] | 3 | 2021-11-04T02:42:25.000Z | 2022-02-02T14:22:52.000Z |
use ElixirRPG.DSL.Component
defcomponent ActorName do
member :name, "???"
end
| 13.5 | 27 | 0.740741 |
034884da54ff9265d4c583beefa8bb93b918fba7 | 231 | ex | Elixir | code/docstring_test/lib/docstring_test.ex | HTX-Elixir-Meetup/2016-12-07-exunit-and-testing | 7317a429f938e9694e1bd0c57981a89fbc398d8c | [
"Apache-2.0"
] | null | null | null | code/docstring_test/lib/docstring_test.ex | HTX-Elixir-Meetup/2016-12-07-exunit-and-testing | 7317a429f938e9694e1bd0c57981a89fbc398d8c | [
"Apache-2.0"
] | null | null | null | code/docstring_test/lib/docstring_test.ex | HTX-Elixir-Meetup/2016-12-07-exunit-and-testing | 7317a429f938e9694e1bd0c57981a89fbc398d8c | [
"Apache-2.0"
] | null | null | null |
defmodule DocstringTest do
@doc """
This function returns a reversed string.
iex> DocstringTest.reverse_string "hi"
"ih"
"""
def reverse_string(string) when is_bitstring(string) do
String.reverse(string)
end
end
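# Editorial sketch: the iex> example above runs as a doctest once a test module
# declares it (the test module name here is an assumption):
#
#     defmodule DocstringTestTest do
#       use ExUnit.Case, async: true
#       doctest DocstringTest
#     end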
| 19.25 | 57 | 0.722944 |
034890505a7c67f471164f78e04419ea7fb1dfb2 | 40,219 | ex | Elixir | lib/elixir/lib/system.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/elixir/lib/system.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/elixir/lib/system.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z |
defmodule System do
@moduledoc """
The `System` module provides functions that interact directly
with the VM or the host system.
## Time
The `System` module also provides functions that work with time,
returning different times kept by the system with support for
different time units.
One of the complexities in relying on system times is that they
may be adjusted. For example, when you enter and leave daylight
saving time, the system clock will be adjusted, often adding
or removing one hour. We call such changes "time warps". In
order to understand how such changes may be harmful, imagine
the following code:
## DO NOT DO THIS
prev = System.os_time()
# ... execute some code ...
next = System.os_time()
diff = next - prev
If, while the code is executing, the system clock changes,
some code that executed in 1 second may be reported as taking
over 1 hour! To address such concerns, the VM provides a
monotonic time via `System.monotonic_time/0` which never
decreases and does not leap:
## DO THIS
prev = System.monotonic_time()
# ... execute some code ...
next = System.monotonic_time()
diff = next - prev
Generally speaking, the VM provides three time measurements:
* `os_time/0` - the time reported by the operating system (OS). This time may be
adjusted forwards or backwards in time with no limitation;
* `system_time/0` - the VM view of the `os_time/0`. The system time and operating
system time may not match in case of time warps although the VM works towards
aligning them. This time is not monotonic (i.e., it may decrease)
as its behaviour is configured [by the VM time warp
mode](https://www.erlang.org/doc/apps/erts/time_correction.html#Time_Warp_Modes);
* `monotonic_time/0` - a monotonically increasing time provided
by the Erlang VM.
The time functions in this module work in the `:native` unit
(unless specified otherwise), which is operating system dependent. Most of
the time, all calculations are done in the `:native` unit, to
avoid loss of precision, with `convert_time_unit/3` being
invoked at the end to convert to a specific time unit like
`:millisecond` or `:microsecond`. See the `t:time_unit/0` type for
more information.
For a more complete rundown on the VM support for different
times, see the [chapter on time and time
correction](https://www.erlang.org/doc/apps/erts/time_correction.html)
in the Erlang docs.
"""
@typedoc """
The time unit to be passed to functions like `monotonic_time/1` and others.
The `:second`, `:millisecond`, `:microsecond` and `:nanosecond` time
units controls the return value of the functions that accept a time unit.
A time unit can also be a strictly positive integer. In this case, it
represents the "parts per second": the time will be returned in `1 /
parts_per_second` seconds. For example, using the `:millisecond` time unit
is equivalent to using `1000` as the time unit (as the time will be returned
in 1/1000 seconds - milliseconds).
"""
@type time_unit ::
:second
| :millisecond
| :microsecond
| :nanosecond
| pos_integer
@type signal ::
:sigabrt
| :sigalrm
| :sigchld
| :sighup
| :sigquit
| :sigstop
| :sigterm
| :sigtstp
| :sigusr1
| :sigusr2
@vm_signals [:sigquit, :sigterm, :sigusr1]
@os_signals [:sighup, :sigabrt, :sigalrm, :sigusr2, :sigchld, :sigstop, :sigtstp]
@signals @vm_signals ++ @os_signals
@base_dir :filename.join(__DIR__, "../../..")
@version_file :filename.join(@base_dir, "VERSION")
defp strip(iodata) do
:re.replace(iodata, "^[\s\r\n\t]+|[\s\r\n\t]+$", "", [:global, return: :binary])
end
defp read_stripped(path) do
case :file.read_file(path) do
{:ok, binary} ->
strip(binary)
_ ->
""
end
end
# Read and strip the version from the VERSION file.
defmacrop get_version do
case read_stripped(@version_file) do
"" -> raise "could not read the version number from VERSION"
data -> data
end
end
# Returns OTP version that Elixir was compiled with.
defmacrop get_otp_release do
:erlang.list_to_binary(:erlang.system_info(:otp_release))
end
# Tries to run "git rev-parse --short=7 HEAD". In the case of success returns
# the short revision hash. If that fails, returns an empty string.
defmacrop get_revision do
null =
case :os.type() do
{:win32, _} -> 'NUL'
_ -> '/dev/null'
end
'git rev-parse --short=7 HEAD 2> '
|> Kernel.++(null)
|> :os.cmd()
|> strip
end
defp revision, do: get_revision()
# Get the date at compilation time.
# Follows https://reproducible-builds.org/specs/source-date-epoch/
defmacrop get_date do
unix_epoch =
if source_date_epoch = :os.getenv('SOURCE_DATE_EPOCH') do
try do
List.to_integer(source_date_epoch)
rescue
_ -> nil
end
end
unix_epoch = unix_epoch || :os.system_time(:second)
{{year, month, day}, {hour, minute, second}} =
:calendar.gregorian_seconds_to_datetime(unix_epoch + 62_167_219_200)
"~4..0b-~2..0b-~2..0bT~2..0b:~2..0b:~2..0bZ"
|> :io_lib.format([year, month, day, hour, minute, second])
|> :erlang.iolist_to_binary()
end
@doc """
Returns the endianness.
"""
@spec endianness() :: :little | :big
def endianness do
:erlang.system_info(:endian)
end
@doc """
Returns the endianness the system was compiled with.
"""
@endianness :erlang.system_info(:endian)
@spec compiled_endianness() :: :little | :big
def compiled_endianness do
@endianness
end
@doc """
Elixir version information.
Returns Elixir's version as binary.
"""
@spec version() :: String.t()
def version, do: get_version()
@doc """
Elixir build information.
Returns a map with the Elixir version, the Erlang/OTP release it was compiled
with, a short Git revision hash and the date and time it was built.
Every value in the map is a string, and these are:
* `:build` - the Elixir version, short Git revision hash and
Erlang/OTP release it was compiled with
* `:date` - a string representation of the ISO8601 date and time it was built
* `:otp_release` - OTP release it was compiled with
* `:revision` - short Git revision hash. If Git was not available at building
time, it is set to `""`
* `:version` - the Elixir version
One should not rely on the specific formats returned by each of those fields.
Instead one should use specialized functions, such as `version/0` to retrieve
the Elixir version and `otp_release/0` to retrieve the Erlang/OTP release.
## Examples
iex> System.build_info()
%{
build: "1.9.0-dev (772a00a0c) (compiled with Erlang/OTP 21)",
date: "2018-12-24T01:09:21Z",
otp_release: "21",
revision: "772a00a0c",
version: "1.9.0-dev"
}
"""
@spec build_info() :: %{
build: String.t(),
date: String.t(),
revision: String.t(),
version: String.t(),
otp_release: String.t()
}
def build_info do
%{
build: build(),
date: get_date(),
revision: revision(),
version: version(),
otp_release: get_otp_release()
}
end
# Returns a string of the build info
defp build do
{:ok, v} = Version.parse(version())
revision_string = if v.pre != [] and revision() != "", do: " (#{revision()})", else: ""
otp_version_string = " (compiled with Erlang/OTP #{get_otp_release()})"
version() <> revision_string <> otp_version_string
end
@doc """
Lists command line arguments.
Returns the list of command line arguments passed to the program.
"""
@spec argv() :: [String.t()]
def argv do
:elixir_config.get(:argv)
end
@doc """
Modifies command line arguments.
Changes the list of command line arguments. Use it with caution,
as it destroys any previous argv information.
"""
@spec argv([String.t()]) :: :ok
def argv(args) do
:elixir_config.put(:argv, args)
end
@doc """
Marks if the system should halt or not at the end of ARGV processing.
"""
@doc since: "1.9.0"
@spec no_halt(boolean) :: :ok
def no_halt(boolean) when is_boolean(boolean) do
:elixir_config.put(:no_halt, boolean)
end
@doc """
Checks if the system will halt or not at the end of ARGV processing.
"""
@doc since: "1.9.0"
@spec no_halt() :: boolean
def no_halt() do
:elixir_config.get(:no_halt)
end
@doc """
Current working directory.
Returns the current working directory or `nil` if one
is not available.
"""
@deprecated "Use File.cwd/0 instead"
@spec cwd() :: String.t() | nil
def cwd do
case File.cwd() do
{:ok, cwd} -> cwd
_ -> nil
end
end
@doc """
Current working directory, exception on error.
Returns the current working directory or raises `RuntimeError`.
"""
@deprecated "Use File.cwd!/0 instead"
@spec cwd!() :: String.t()
def cwd! do
case File.cwd() do
{:ok, cwd} ->
cwd
_ ->
raise "could not get a current working directory, the current location is not accessible"
end
end
@doc """
User home directory.
Returns the user home directory (platform independent).
"""
@spec user_home() :: String.t() | nil
def user_home do
case :init.get_argument(:home) do
{:ok, [[home] | _]} ->
encoding = :file.native_name_encoding()
:unicode.characters_to_binary(home, encoding, encoding)
_ ->
nil
end
end
@doc """
User home directory, exception on error.
Same as `user_home/0` but raises `RuntimeError`
instead of returning `nil` if no user home is set.
"""
@spec user_home!() :: String.t()
def user_home! do
user_home() || raise "could not find the user home, please set the HOME environment variable"
end
@doc ~S"""
Writable temporary directory.
Returns a writable temporary directory.
Searches for directories in the following order:
1. the directory named by the TMPDIR environment variable
2. the directory named by the TEMP environment variable
3. the directory named by the TMP environment variable
4. `C:\TMP` on Windows or `/tmp` on Unix-like operating systems
5. as a last resort, the current working directory
Returns `nil` if none of the above are writable.
"""
@spec tmp_dir() :: String.t() | nil
def tmp_dir do
write_env_tmp_dir('TMPDIR') || write_env_tmp_dir('TEMP') || write_env_tmp_dir('TMP') ||
write_tmp_dir('/tmp') || write_cwd_tmp_dir()
end
defp write_cwd_tmp_dir do
case File.cwd() do
{:ok, cwd} -> write_tmp_dir(cwd)
_ -> nil
end
end
@doc """
Writable temporary directory, exception on error.
Same as `tmp_dir/0` but raises `RuntimeError`
instead of returning `nil` if no temp dir is set.
"""
@spec tmp_dir!() :: String.t()
def tmp_dir! do
tmp_dir() ||
raise "could not get a writable temporary directory, please set the TMPDIR environment variable"
end
defp write_env_tmp_dir(env) do
case :os.getenv(env) do
false -> nil
tmp -> write_tmp_dir(tmp)
end
end
defp write_tmp_dir(dir) do
case File.stat(dir) do
{:ok, stat} ->
case {stat.type, stat.access} do
{:directory, access} when access in [:read_write, :write] ->
IO.chardata_to_string(dir)
_ ->
nil
end
{:error, _} ->
nil
end
end
@doc """
Registers a program exit handler function.
Registers a function that will be invoked at the end of an Elixir script.
A script is typically started via the command line via the `elixir` and
`mix` executables.
The handler always executes in a different process from the one it was
registered in. As a consequence, any resources managed by the calling process
(ETS tables, open files, and others) won't be available by the time the handler
function is invoked.
The function must receive the exit status code as an argument.
If the VM terminates programmatically, via `System.stop/1`, `System.halt/1`,
or exit signals, the `at_exit/1` callbacks are not executed.
"""
@spec at_exit((non_neg_integer -> any)) :: :ok
def at_exit(fun) when is_function(fun, 1) do
:elixir_config.update(:at_exit, &[fun | &1])
:ok
end
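  # Editorial sketch: for instance, a script may log its final status:
  #
  #     System.at_exit(fn status ->
  #       IO.puts("exiting with status #{status}")
  #     end)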
defmodule SignalHandler do
@moduledoc false
@behaviour :gen_event
@impl true
def init({event, fun}) do
{:ok, {event, fun}}
end
@impl true
def handle_call(_message, state) do
{:ok, :ok, state}
end
@impl true
def handle_event(signal, {event, fun}) do
if signal == event, do: :ok = fun.()
{:ok, {event, fun}}
end
@impl true
def handle_info(_, {event, fun}) do
{:ok, {event, fun}}
end
end
@doc """
Traps the given `signal` to execute the `fun`.
> **Important**: Trapping signals may have strong implications
  > on how a system shuts down and behaves in production and
> therefore it is extremely discouraged for libraries to
> set their own traps. Instead, they should redirect users
  > to configure them themselves. The only case where it is
> acceptable for libraries to set their own traps is when
> using Elixir in script mode, such as in `.exs` files and
> via Mix tasks.
An optional `id` that uniquely identifies the function
can be given, otherwise a unique one is automatically
generated. If a previously registered `id` is given,
this function returns an error tuple. The `id` can be
used to remove a registered signal by calling
`untrap_signal/2`.
The given `fun` receives no arguments and it must return
`:ok`.
It returns `{:ok, id}` in case of success,
`{:error, :already_registered}` in case the id has already
been registered for the given signal, or `{:error, :not_sup}`
  in case trapping signals is not supported by the current OS.
The first time a signal is trapped, it will override the
default behaviour from the operating system. If the same
signal is trapped multiple times, subsequent functions
given to `trap_signal` will execute *first*. In other
words, you can consider each function is prepended to
the signal handler.
  By default, the Erlang VM registers traps to the three
signals:
    * `:sigterm` - gracefully shuts down the VM with `stop/0`
* `:sigquit` - halts the VM via `halt/0`
* `:sigusr1` - halts the VM via status code of 1
Therefore, if you add traps to the signals above, the
default behaviour above will be executed after all user
signals.
## Implementation notes
All signals run from a single process. Therefore, blocking the
`fun` will block subsequent traps. It is also not possible to add
or remove traps from within a trap itself.
Internally, this functionality is built on top of `:os.set_signal/2`.
When you register a trap, Elixir automatically sets it to `:handle`
and it reverts it back to `:default` once all traps are removed
(except for `:sigquit`, `:sigterm`, and `:sigusr1` which are always
handled). If you or a library call `:os.set_signal/2` directly,
it may disable Elixir traps (or Elixir may override your configuration).
"""
@doc since: "1.12.0"
@spec trap_signal(signal, (() -> :ok)) :: {:ok, reference()} | {:error, :not_sup}
@spec trap_signal(signal, id, (() -> :ok)) ::
{:ok, id} | {:error, :already_registered} | {:error, :not_sup}
when id: term()
def trap_signal(signal, id \\ make_ref(), fun)
when signal in @signals and is_function(fun, 0) do
:elixir_config.serial(fn ->
gen_id = {signal, id}
if {SignalHandler, gen_id} in signal_handlers() do
{:error, :already_registered}
else
try do
:os.set_signal(signal, :handle)
rescue
_ -> {:error, :not_sup}
else
:ok ->
:ok =
:gen_event.add_handler(:erl_signal_server, {SignalHandler, gen_id}, {signal, fun})
{:ok, id}
end
end
end)
end
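  # Editorial sketch (script/.exs usage, as recommended above): run cleanup on
  # SIGTERM before the default graceful shutdown:
  #
  #     {:ok, _id} =
  #       System.trap_signal(:sigterm, fn ->
  #         IO.puts("SIGTERM received, cleaning up")
  #         :ok
  #       end)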
@doc """
Removes a previously registered `signal` with `id`.
"""
@doc since: "1.12.0"
@spec untrap_signal(signal, id) :: :ok | {:error, :not_found} when id: term
def untrap_signal(signal, id) when signal in @signals do
:elixir_config.serial(fn ->
gen_id = {signal, id}
case :gen_event.delete_handler(:erl_signal_server, {SignalHandler, gen_id}, :delete) do
:ok ->
if not trapping?(signal) do
:os.set_signal(signal, :default)
end
:ok
{:error, :module_not_found} ->
{:error, :not_found}
end
end)
end
defp trapping?(signal) do
signal in @vm_signals or
Enum.any?(signal_handlers(), &match?({_, {^signal, _}}, &1))
end
defp signal_handlers do
:gen_event.which_handlers(:erl_signal_server)
end
@doc """
Locates an executable on the system.
This function looks up an executable program given
its name using the environment variable PATH on Windows and Unix-like
operating systems. It also considers the proper executable
extension for each operating system, so for Windows it will try to
  look up files with `.com`, `.cmd` or similar extensions.
"""
@spec find_executable(binary) :: binary | nil
def find_executable(program) when is_binary(program) do
assert_no_null_byte!(program, "System.find_executable/1")
case :os.find_executable(String.to_charlist(program)) do
false -> nil
other -> List.to_string(other)
end
end
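  # Editorial sketch: the result depends on the local PATH, e.g.
  #
  #     System.find_executable("elixir")
  #     #=> "/usr/local/bin/elixir" (or nil when not installed)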
# TODO: Remove this once we require Erlang/OTP 24+
@compile {:no_warn_undefined, {:os, :env, 0}}
@doc """
Returns all system environment variables.
The returned value is a map containing name-value pairs.
Variable names and their values are strings.
"""
@spec get_env() :: %{optional(String.t()) => String.t()}
def get_env do
if function_exported?(:os, :env, 0) do
Map.new(:os.env(), fn {k, v} ->
{IO.chardata_to_string(k), IO.chardata_to_string(v)}
end)
else
Enum.into(:os.getenv(), %{}, fn var ->
var = IO.chardata_to_string(var)
[k, v] = String.split(var, "=", parts: 2)
{k, v}
end)
end
end
@doc """
Returns the value of the given environment variable.
The returned value of the environment variable
`varname` is a string. If the environment variable
is not set, returns the string specified in `default` or
`nil` if none is specified.
## Examples
iex> System.get_env("PORT")
"4000"
iex> System.get_env("NOT_SET")
nil
iex> System.get_env("NOT_SET", "4001")
"4001"
"""
@doc since: "1.9.0"
@spec get_env(String.t(), String.t() | nil) :: String.t() | nil
def get_env(varname, default \\ nil)
when is_binary(varname) and
(is_binary(default) or is_nil(default)) do
case :os.getenv(String.to_charlist(varname)) do
false -> default
other -> List.to_string(other)
end
end
@doc """
Returns the value of the given environment variable or `:error` if not found.
If the environment variable `varname` is set, then `{:ok, value}` is returned
where `value` is a string. If `varname` is not set, `:error` is returned.
## Examples
iex> System.fetch_env("PORT")
{:ok, "4000"}
iex> System.fetch_env("NOT_SET")
:error
"""
@doc since: "1.9.0"
@spec fetch_env(String.t()) :: {:ok, String.t()} | :error
def fetch_env(varname) when is_binary(varname) do
case :os.getenv(String.to_charlist(varname)) do
false -> :error
other -> {:ok, List.to_string(other)}
end
end
@doc """
Returns the value of the given environment variable or raises if not found.
Same as `get_env/1` but raises instead of returning `nil` when the variable is
not set.
## Examples
iex> System.fetch_env!("PORT")
"4000"
iex> System.fetch_env!("NOT_SET")
** (ArgumentError) could not fetch environment variable "NOT_SET" because it is not set
"""
@doc since: "1.9.0"
@spec fetch_env!(String.t()) :: String.t()
def fetch_env!(varname) when is_binary(varname) do
get_env(varname) ||
raise ArgumentError,
"could not fetch environment variable #{inspect(varname)} because it is not set"
end
@doc """
Erlang VM process identifier.
Returns the process identifier of the current Erlang emulator
in the format most commonly used by the operating system environment.
For more information, see `:os.getpid/0`.
"""
@deprecated "Use System.pid/0 instead"
@spec get_pid() :: binary
def get_pid, do: IO.iodata_to_binary(:os.getpid())
@doc """
Sets an environment variable value.
Sets a new `value` for the environment variable `varname`.
"""
@spec put_env(binary, binary) :: :ok
def put_env(varname, value) when is_binary(varname) and is_binary(value) do
case :binary.match(varname, "=") do
{_, _} ->
raise ArgumentError,
"cannot execute System.put_env/2 for key with \"=\", got: #{inspect(varname)}"
:nomatch ->
:os.putenv(String.to_charlist(varname), String.to_charlist(value))
:ok
end
end
@doc """
Sets multiple environment variables.
Sets a new value for each environment variable corresponding
to each `{key, value}` pair in `enum`.
"""
@spec put_env(Enumerable.t()) :: :ok
def put_env(enum) do
Enum.each(enum, fn {key, val} -> put_env(key, val) end)
end
@doc """
Deletes an environment variable.
Removes the variable `varname` from the environment.
"""
@spec delete_env(String.t()) :: :ok
def delete_env(varname) do
:os.unsetenv(String.to_charlist(varname))
:ok
end
@doc """
Deprecated mechanism to retrieve the last exception stacktrace.
It always return an empty list.
"""
@deprecated "Use __STACKTRACE__ instead"
def stacktrace do
[]
end
@doc """
Immediately halts the Erlang runtime system.
Terminates the Erlang runtime system without properly shutting down
applications and ports. Please see `stop/1` for a careful shutdown of the
system.
`status` must be a non-negative integer, the atom `:abort` or a binary.
* If an integer, the runtime system exits with the integer value which
is returned to the operating system.
* If `:abort`, the runtime system aborts producing a core dump, if that is
enabled in the operating system.
* If a string, an Erlang crash dump is produced with status as slogan,
and then the runtime system exits with status code 1.
Note that on many platforms, only the status codes 0-255 are supported
by the operating system.
For more information, see `:erlang.halt/1`.
## Examples
System.halt(0)
System.halt(1)
System.halt(:abort)
"""
@spec halt() :: no_return
@spec halt(non_neg_integer | binary | :abort) :: no_return
def halt(status \\ 0)
def halt(status) when is_integer(status) or status == :abort do
:erlang.halt(status)
end
def halt(status) when is_binary(status) do
:erlang.halt(String.to_charlist(status))
end
@doc """
Returns the operating system PID for the current Erlang runtime system instance.
Returns a string containing the (usually) numerical identifier for a process.
On Unix-like operating systems, this is typically the return value of the `getpid()` system call.
On Windows, the process ID as returned by the `GetCurrentProcessId()` system
call is used.
## Examples
System.pid()
"""
@doc since: "1.9.0"
@spec pid :: String.t()
def pid do
List.to_string(:os.getpid())
end
@doc """
Restarts all applications in the Erlang runtime system.
All applications are taken down smoothly, all code is unloaded, and all ports
are closed before the system starts all applications once again.
## Examples
System.restart()
"""
@doc since: "1.9.0"
@spec restart :: :ok
defdelegate restart(), to: :init
@doc """
Carefully stops the Erlang runtime system.
All applications are taken down smoothly, all code is unloaded, and all ports
are closed before the system terminates by calling `halt/1`.
`status` must be a non-negative integer or a binary.
* If an integer, the runtime system exits with the integer value which is
returned to the operating system.
* If a binary, an Erlang crash dump is produced with status as slogan, and
then the runtime system exits with status code 1.
Note that on many platforms, only the status codes 0-255 are supported
by the operating system.
## Examples
System.stop(0)
System.stop(1)
"""
@doc since: "1.5.0"
@spec stop(non_neg_integer | binary) :: no_return
def stop(status \\ 0)
def stop(status) when is_integer(status) do
:init.stop(status)
end
def stop(status) when is_binary(status) do
:init.stop(String.to_charlist(status))
end
@doc ~S"""
Executes the given `command` in the OS shell.
It uses `sh` for Unix-like systems and `cmd` for Windows.
  > **Important**: Use this function with care. In particular, **never
> pass untrusted user input to this function**, as the user would be
> able to perform "command injection attacks" by executing any code
> directly on the machine. Generally speaking, prefer to use `cmd/3`
> over this function.
## Examples
iex> System.shell("echo hello")
{"hello\n", 0}
If you want to stream the output to Standard IO as it arrives:
iex> System.shell("echo hello", into: IO.stream())
hello
{%IO.Stream{}, 0}
## Options
It accepts the same options as `cmd/3`, except for `arg0`.
"""
@doc since: "1.12.0"
@spec shell(binary, keyword) :: {Collectable.t(), exit_status :: non_neg_integer}
def shell(command, opts \\ []) when is_binary(command) do
assert_no_null_byte!(command, "System.shell/2")
# Finding shell command logic from :os.cmd in OTP
# https://github.com/erlang/otp/blob/8deb96fb1d017307e22d2ab88968b9ef9f1b71d0/lib/kernel/src/os.erl#L184
command =
case :os.type() do
{:unix, _} ->
command =
command
|> String.replace("\"", "\\\"")
|> String.to_charlist()
'sh -c "' ++ command ++ '"'
{:win32, osname} ->
command = String.to_charlist(command)
case {System.get_env("COMSPEC"), osname} do
{nil, :windows} -> 'command.com /s /c ' ++ command
{nil, _} -> 'cmd /s /c ' ++ command
{cmd, _} -> '#{cmd} /s /c ' ++ command
end
end
do_cmd({:spawn, command}, [], opts)
end
@doc ~S"""
Executes the given `command` with `args`.
`command` is expected to be an executable available in PATH
unless an absolute path is given.
`args` must be a list of binaries which the executable will receive
as its arguments as is. This means that:
* environment variables will not be interpolated
* wildcard expansion will not happen (unless `Path.wildcard/2` is used
explicitly)
* arguments do not need to be escaped or quoted for shell safety
This function returns a tuple containing the collected result
and the command exit status.
Internally, this function uses a `Port` for interacting with the
outside world. However, if you plan to run a long-running program,
ports guarantee stdin/stdout devices will be closed but it does not
automatically terminate the program. The documentation for the
`Port` module describes this problem and possible solutions under
the "Zombie processes" section.
## Examples
iex> System.cmd("echo", ["hello"])
{"hello\n", 0}
iex> System.cmd("echo", ["hello"], env: [{"MIX_ENV", "test"}])
{"hello\n", 0}
If you want to stream the output to Standard IO as it arrives:
iex> System.cmd("echo", ["hello"], into: IO.stream())
hello
{%IO.Stream{}, 0}
## Options
* `:into` - injects the result into the given collectable, defaults to `""`
* `:cd` - the directory to run the command in
* `:env` - an enumerable of tuples containing environment key-value as
binary. The child process inherits all environment variables from its
parent process, the Elixir application, except those overwritten or
cleared using this option. Specify a value of `nil` to clear (unset) an
environment variable, which is useful for preventing credentials passed
to the application from leaking into child processes.
* `:arg0` - sets the command arg0
* `:stderr_to_stdout` - redirects stderr to stdout when `true`
* `:parallelism` - when `true`, the VM will schedule port tasks to improve
parallelism in the system. If set to `false`, the VM will try to perform
commands immediately, improving latency at the expense of parallelism.
The default can be set on system startup by passing the "+spp" argument
to `--erl`.
## Error reasons
If invalid arguments are given, `ArgumentError` is raised by
`System.cmd/3`. `System.cmd/3` also expects a strict set of
options and will raise if unknown or invalid options are given.
Furthermore, `System.cmd/3` may fail with one of the POSIX reasons
detailed below:
* `:system_limit` - all available ports in the Erlang emulator are in use
* `:enomem` - there was not enough memory to create the port
* `:eagain` - there are no more available operating system processes
* `:enametoolong` - the external command given was too long
* `:emfile` - there are no more available file descriptors
(for the operating system process that the Erlang emulator runs in)
* `:enfile` - the file table is full (for the entire operating system)
* `:eacces` - the command does not point to an executable file
* `:enoent` - the command does not point to an existing file
## Shell commands
If you desire to execute a trusted command inside a shell, with pipes,
redirecting and so on, please check `shell/2`.
"""
@spec cmd(binary, [binary], keyword) :: {Collectable.t(), exit_status :: non_neg_integer}
def cmd(command, args, opts \\ []) when is_binary(command) and is_list(args) do
assert_no_null_byte!(command, "System.cmd/3")
unless Enum.all?(args, &is_binary/1) do
raise ArgumentError, "all arguments for System.cmd/3 must be binaries"
end
cmd = String.to_charlist(command)
cmd =
if Path.type(cmd) == :absolute do
cmd
else
:os.find_executable(cmd) || :erlang.error(:enoent, [command, args, opts])
end
do_cmd({:spawn_executable, cmd}, [args: args], opts)
end
defp do_cmd(port_init, base_opts, opts) do
{into, opts} = cmd_opts(opts, [:use_stdio, :exit_status, :binary, :hide] ++ base_opts, "")
{initial, fun} = Collectable.into(into)
try do
do_port(Port.open(port_init, opts), initial, fun)
catch
kind, reason ->
fun.(initial, :halt)
:erlang.raise(kind, reason, __STACKTRACE__)
else
{acc, status} -> {fun.(acc, :done), status}
end
end
defp do_port(port, acc, fun) do
receive do
{^port, {:data, data}} ->
do_port(port, fun.(acc, {:cont, data}), fun)
{^port, {:exit_status, status}} ->
{acc, status}
end
end
defp cmd_opts([{:into, any} | t], opts, _into),
do: cmd_opts(t, opts, any)
defp cmd_opts([{:cd, bin} | t], opts, into) when is_binary(bin),
do: cmd_opts(t, [{:cd, bin} | opts], into)
defp cmd_opts([{:arg0, bin} | t], opts, into) when is_binary(bin),
do: cmd_opts(t, [{:arg0, bin} | opts], into)
defp cmd_opts([{:stderr_to_stdout, true} | t], opts, into),
do: cmd_opts(t, [:stderr_to_stdout | opts], into)
defp cmd_opts([{:stderr_to_stdout, false} | t], opts, into),
do: cmd_opts(t, opts, into)
defp cmd_opts([{:parallelism, bool} | t], opts, into) when is_boolean(bool),
do: cmd_opts(t, [{:parallelism, bool} | opts], into)
defp cmd_opts([{:env, enum} | t], opts, into),
do: cmd_opts(t, [{:env, validate_env(enum)} | opts], into)
defp cmd_opts([{key, val} | _], _opts, _into),
do: raise(ArgumentError, "invalid option #{inspect(key)} with value #{inspect(val)}")
defp cmd_opts([], opts, into),
do: {into, opts}
defp validate_env(enum) do
Enum.map(enum, fn
{k, nil} ->
{String.to_charlist(k), false}
{k, v} ->
{String.to_charlist(k), String.to_charlist(v)}
other ->
raise ArgumentError, "invalid environment key-value #{inspect(other)}"
end)
end
@doc """
Returns the current monotonic time in the `:native` time unit.
This time is monotonically increasing and starts in an unspecified
point in time.
Inlined by the compiler.
"""
@spec monotonic_time() :: integer
def monotonic_time do
:erlang.monotonic_time()
end
@doc """
Returns the current monotonic time in the given time unit.
This time is monotonically increasing and starts in an unspecified
point in time.
"""
@spec monotonic_time(time_unit) :: integer
def monotonic_time(unit) do
:erlang.monotonic_time(normalize_time_unit(unit))
end
@doc """
Returns the current system time in the `:native` time unit.
It is the VM view of the `os_time/0`. They may not match in
case of time warps although the VM works towards aligning
them. This time is not monotonic.
Inlined by the compiler.
"""
@spec system_time() :: integer
def system_time do
:erlang.system_time()
end
@doc """
Returns the current system time in the given time unit.
It is the VM view of the `os_time/0`. They may not match in
case of time warps although the VM works towards aligning
them. This time is not monotonic.
"""
@spec system_time(time_unit) :: integer
def system_time(unit) do
:erlang.system_time(normalize_time_unit(unit))
end
@doc """
Converts `time` from time unit `from_unit` to time unit `to_unit`.
The result is rounded via the floor function.
`convert_time_unit/3` accepts an additional time unit (other than the
ones in the `t:time_unit/0` type) called `:native`. `:native` is the time
unit used by the Erlang runtime system. It's determined when the runtime
starts and stays the same until the runtime is stopped, but could differ
the next time the runtime is started on the same machine. For this reason,
you should use this function to convert `:native` time units to a predictable
unit before you display them to humans.
To determine how many seconds the `:native` unit represents in your current
runtime, you can call this function to convert 1 second to the `:native`
time unit: `System.convert_time_unit(1, :second, :native)`.
"""
@spec convert_time_unit(integer, time_unit | :native, time_unit | :native) :: integer
def convert_time_unit(time, from_unit, to_unit) do
:erlang.convert_time_unit(time, normalize_time_unit(from_unit), normalize_time_unit(to_unit))
end
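  # Editorial sketch: converting an elapsed :native measurement into milliseconds:
  #
  #     prev = System.monotonic_time()
  #     # ... execute some code ...
  #     diff = System.monotonic_time() - prev
  #     System.convert_time_unit(diff, :native, :millisecond)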
@doc """
Returns the current time offset between the Erlang VM monotonic
time and the Erlang VM system time.
The result is returned in the `:native` time unit.
See `time_offset/1` for more information.
Inlined by the compiler.
"""
@spec time_offset() :: integer
def time_offset do
:erlang.time_offset()
end
@doc """
Returns the current time offset between the Erlang VM monotonic
time and the Erlang VM system time.
The result is returned in the given time unit `unit`. The returned
offset, added to an Erlang monotonic time (for instance, one obtained with
`monotonic_time/1`), gives the Erlang system time that corresponds
to that monotonic time.
"""
@spec time_offset(time_unit) :: integer
def time_offset(unit) do
:erlang.time_offset(normalize_time_unit(unit))
end
@doc """
Returns the current operating system (OS) time.
The result is returned in the `:native` time unit.
This time may be adjusted forwards or backwards in time
with no limitation and is not monotonic.
Inlined by the compiler.
"""
@spec os_time() :: integer
@doc since: "1.3.0"
def os_time do
:os.system_time()
end
@doc """
Returns the current operating system (OS) time in the given time `unit`.
This time may be adjusted forwards or backwards in time
with no limitation and is not monotonic.
"""
@spec os_time(time_unit) :: integer
@doc since: "1.3.0"
def os_time(unit) do
:os.system_time(normalize_time_unit(unit))
end
@doc """
Returns the Erlang/OTP release number.
"""
@spec otp_release :: String.t()
@doc since: "1.3.0"
def otp_release do
:erlang.list_to_binary(:erlang.system_info(:otp_release))
end
@doc """
Returns the number of schedulers in the VM.
"""
@spec schedulers :: pos_integer
@doc since: "1.3.0"
def schedulers do
:erlang.system_info(:schedulers)
end
@doc """
Returns the number of schedulers online in the VM.
"""
@spec schedulers_online :: pos_integer
@doc since: "1.3.0"
def schedulers_online do
:erlang.system_info(:schedulers_online)
end
@doc """
Generates and returns an integer that is unique in the current runtime
instance.
"Unique" means that this function, called with the same list of `modifiers`,
will never return the same integer more than once on the current runtime
instance.
If `modifiers` is `[]`, then a unique integer (that can be positive or negative) is returned.
Other modifiers can be passed to change the properties of the returned integer:
* `:positive` - the returned integer is guaranteed to be positive.
* `:monotonic` - the returned integer is monotonically increasing. This
means that, on the same runtime instance (but even on different
processes), integers returned using the `:monotonic` modifier will always
be strictly less than integers returned by successive calls with the
`:monotonic` modifier.
All modifiers listed above can be combined; repeated modifiers in `modifiers`
will be ignored.
Inlined by the compiler.
"""
@spec unique_integer([:positive | :monotonic]) :: integer
def unique_integer(modifiers \\ []) do
:erlang.unique_integer(modifiers)
end
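  # Editorial sketch: for example, to obtain ids that never collide within the
  # current runtime instance:
  #
  #     System.unique_integer([:positive])
  #     System.unique_integer([:positive, :monotonic])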
defp assert_no_null_byte!(binary, operation) do
case :binary.match(binary, "\0") do
{_, _} ->
raise ArgumentError,
"cannot execute #{operation} for program with null byte, got: #{inspect(binary)}"
:nomatch ->
binary
end
end
defp normalize_time_unit(:native), do: :native
defp normalize_time_unit(:second), do: :second
defp normalize_time_unit(:millisecond), do: :millisecond
defp normalize_time_unit(:microsecond), do: :microsecond
defp normalize_time_unit(:nanosecond), do: :nanosecond
defp normalize_time_unit(:seconds), do: warn(:seconds, :second)
defp normalize_time_unit(:milliseconds), do: warn(:milliseconds, :millisecond)
defp normalize_time_unit(:microseconds), do: warn(:microseconds, :microsecond)
defp normalize_time_unit(:nanoseconds), do: warn(:nanoseconds, :nanosecond)
defp normalize_time_unit(:milli_seconds), do: warn(:milli_seconds, :millisecond)
defp normalize_time_unit(:micro_seconds), do: warn(:micro_seconds, :microsecond)
defp normalize_time_unit(:nano_seconds), do: warn(:nano_seconds, :nanosecond)
defp normalize_time_unit(unit) when is_integer(unit) and unit > 0, do: unit
defp normalize_time_unit(other) do
raise ArgumentError,
"unsupported time unit. Expected :second, :millisecond, " <>
":microsecond, :nanosecond, or a positive integer, " <> "got #{inspect(other)}"
end
defp warn(unit, replacement_unit) do
IO.warn_once(
{__MODULE__, unit},
"deprecated time unit: #{inspect(unit)}. A time unit should be " <>
":second, :millisecond, :microsecond, :nanosecond, or a positive integer",
_stacktrace_drop_levels = 4
)
replacement_unit
end
end
| 30.171793 | 108 | 0.665208 |
0348ba60458f58f3d5b73829a90117674e8d5580 | 1,600 | ex | Elixir | test/support/apps/phx1_5/lib/phx1_5_web/endpoint.ex | nhphuc412/torch | d2145b9fdb889517c0a36a5367f0b116ee7a6844 | [
"MIT"
] | 528 | 2019-09-13T15:10:36.000Z | 2022-03-31T10:28:27.000Z | test/support/apps/phx1_5/lib/phx1_5_web/endpoint.ex | nhphuc412/torch | d2145b9fdb889517c0a36a5367f0b116ee7a6844 | [
"MIT"
] | 133 | 2019-09-13T17:46:59.000Z | 2022-03-01T13:37:10.000Z | test/support/apps/phx1_5/lib/phx1_5_web/endpoint.ex | nhphuc412/torch | d2145b9fdb889517c0a36a5367f0b116ee7a6844 | [
"MIT"
] | 38 | 2019-10-29T20:37:13.000Z | 2022-03-03T05:19:33.000Z |
defmodule Phx15Web.Endpoint do
use Phoenix.Endpoint, otp_app: :phx1_5
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
@session_options [
store: :cookie,
key: "_phx1_5_key",
signing_salt: "LI9ono2V"
]
socket "/socket", Phx15Web.UserSocket,
websocket: true,
longpoll: false
socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :phx1_5,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
plug Phoenix.Ecto.CheckRepoStatus, otp_app: :phx1_5
end
plug Phoenix.LiveDashboard.RequestLogger,
param_key: "request_logger",
cookie_key: "request_logger"
plug Plug.RequestId
plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session, @session_options
plug Phx15Web.Router
end
| 29.090909 | 97 | 0.716875 |
0348bccb9c3563bfcfbd73a2b65a352a27438fc0 | 694 | ex | Elixir | lib/policr_mini_bot/consumer.ex | skyplaying/policr-mini | ac265daa251fd76b770d0ce08c67075a6a57f796 | [
"MIT"
] | null | null | null | lib/policr_mini_bot/consumer.ex | skyplaying/policr-mini | ac265daa251fd76b770d0ce08c67075a6a57f796 | [
"MIT"
] | 2 | 2022-02-25T06:15:30.000Z | 2022-02-25T06:15:33.000Z | lib/policr_mini_bot/consumer.ex | skyplaying/policr-mini | ac265daa251fd76b770d0ce08c67075a6a57f796 | [
"MIT"
] | null | null | null | defmodule PolicrMiniBot.Consumer do
@moduledoc """
  Consumer implementation for message updates.
"""
use DynamicSupervisor
alias PolicrMiniBot.State
alias PolicrMini.Logger
def start_link(default \\ []) when is_list(default) do
DynamicSupervisor.start_link(__MODULE__, %{}, name: __MODULE__)
end
@impl true
def init(_) do
DynamicSupervisor.init(strategy: :one_for_one)
end
def receive(%Telegex.Model.Update{} = update) do
DynamicSupervisor.start_child(
__MODULE__,
{Task,
fn ->
try do
Telegex.Plug.Pipeline.call(update, %State{})
rescue
e -> Logger.error("Uncaught Error: #{inspect(e)}")
end
end}
)
end
end
| 20.411765 | 67 | 0.632565 |
0348c5bce75f33b9f94ac214582a98e3f650883b | 14,679 | ex | Elixir | lib/codes/codes_r29.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_r29.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_r29.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_R29 do
alias IcdCode.ICDCode
def _R290 do
%ICDCode{full_code: "R290",
category_code: "R29",
short_code: "0",
full_name: "Tetany",
short_name: "Tetany",
category_name: "Tetany"
}
end
def _R291 do
%ICDCode{full_code: "R291",
category_code: "R29",
short_code: "1",
full_name: "Meningismus",
short_name: "Meningismus",
category_name: "Meningismus"
}
end
def _R292 do
%ICDCode{full_code: "R292",
category_code: "R29",
short_code: "2",
full_name: "Abnormal reflex",
short_name: "Abnormal reflex",
category_name: "Abnormal reflex"
}
end
def _R293 do
%ICDCode{full_code: "R293",
category_code: "R29",
short_code: "3",
full_name: "Abnormal posture",
short_name: "Abnormal posture",
category_name: "Abnormal posture"
}
end
def _R294 do
%ICDCode{full_code: "R294",
category_code: "R29",
short_code: "4",
full_name: "Clicking hip",
short_name: "Clicking hip",
category_name: "Clicking hip"
}
end
def _R295 do
%ICDCode{full_code: "R295",
category_code: "R29",
short_code: "5",
full_name: "Transient paralysis",
short_name: "Transient paralysis",
category_name: "Transient paralysis"
}
end
def _R296 do
%ICDCode{full_code: "R296",
category_code: "R29",
short_code: "6",
full_name: "Repeated falls",
short_name: "Repeated falls",
category_name: "Repeated falls"
}
end
def _R29700 do
%ICDCode{full_code: "R29700",
category_code: "R29",
short_code: "700",
full_name: "NIHSS score 0",
short_name: "NIHSS score 0",
category_name: "NIHSS score 0"
}
end
def _R29701 do
%ICDCode{full_code: "R29701",
category_code: "R29",
short_code: "701",
full_name: "NIHSS score 1",
short_name: "NIHSS score 1",
category_name: "NIHSS score 1"
}
end
def _R29702 do
%ICDCode{full_code: "R29702",
category_code: "R29",
short_code: "702",
full_name: "NIHSS score 2",
short_name: "NIHSS score 2",
category_name: "NIHSS score 2"
}
end
def _R29703 do
%ICDCode{full_code: "R29703",
category_code: "R29",
short_code: "703",
full_name: "NIHSS score 3",
short_name: "NIHSS score 3",
category_name: "NIHSS score 3"
}
end
def _R29704 do
%ICDCode{full_code: "R29704",
category_code: "R29",
short_code: "704",
full_name: "NIHSS score 4",
short_name: "NIHSS score 4",
category_name: "NIHSS score 4"
}
end
def _R29705 do
%ICDCode{full_code: "R29705",
category_code: "R29",
short_code: "705",
full_name: "NIHSS score 5",
short_name: "NIHSS score 5",
category_name: "NIHSS score 5"
}
end
def _R29706 do
%ICDCode{full_code: "R29706",
category_code: "R29",
short_code: "706",
full_name: "NIHSS score 6",
short_name: "NIHSS score 6",
category_name: "NIHSS score 6"
}
end
def _R29707 do
%ICDCode{full_code: "R29707",
category_code: "R29",
short_code: "707",
full_name: "NIHSS score 7",
short_name: "NIHSS score 7",
category_name: "NIHSS score 7"
}
end
def _R29708 do
%ICDCode{full_code: "R29708",
category_code: "R29",
short_code: "708",
full_name: "NIHSS score 8",
short_name: "NIHSS score 8",
category_name: "NIHSS score 8"
}
end
def _R29709 do
%ICDCode{full_code: "R29709",
category_code: "R29",
short_code: "709",
full_name: "NIHSS score 9",
short_name: "NIHSS score 9",
category_name: "NIHSS score 9"
}
end
def _R29710 do
%ICDCode{full_code: "R29710",
category_code: "R29",
short_code: "710",
full_name: "NIHSS score 10",
short_name: "NIHSS score 10",
category_name: "NIHSS score 10"
}
end
def _R29711 do
%ICDCode{full_code: "R29711",
category_code: "R29",
short_code: "711",
full_name: "NIHSS score 11",
short_name: "NIHSS score 11",
category_name: "NIHSS score 11"
}
end
def _R29712 do
%ICDCode{full_code: "R29712",
category_code: "R29",
short_code: "712",
full_name: "NIHSS score 12",
short_name: "NIHSS score 12",
category_name: "NIHSS score 12"
}
end
def _R29713 do
%ICDCode{full_code: "R29713",
category_code: "R29",
short_code: "713",
full_name: "NIHSS score 13",
short_name: "NIHSS score 13",
category_name: "NIHSS score 13"
}
end
def _R29714 do
%ICDCode{full_code: "R29714",
category_code: "R29",
short_code: "714",
full_name: "NIHSS score 14",
short_name: "NIHSS score 14",
category_name: "NIHSS score 14"
}
end
def _R29715 do
%ICDCode{full_code: "R29715",
category_code: "R29",
short_code: "715",
full_name: "NIHSS score 15",
short_name: "NIHSS score 15",
category_name: "NIHSS score 15"
}
end
def _R29716 do
%ICDCode{full_code: "R29716",
category_code: "R29",
short_code: "716",
full_name: "NIHSS score 16",
short_name: "NIHSS score 16",
category_name: "NIHSS score 16"
}
end
def _R29717 do
%ICDCode{full_code: "R29717",
category_code: "R29",
short_code: "717",
full_name: "NIHSS score 17",
short_name: "NIHSS score 17",
category_name: "NIHSS score 17"
}
end
def _R29718 do
%ICDCode{full_code: "R29718",
category_code: "R29",
short_code: "718",
full_name: "NIHSS score 18",
short_name: "NIHSS score 18",
category_name: "NIHSS score 18"
}
end
def _R29719 do
%ICDCode{full_code: "R29719",
category_code: "R29",
short_code: "719",
full_name: "NIHSS score 19",
short_name: "NIHSS score 19",
category_name: "NIHSS score 19"
}
end
def _R29720 do
%ICDCode{full_code: "R29720",
category_code: "R29",
short_code: "720",
full_name: "NIHSS score 20",
short_name: "NIHSS score 20",
category_name: "NIHSS score 20"
}
end
def _R29721 do
%ICDCode{full_code: "R29721",
category_code: "R29",
short_code: "721",
full_name: "NIHSS score 21",
short_name: "NIHSS score 21",
category_name: "NIHSS score 21"
}
end
def _R29722 do
%ICDCode{full_code: "R29722",
category_code: "R29",
short_code: "722",
full_name: "NIHSS score 22",
short_name: "NIHSS score 22",
category_name: "NIHSS score 22"
}
end
def _R29723 do
%ICDCode{full_code: "R29723",
category_code: "R29",
short_code: "723",
full_name: "NIHSS score 23",
short_name: "NIHSS score 23",
category_name: "NIHSS score 23"
}
end
def _R29724 do
%ICDCode{full_code: "R29724",
category_code: "R29",
short_code: "724",
full_name: "NIHSS score 24",
short_name: "NIHSS score 24",
category_name: "NIHSS score 24"
}
end
def _R29725 do
%ICDCode{full_code: "R29725",
category_code: "R29",
short_code: "725",
full_name: "NIHSS score 25",
short_name: "NIHSS score 25",
category_name: "NIHSS score 25"
}
end
def _R29726 do
%ICDCode{full_code: "R29726",
category_code: "R29",
short_code: "726",
full_name: "NIHSS score 26",
short_name: "NIHSS score 26",
category_name: "NIHSS score 26"
}
end
def _R29727 do
%ICDCode{full_code: "R29727",
category_code: "R29",
short_code: "727",
full_name: "NIHSS score 27",
short_name: "NIHSS score 27",
category_name: "NIHSS score 27"
}
end
def _R29728 do
%ICDCode{full_code: "R29728",
category_code: "R29",
short_code: "728",
full_name: "NIHSS score 28",
short_name: "NIHSS score 28",
category_name: "NIHSS score 28"
}
end
def _R29729 do
%ICDCode{full_code: "R29729",
category_code: "R29",
short_code: "729",
full_name: "NIHSS score 29",
short_name: "NIHSS score 29",
category_name: "NIHSS score 29"
}
end
def _R29730 do
%ICDCode{full_code: "R29730",
category_code: "R29",
short_code: "730",
full_name: "NIHSS score 30",
short_name: "NIHSS score 30",
category_name: "NIHSS score 30"
}
end
def _R29731 do
%ICDCode{full_code: "R29731",
category_code: "R29",
short_code: "731",
full_name: "NIHSS score 31",
short_name: "NIHSS score 31",
category_name: "NIHSS score 31"
}
end
def _R29732 do
%ICDCode{full_code: "R29732",
category_code: "R29",
short_code: "732",
full_name: "NIHSS score 32",
short_name: "NIHSS score 32",
category_name: "NIHSS score 32"
}
end
def _R29733 do
%ICDCode{full_code: "R29733",
category_code: "R29",
short_code: "733",
full_name: "NIHSS score 33",
short_name: "NIHSS score 33",
category_name: "NIHSS score 33"
}
end
def _R29734 do
%ICDCode{full_code: "R29734",
category_code: "R29",
short_code: "734",
full_name: "NIHSS score 34",
short_name: "NIHSS score 34",
category_name: "NIHSS score 34"
}
end
def _R29735 do
%ICDCode{full_code: "R29735",
category_code: "R29",
short_code: "735",
full_name: "NIHSS score 35",
short_name: "NIHSS score 35",
category_name: "NIHSS score 35"
}
end
def _R29736 do
%ICDCode{full_code: "R29736",
category_code: "R29",
short_code: "736",
full_name: "NIHSS score 36",
short_name: "NIHSS score 36",
category_name: "NIHSS score 36"
}
end
def _R29737 do
%ICDCode{full_code: "R29737",
category_code: "R29",
short_code: "737",
full_name: "NIHSS score 37",
short_name: "NIHSS score 37",
category_name: "NIHSS score 37"
}
end
def _R29738 do
%ICDCode{full_code: "R29738",
category_code: "R29",
short_code: "738",
full_name: "NIHSS score 38",
short_name: "NIHSS score 38",
category_name: "NIHSS score 38"
}
end
def _R29739 do
%ICDCode{full_code: "R29739",
category_code: "R29",
short_code: "739",
full_name: "NIHSS score 39",
short_name: "NIHSS score 39",
category_name: "NIHSS score 39"
}
end
def _R29740 do
%ICDCode{full_code: "R29740",
category_code: "R29",
short_code: "740",
full_name: "NIHSS score 40",
short_name: "NIHSS score 40",
category_name: "NIHSS score 40"
}
end
def _R29741 do
%ICDCode{full_code: "R29741",
category_code: "R29",
short_code: "741",
full_name: "NIHSS score 41",
short_name: "NIHSS score 41",
category_name: "NIHSS score 41"
}
end
def _R29742 do
%ICDCode{full_code: "R29742",
category_code: "R29",
short_code: "742",
full_name: "NIHSS score 42",
short_name: "NIHSS score 42",
category_name: "NIHSS score 42"
}
end
def _R29810 do
%ICDCode{full_code: "R29810",
category_code: "R29",
short_code: "810",
full_name: "Facial weakness",
short_name: "Facial weakness",
category_name: "Facial weakness"
}
end
def _R29818 do
%ICDCode{full_code: "R29818",
category_code: "R29",
short_code: "818",
full_name: "Other symptoms and signs involving the nervous system",
short_name: "Other symptoms and signs involving the nervous system",
category_name: "Other symptoms and signs involving the nervous system"
}
end
def _R29890 do
%ICDCode{full_code: "R29890",
category_code: "R29",
short_code: "890",
full_name: "Loss of height",
short_name: "Loss of height",
category_name: "Loss of height"
}
end
def _R29891 do
%ICDCode{full_code: "R29891",
category_code: "R29",
short_code: "891",
full_name: "Ocular torticollis",
short_name: "Ocular torticollis",
category_name: "Ocular torticollis"
}
end
def _R29898 do
%ICDCode{full_code: "R29898",
category_code: "R29",
short_code: "898",
full_name: "Other symptoms and signs involving the musculoskeletal system",
short_name: "Other symptoms and signs involving the musculoskeletal system",
category_name: "Other symptoms and signs involving the musculoskeletal system"
}
end
def _R2990 do
%ICDCode{full_code: "R2990",
category_code: "R29",
short_code: "90",
full_name: "Unspecified symptoms and signs involving the nervous system",
short_name: "Unspecified symptoms and signs involving the nervous system",
category_name: "Unspecified symptoms and signs involving the nervous system"
}
end
def _R2991 do
%ICDCode{full_code: "R2991",
category_code: "R29",
short_code: "91",
full_name: "Unspecified symptoms and signs involving the musculoskeletal system",
short_name: "Unspecified symptoms and signs involving the musculoskeletal system",
category_name: "Unspecified symptoms and signs involving the musculoskeletal system"
}
end
end
| 28.228846 | 94 | 0.554602 |
0348e68697d4c8fbde8506d6960e795f7f689e6b | 1,387 | ex | Elixir | apps/cms/lib/partial/route_pdf.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 42 | 2019-05-29T16:05:30.000Z | 2021-08-09T16:03:37.000Z | apps/cms/lib/partial/route_pdf.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 872 | 2019-05-29T17:55:50.000Z | 2022-03-30T09:28:43.000Z | apps/cms/lib/partial/route_pdf.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 12 | 2019-07-01T18:33:21.000Z | 2022-03-10T02:13:57.000Z | defmodule CMS.Partial.RoutePdf do
@moduledoc """
Represents metadata about all the pdfs we have associated with a route.
"""
import CMS.Helpers, only: [field_value: 2, parse_date: 2, parse_files: 2]
@enforce_keys [:path, :date_start]
defstruct [
:path,
:date_start,
date_end: nil,
link_text_override: nil
]
@type t :: %__MODULE__{
path: String.t(),
date_start: Date.t(),
date_end: Date.t(),
link_text_override: String.t()
}
@spec from_api(map) :: t
def from_api(data) do
%__MODULE__{
path:
data
|> parse_files("field_route_pdf")
|> List.first()
|> Map.get(:url)
|> URI.parse()
|> Map.get(:path),
date_start: parse_date(data, "field_pdf_date_start"),
date_end: parse_date(data, "field_pdf_date_end"),
link_text_override: field_value(data, "field_link_text_override")
}
end
@spec custom?(t) :: boolean
def custom?(%__MODULE__{link_text_override: text}) do
text != nil and text != ""
end
@spec started?(t, Date.t()) :: boolean
def started?(%__MODULE__{date_start: date_start}, date) do
Date.compare(date_start, date) != :gt
end
@spec outdated?(t, Date.t()) :: boolean
def outdated?(%__MODULE__{date_end: date_end}, date) do
date_end != nil && Date.compare(date_end, date) == :lt
end
end
| 25.685185 | 75 | 0.61788 |
034905be3c4789482633f9efa4c23c1555185efa | 1,526 | ex | Elixir | example/test/support/data_case.ex | BrianPhilips/ecto_nested_changeset | 4d16f2955c1e7c71e25685030f66aefeba5ce0fc | [
"MIT"
] | null | null | null | example/test/support/data_case.ex | BrianPhilips/ecto_nested_changeset | 4d16f2955c1e7c71e25685030f66aefeba5ce0fc | [
"MIT"
] | null | null | null | example/test/support/data_case.ex | BrianPhilips/ecto_nested_changeset | 4d16f2955c1e7c71e25685030f66aefeba5ce0fc | [
"MIT"
] | null | null | null | defmodule Nested.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use Nested.DataCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
alias Nested.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Nested.DataCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Nested.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Nested.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transforms changeset errors into a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Regex.replace(~r"%{(\w+)}", message, fn _, key ->
opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
end)
end)
end
end
| 27.25 | 77 | 0.687418 |
034930a483b68448dcecb1deb313c5440acce02a | 74 | ex | Elixir | lib/battle_box_web/gettext.ex | GrantJamesPowell/battle_box | 301091955b68cd4672f6513d645eca4e3c4e17d0 | [
"Apache-2.0"
] | 2 | 2020-10-17T05:48:49.000Z | 2020-11-11T02:34:15.000Z | lib/battle_box_web/gettext.ex | FlyingDutchmanGames/battle_box | 301091955b68cd4672f6513d645eca4e3c4e17d0 | [
"Apache-2.0"
] | 3 | 2020-05-18T05:52:21.000Z | 2020-06-09T07:24:14.000Z | lib/battle_box_web/gettext.ex | FlyingDutchmanGames/battle_box | 301091955b68cd4672f6513d645eca4e3c4e17d0 | [
"Apache-2.0"
] | null | null | null | defmodule BattleBoxWeb.Gettext do
use Gettext, otp_app: :battle_box
end
| 18.5 | 35 | 0.810811 |
03495c0d6bc72423baf9039b42300ea852664299 | 1,109 | exs | Elixir | config/config.exs | tonic-sys/hlclock | 6721871937ecc2ae3e77444faf29a3da051e30c4 | [
"Apache-2.0"
] | 39 | 2018-02-12T14:28:18.000Z | 2021-09-01T07:40:39.000Z | config/config.exs | elixir-toniq/hlclock | 6721871937ecc2ae3e77444faf29a3da051e30c4 | [
"Apache-2.0"
] | 6 | 2017-08-20T04:19:08.000Z | 2017-09-21T18:27:47.000Z | config/config.exs | keathley/hlclock | 6721871937ecc2ae3e77444faf29a3da051e30c4 | [
"Apache-2.0"
] | 4 | 2018-03-29T14:28:14.000Z | 2021-01-08T11:47:58.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :hlc, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:hlc, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 35.774194 | 73 | 0.749324 |
03495d1cc5cbe382f03dadeabaeb2c4866134e05 | 58,146 | ex | Elixir | lib/mix/lib/mix/tasks/release.ex | sega-yarkin/elixir | d588716d9f20b6bdd956a158d3a89e2815f14a98 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/release.ex | sega-yarkin/elixir | d588716d9f20b6bdd956a158d3a89e2815f14a98 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/release.ex | sega-yarkin/elixir | d588716d9f20b6bdd956a158d3a89e2815f14a98 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Release do
use Mix.Task
@shortdoc "Assembles a self-contained release"
@moduledoc """
Assembles a self-contained release for the current project:
$ MIX_ENV=prod mix release
$ MIX_ENV=prod mix release NAME
Once a release is assembled, it can be packaged and deployed to a
target, as long as the target runs on the same operating system (OS)
distribution and version as the machine running the `mix release`
command.
A release can be configured in your `mix.exs` file under the `:releases`
key inside `def project`:
def project do
[
releases: [
demo: [
include_executables_for: [:unix],
applications: [runtime_tools: :permanent]
],
...
]
]
end
You can specify multiple releases where the key is the release name
and the value is a keyword list with the release configuration.
Releasing a certain name is done with:
$ MIX_ENV=prod mix release demo
If the given name does not exist, an error is raised.
If `mix release`, without a name, is invoked and there are multiple names,
an error will be raised unless you set `default_release: NAME` at the root
of your project configuration.
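
  For example, a minimal sketch of such a configuration (the release names
  are illustrative):

      def project do
        [
          default_release: :demo,
          releases: [
            demo: [...],
            demo_debug: [...]
          ]
        ]
      end
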
If `mix release` is invoked and there are no names, a release using the
application name and default values is assembled.
## Why releases?
Releases allow developers to precompile and package all of their code
and the runtime into a single unit. The benefits of releases are:
* Code preloading. The VM has two mechanisms for loading code:
interactive and embedded. By default, it runs in the interactive
mode which dynamically loads modules when they are used for the
first time. The first time your application calls `Enum.map/2`,
the VM will find the `Enum` module and load it. There's a downside:
when you start a new server in production, it may need to load
many other modules, causing the first requests to have an unusual
spike in response time. With releases, the system is configured in
interactive mode and then it swaps to embedded mode, which preloads
all modules and guarantees your system is ready to handle requests
after booting.
* Configuration and customization. Releases give developers fine
grained control over system configuration and the VM flags used
to start the system.
* Self-contained. A release does not require the source code to be
included in your production artifacts. All of the code is precompiled
and packaged. Releases do not even require Erlang or Elixir in your
servers, as it includes the Erlang VM and its runtime by default.
Furthermore, both Erlang and Elixir standard libraries are stripped
to bring only the parts you are actually using.
* Multiple releases. You can assemble different releases with
different configuration per application or even with different
applications altogether.
* Management scripts. Releases come with scripts to start, restart,
connect to the running system remotely, execute RPC calls, run as
daemon, run as a Windows service, and more.
## Running the release
Once a release is assembled, you can start it by calling
`bin/RELEASE_NAME start` inside the release. In production, you would do:
$ MIX_ENV=prod mix release
$ _build/prod/rel/my_app/bin/my_app start
`bin/my_app start` will start the system connected to the current standard
input/output, where logs are also written to by default. This is the
preferred way to run the system. Many tools, such as `systemd`, platforms
as a service, such as Heroku, and many containers platforms, such as Docker,
are capable of processing the standard input/output and redirecting
the log contents elsewhere. Those tools and platforms also take care
of restarting the system in case it crashes.
You can also execute one-off commands, run the release as a daemon on
Unix-like system, or install it as a service on Windows. We will take a
look at those next. You can also list all available commands by invoking
`bin/RELEASE_NAME`.
### One-off commands (eval and rpc)
If you want to invoke specific modules and functions in your release,
you can do so in two ways: using `eval` or `rpc`.
$ bin/RELEASE_NAME eval "IO.puts(:hello)"
$ bin/RELEASE_NAME rpc "IO.puts(:hello)"
The `eval` command starts its own instance of the VM but without
starting any of the applications in the release and without starting
distribution. For example, if you need to do some prep work before
running the actual system, like migrating your database, `eval` can
be a good fit. Just keep in mind any application you may use during
eval has to be explicitly loaded and/or started.
You can start an application by calling `Application.ensure_all_started/1`.
However, if for some reason you cannot start an application, maybe
because it will run other services you do not want, you must at least
load the application by calling `Application.load/1`. If you don't
load the application, any attempt at reading its environment or
configuration may fail. Note that if you start an application,
it is automatically loaded before started.
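
  For example, a minimal sketch of a one-off command that loads the
  application before reading its environment (the `:my_app` name and the
  `:secret_key` key are illustrative):

      $ bin/RELEASE_NAME eval "Application.load(:my_app); IO.inspect(Application.fetch_env!(:my_app, :secret_key))"
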
Another way to run commands is with `rpc`, which will connect to the
system currently running and instruct it to execute the given
expression. This means you need to guarantee the system was already
started and be careful with the instructions you are executing.
You can also use `remote` to connect a remote IEx session to the
system.
#### Helper module
As you operate your system, you may find yourself running some piece of code
as a one-off command quite often. You may consider creating a module to group
these tasks:
# lib/my_app/release_tasks.ex
defmodule MyApp.ReleaseTasks do
def eval_purge_stale_data() do
# Eval commands needs to start the app before
# Or Application.load(:my_app) if you can't start it
Application.ensure_all_started(:my_app)
# Code that purges stale data
...
end
def rpc_print_connected_users() do
# Code that print users connected to the current running system
...
end
end
In the example above, we prefixed the function names with the command
name used to execute them, but that is entirely optional.
And to run them:
$ bin/RELEASE_NAME eval "MyApp.ReleaseTasks.eval_purge_stale_data()"
$ bin/RELEASE_NAME rpc "MyApp.ReleaseTasks.rpc_print_connected_users()"
### Daemon mode (Unix-like)
You can run the release in daemon mode with the command:
$ bin/RELEASE_NAME daemon
In daemon mode, the system is started on the background via
[`run_erl`](https://www.erlang.org/doc/man/run_erl.html). You may also
want to enable [`heart`](https://www.erlang.org/doc/man/heart.html)
in daemon mode so it automatically restarts the system in case
of crashes. See the generated `releases/RELEASE_VSN/env.sh` file.
The daemon will write all of its standard output to the "tmp/log/"
directory in the release root. You can watch the log file by doing
`tail -f tmp/log/erlang.log.1` or similar. Once files get too large,
the index suffix will be incremented. A developer can also attach
to the standard input of the daemon by invoking "to_erl tmp/pipe/"
from the release root. However, note that attaching to the system
should be done with extreme care, since the usual commands for
exiting an Elixir system, such as hitting Ctrl+C twice or Ctrl+\\,
will actually shut down the daemon. Therefore, using
`bin/RELEASE_NAME remote` should be preferred, even in daemon mode.
You can customize the tmp directory used both for logging and for
piping in daemon mode by setting the `RELEASE_TMP` environment
variable. See the "Customization" section.
### Services mode (Windows)
While daemons are not available on Windows, it is possible to install a
released system as a service on Windows with the help of
[`erlsrv`](https://www.erlang.org/doc/man/erlsrv.html). This can be done by
running:
$ bin/RELEASE_NAME install
Once installed, the service must be explicitly managed via the `erlsrv`
executable, which is included in the `erts-VSN/bin` directory.
The service is not started automatically after installing.
For example, if you have a release named `demo`, you can install
the service and then start it from the release root as follows:
$ bin/demo install
$ erts-VSN/bin/erlsrv.exe start demo_demo
The name of the service is `demo_demo` because the name is built
by concatenating the node name with the release name. Since Elixir
automatically uses the same name for both, the service will be
referenced as `demo_demo`.
The `install` command must be executed as an administrator.
### `bin/RELEASE_NAME` commands
The following commands are supported by `bin/RELEASE_NAME`:
```text
start Starts the system
start_iex Starts the system with IEx attached
daemon Starts the system as a daemon (Unix-like only)
daemon_iex Starts the system as a daemon with IEx attached (Unix-like only)
install Installs this system as a Windows service (Windows only)
eval "EXPR" Executes the given expression on a new, non-booted system
rpc "EXPR" Executes the given expression remotely on the running system
remote Connects to the running system via a remote shell
restart Restarts the running system via a remote command
stop Stops the running system via a remote command
pid Prints the operating system PID of the running system via a remote command
version Prints the release name and version to be booted
```
## Deployments
### Requirements
A release is built on a **host**, a machine which contains Erlang, Elixir,
and any other dependencies needed to compile your application. A release is
then deployed to a **target**, potentially the same machine as the host,
but usually separate, and often there are many targets (either multiple
instances, or the release is deployed to heterogeneous environments).
To deploy straight from a host to a separate target without cross-compilation,
the following must be the same between the host and the target:
* Target architecture (for example, x86_64 or ARM)
* Target vendor + operating system (for example, Windows, Linux, or Darwin/macOS)
* Target ABI (for example, musl or gnu)
This is often represented in the form of target triples, for example,
`x86_64-unknown-linux-gnu`, `x86_64-unknown-linux-musl`, `x86_64-apple-darwin`.
So to be more precise, to deploy straight from a host to a separate target,
the Erlang Runtime System (ERTS), and any native dependencies (NIFs), must
be compiled for the same target triple. If you are building on a MacBook
(`x86_64-apple-darwin`) and trying to deploy to a typical Ubuntu machine
(`x86_64-unknown-linux-gnu`), the release will not work. Instead you should
build the release on a `x86_64-unknown-linux-gnu` host. As we will see, this
can be done in multiple ways, such as releasing on the target itself, or by
using virtual machines or containers, usually as part of your release pipeline.
In addition to matching the target triple, it is also important that the
target has all of the system packages that your application will need at
runtime. A common one is the need for OpenSSL when building an application
that uses `:crypto` or `:ssl`, which is dynamically linked to ERTS. The other
common source for native dependencies like this comes from dependencies
containing NIFs (natively-implemented functions) which may expect to
dynamically link to libraries they use.
Of course, some operating systems and package managers can differ between
versions, so if your goal is to have full compatibility between host and
target, it is best to ensure the operating system and system package manager
have the same versions on host and target. This may even be a requirement in
some systems, especially so with package managers that try to create fully
reproducible environments (Nix, Guix).
Similarly, when creating a stand-alone package and release for Windows, note
the Erlang Runtime System has a dependency to some Microsoft libraries
(Visual C++ Redistributable Packages for Visual Studio 2013). These libraries
are installed (if not present before) when Erlang is installed but it is not
part of the standard Windows environment. Deploying a stand-alone release on
a computer without these libraries will result in a failure when trying to
run the release. One way to solve this is to download and install these
Microsoft libraries the first time a release is deployed (the Erlang installer
version 10.6 ships with “Microsoft Visual C++ 2013 Redistributable - 12.0.30501”).
Alternatively, you can also bundle the compiled object files in the release,
as long as they were compiled for the same target. If doing so, you need to
update `LD_LIBRARY_PATH` environment variable with the paths containing the
bundled objects on Unix-like systems or the `PATH` environment variable on
Windows systems.
Currently, there is no official way to cross-compile a release from one
target triple to another, due to the complexities involved in the process.
### Techniques
There are a couple of ways to guarantee that a release is built on a host with
the same properties as the target. A simple option is to fetch the source,
compile the code and assemble the release on the target itself. It would
be something like this:
$ git clone remote://path/to/my_app.git my_app_source
$ cd my_app_source
$ mix deps.get --only prod
$ MIX_ENV=prod mix release
$ _build/prod/rel/my_app/bin/my_app start
If you prefer, you can also compile the release to a separate directory,
so you can erase all source after the release is assembled:
$ git clone remote://path/to/my_app.git my_app_source
$ cd my_app_source
$ mix deps.get --only prod
$ MIX_ENV=prod mix release --path ../my_app_release
$ cd ../my_app_release
$ rm -rf ../my_app_source
$ bin/my_app start
However, this option can be expensive if you have multiple production
nodes or if the release assembling process is a long one, as each node
needs to individually assemble the release.
You can automate this process in a couple different ways. One option
is to make it part of your Continuous Integration (CI) / Continuous
Deployment (CD) pipeline. When you have a CI/CD pipeline, it is common
that the machines in your CI/CD pipeline run on the exact same target
triple as your production servers (if they don't, they should).
In this case, you can assemble the release at the end of your CI/CD
pipeline by calling `MIX_ENV=prod mix release` and push the artifact
to S3 or any other network storage. To perform the deployment, your
production machines can fetch the deployment from the network storage
and run `bin/my_app start`.
Another mechanism to automate deployments is to use images, such as
Amazon Machine Images, or container platforms, such as Docker.
For instance, you can use Docker to run locally a system with the
exact same target triple as your production servers. Inside the
container, you can invoke `MIX_ENV=prod mix release` and build
a complete image and/or container with the operating system, all
dependencies as well as the releases.
In other words, there are multiple ways systems can be deployed and
releases can be automated and incorporated into all of them as long
as you remember to build the system in the same target triple.
Once a system is deployed, shutting down the system can be done by
sending SIGINT/SIGTERM to the system, which is what most containers,
platforms and tools do, or by explicitly invoking `bin/RELEASE_NAME stop`.
Once the system receives the shutdown request, each application and
their respective supervision trees will stop, one by one, in the
opposite order that they were started.
## Customization
There are a couple ways in which developers can customize the generated
artifacts inside a release.
### Options
The following options can be set inside your `mix.exs` on each release definition:
* `:applications` - a keyword list with application names as keys and their
mode as value. By default `:applications` includes the current application and
all applications the current application depends on, recursively. You can include
new applications or change the mode of existing ones by listing them here.
The order of the applications given will be preserved as much as possible, with
only `:kernel`, `:stdlib`, `:sasl`, and `:elixir` listed before the given application
list. The supported values are:
* `:permanent` (default) - the application is started and the node shuts down
if the application terminates, regardless of reason
* `:transient` - the application is started and the node shuts down
if the application terminates abnormally
* `:temporary` - the application is started and the node does not
shut down if the application terminates
* `:load` - the application is only loaded
* `:none` - the application is part of the release but it is neither
loaded nor started
* `:strip_beams` - controls if BEAM files should have their debug information,
documentation chunks, and other non-essential metadata removed. Defaults to
`true`. May be set to `false` to disable stripping. Also accepts
`[keep: ["Docs", "Dbgi"]]` to keep certain chunks that are usually stripped.
You can also set the `:compress` option to true to enable individual
compression of BEAM files, although it is typically preferred to compress
the whole release instead.
* `:cookie` - a string representing the Erlang Distribution cookie. If this
option is not set, a random cookie is written to the `releases/COOKIE` file
when the first release is assembled. At runtime, we will first attempt
to fetch the cookie from the `RELEASE_COOKIE` environment variable and
then we'll read the `releases/COOKIE` file.
If you are setting this option manually, we recommend the cookie option
to be a long and randomly generated string, such as:
    `Base.url_encode64(:crypto.strong_rand_bytes(40))`. We also recommend restricting
the characters in the cookie to the subset returned by `Base.url_encode64/1`.
* `:validate_compile_env` - by default a release will match all runtime
configuration against any configuration that was marked at compile time
in your application of its dependencies via the `Application.compile_env/3`
function. If there is a mismatch between those, it means your system is
misconfigured and unable to boot. You can disable this check by setting
this option to false.
* `:path` - the path the release should be installed to.
Defaults to `"_build/MIX_ENV/rel/RELEASE_NAME"`.
* `:version` - the release version as a string or `{:from_app, app_name}`.
Defaults to the current application version. The `{:from_app, app_name}` format
can be used to easily reference the application version from another application.
This is particularly useful in umbrella applications.
* `:quiet` - a boolean that controls if releases should write steps to
the standard output. Defaults to `false`.
* `:include_erts` - a boolean, string, or anonymous function of arity zero.
If a boolean, it indicates whether the Erlang Runtime System (ERTS), which
includes the Erlang VM, should be included in the release. The default is
`true`, which is also the recommended value. If a string, it represents
the path to an existing ERTS installation. If an anonymous function of
arity zero, it's a function that returns any of the above (boolean or string).
You may also set this option to `false` if you desire to use the ERTS version installed
on the target. Note, however, that the ERTS version on the target must have **the
exact version** as the ERTS version used when the release is assembled. Setting it to
`false` also disables hot code upgrades. Therefore, `:include_erts` should be
set to `false` with caution and only if you are assembling the release on the
same server that runs it.
* `:include_executables_for` - a list of atoms detailing for which Operating
Systems executable files should be generated for. By default, it is set to
`[:unix, :windows]`. You can customize those as follows:
releases: [
demo: [
include_executables_for: [:unix] # Or [:windows] or []
]
]
* `:rel_templates_path` - the path to find template files that are copied to
the release, such as "vm.args.eex", "remote.vm.args.eex", "env.sh.eex"
(or "env.bat.eex"), and "overlays". Defaults to "rel" in the project root.
* `:overlays` - a list of directories with extra files to be copied
as is to the release. The "overlays" directory at `:rel_templates_path`
is always included in this list by default (typically at "rel/overlays").
See the "Overlays" section for more information.
* `:steps` - a list of steps to execute when assembling the release. See
the "Steps" section for more information.
* `:skip_mode_validation_for` - a list of application names
(atoms) specifying applications to skip strict validation of
"unsafe" modes. An "unsafe" case is when a parent application
mode is `:permanent` but one of the applications it depends on
is set to `:load`. Use this with care, as a release with
invalid modes may no longer boot without additional tweaks.
Defaults to `[]`.
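
  For reference, a release entry combining several of the options above could
  look as follows (all values are illustrative):

      releases: [
        demo: [
          version: {:from_app, :my_app},
          include_executables_for: [:unix],
          strip_beams: [keep: ["Docs"]],
          applications: [runtime_tools: :permanent]
        ]
      ]
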
Note each release definition can be given as an anonymous function. This
is useful if some release attributes are expensive to compute:
releases: [
demo: fn ->
[version: @version <> "+" <> git_ref()]
end
]
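
  The `git_ref/0` call above is not provided by Mix. As a hedged sketch, it
  could be a private helper in your `mix.exs` that shells out to Git:

      defp git_ref do
        # Illustrative helper: falls back to "unknown" when the ref cannot be read
        case System.cmd("git", ["rev-parse", "--short", "HEAD"]) do
          {ref, 0} -> String.trim(ref)
          _ -> "unknown"
        end
      end
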
Besides the options above, it is possible to customize the generated
release with custom files, by tweaking the release steps or by running
custom options and commands on boot. We will detail both approaches next.
### Overlays
Often it is necessary to copy extra files to the release root after
the release is assembled. This can be easily done by placing such
files in the `rel/overlays` directory. Any file in there is copied
as is to the release root. For example, if you have placed a
"rel/overlays/Dockerfile" file, the "Dockerfile" will be copied as
is to the release root.
If you want to specify extra overlay directories, you can do so
with the `:overlays` option. If you need to copy files dynamically,
see the "Steps" section.
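
  For example, a minimal sketch (the extra directory name is illustrative):

      releases: [
        demo: [
          overlays: ["envs/prod/overlays"]
        ]
      ]
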
### Steps
It is possible to add one or more steps before and after the release is
assembled. This can be done with the `:steps` option:
releases: [
demo: [
steps: [&set_configs/1, :assemble, ©_extra_files/1]
]
]
The `:steps` option must be a list and it must always include the
atom `:assemble`, which does most of the release assembling. You
can pass anonymous functions before and after the `:assemble` to
customize your release assembling pipeline. Those anonymous functions
will receive a `Mix.Release` struct and must return the same or
an updated `Mix.Release` struct. It is also possible to build a tarball
of the release by passing the `:tar` step anywhere after `:assemble`.
If the release `:path` is not configured, the tarball is created in
  `_build/MIX_ENV/RELEASE_NAME-RELEASE_VSN.tar.gz`. Otherwise it is
  created inside the configured `:path`.
See `Mix.Release` for more documentation on the struct and which
fields can be modified. Note that the `:steps` field itself can be
modified and it is updated every time a step is called. Therefore,
if you need to execute a command before and after assembling the
release, you only need to declare the first steps in your pipeline
and then inject the last step into the release struct. The steps
field can also be used to verify if the step was set before or
after assembling the release.
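
  As a minimal sketch, a custom step is simply a function that receives the
  `Mix.Release` struct and returns it, possibly updated (the file names below
  are illustrative):

      defp copy_extra_files(release) do
        # Copy an extra file into the release root
        File.cp!("deploy/Dockerfile", Path.join(release.path, "Dockerfile"))
        release
      end

  Such a step could then be referenced as
  `steps: [:assemble, &copy_extra_files/1, :tar]` to also produce a tarball
  of the resulting release.
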
### vm.args and env.sh (env.bat)
Developers may want to customize the VM flags and environment variables
given when the release starts. The simplest way to customize those files
is by running `mix release.init`. The Mix task will copy custom
`rel/vm.args.eex`, `rel/remote.vm.args.eex`, `rel/env.sh.eex`, and
`rel/env.bat.eex` files to your project root. You can modify those files
and they will be evaluated every time you perform a new release. Those
files are regular EEx templates and they have a single assign, called
`@release`, with the `Mix.Release` struct.
The `vm.args` and `remote.vm.args` files may contain any of the VM flags
accepted by the [`erl` command](https://www.erlang.org/doc/man/erl.html).
The `env.sh` and `env.bat` is used to set environment variables.
In there, you can set vars such as `RELEASE_NODE`, `RELEASE_COOKIE`,
and `RELEASE_TMP` to customize your node name, cookie and tmp
directory respectively. Whenever `env.sh` or `env.bat` is invoked,
the variables `RELEASE_ROOT`, `RELEASE_NAME`, `RELEASE_VSN`, and
`RELEASE_COMMAND` have already been set, so you can rely on them.
See the section on environment variables for more information.
Furthermore, while the `vm.args` files are static, you can use
`env.sh` and `env.bat` to dynamically set VM options. For example,
if you want to make sure the Erlang Distribution listens only on
a given port known at runtime, you can set the following:
```bash
case $RELEASE_COMMAND in
start*|daemon*)
ELIXIR_ERL_OPTIONS="-kernel inet_dist_listen_min $BEAM_PORT inet_dist_listen_max $BEAM_PORT"
export ELIXIR_ERL_OPTIONS
;;
*)
;;
esac
```
Note we only set the port on start/daemon commands. If you also limit
the port on other commands, such as `rpc`, then you will be unable
to establish a remote connection as the port will already be in use
by the node.
On Windows, your `env.bat` would look like this:
```bash
IF NOT %RELEASE_COMMAND:start=%==%RELEASE_COMMAND% (
set ELIXIR_ERL_OPTIONS="-kernel inet_dist_listen_min %BEAM_PORT% inet_dist_listen_max %BEAM_PORT%"
)
```
## Application configuration
Mix provides two mechanisms for configuring the application environment
of your application and your dependencies: build-time and runtime. On this
section, we will learn how those mechanisms apply to releases. An introduction
to this topic can be found in the "Configuration" section of the `Mix` module.
### Build-time configuration
Whenever you invoke a `mix` command, Mix loads the configuration in
`config/config.exs`, if said file exists. We say that this configuration
is a build-time configuration as it is evaluated whenever you compile your
code or whenever you assemble the release.
In other words, if your configuration does something like:
import Config
config :my_app, :secret_key, System.fetch_env!("MY_APP_SECRET_KEY")
The `:secret_key` key under `:my_app` will be computed on the
host machine, whenever the release is built. Therefore if the machine
  assembling the release does not have access to all environment variables used
to run your code, loading the configuration will fail as the environment
variable is missing. Luckily, Mix also provides runtime configuration,
which should be preferred and we will see next.
### Runtime configuration
To enable runtime configuration in your release, all you need to do is
to create a file named `config/runtime.exs`:
import Config
config :my_app, :secret_key, System.fetch_env!("MY_APP_SECRET_KEY")
This file will be executed whenever your Mix project or your release
starts.
Your `config/runtime.exs` file needs to follow three important rules:
* It MUST `import Config` at the top instead of the deprecated `use Mix.Config`
* It MUST NOT import any other configuration file via `import_config`
* It MUST NOT access `Mix` in any way, as `Mix` is a build tool and
it is not available inside releases
If a `config/runtime.exs` exists, it will be copied to your release
and executed early in the boot process, when only Elixir and Erlang's
main applications have been started. Once the configuration is loaded,
the Erlang system will be restarted (within the same Operating System
process) and the new configuration will take place.
You can change the path to the runtime configuration file by setting
`:runtime_config_path` inside each release configuration. This path is
resolved at build time as the given configuration file is always copied
to inside the release:
releases: [
demo: [
runtime_config_path: ...
]
]
Finally, in order for runtime configuration to work properly (as well
as any other "Config provider" as defined next), it needs to be able
to persist the newly computed configuration to disk. The computed config
file will be written to "tmp" directory inside the release every time
the system boots. You can configure the "tmp" directory by setting the
`RELEASE_TMP` environment variable, either explicitly or inside your
`releases/RELEASE_VSN/env.sh` (or `env.bat` on Windows).
### Config providers
Releases also supports custom mechanisms, called config providers, to load
any sort of runtime configuration to the system while it boots. For instance,
if you need to access a vault or load configuration from a JSON file, it can
be achieved with config providers. The runtime configuration outlined in the
previous section is handled by the `Config.Reader` provider. See the
`Config.Provider` module for more information and more examples.
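
  As a minimal sketch, a custom provider is given as a `{module, arg}` tuple
  under the `:config_providers` option described below (the `MyApp.JSONProvider`
  module and the path are hypothetical):

      releases: [
        demo: [
          config_providers: [{MyApp.JSONProvider, "/etc/my_app/config.json"}]
        ]
      ]
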
The following options can be set inside your releases key in your `mix.exs`
to control how config providers work:
* `:reboot_system_after_config` - every time your release is configured,
the system is rebooted to allow the new configuration to take place.
You can set this option to `false` to disable the rebooting for applications
that are sensitive to boot time but, in doing so, note you won't be able
to configure system applications, such as `:kernel` and `:stdlib`.
Defaults to `true` if using the deprecated `config/releases.exs`,
`false` otherwise.
* `:prune_runtime_sys_config_after_boot` - if `:reboot_system_after_config`
is set, every time your system boots, the release will write a config file
to your tmp directory. These configuration files are generally small.
But if you are concerned with disk space or if you have other restrictions,
you can ask the system to remove said config files after boot. The downside
is that you will no longer be able to restart the system internally (neither
via `System.restart/0` nor `bin/RELEASE_NAME restart`). If you need a restart,
you will have to terminate the Operating System process and start a new
one. Defaults to `false`.
* `:start_distribution_during_config` - if `:reboot_system_after_config` is
set, releases only start the Erlang VM distribution features after the config
files are evaluated. You can set it to `true` if you need distribution during
configuration. Defaults to `false`.
* `:config_providers` - a list of tuples with custom config providers.
See `Config.Provider` for more information. Defaults to `[]`.
### Customization and configuration summary
Generally speaking, the following files are available for customizing
and configuring the running system:
* `config/config.exs` (and `config/prod.exs`) - provides build-time
application configuration, which are executed when the release is
assembled
* `config/runtime.exs` - provides runtime application configuration.
It is executed every time your Mix project or your release boots
and is further extensible via config providers. If you want to
detect you are inside a release, you can check for release specific
environment variables, such as `RELEASE_NODE` or `RELEASE_MODE`
* `rel/vm.args.eex` and `rel/remote.vm.args.eex` - template files that
are copied into every release and provides static configuration of the
Erlang Virtual Machine and other runtime flags. `vm.args` runs on
`start`, `daemon`, and `eval` commands. `remote.vm.args` configures
the VM for `remote` and `rpc` commands
* `rel/env.sh.eex` and `rel/env.bat.eex` - template files that are copied
into every release and are executed on every command to set up environment
variables, including specific ones to the VM, and the general environment
## Directory structure
A release is organized as follows:
```text
bin/
RELEASE_NAME
erts-ERTS_VSN/
lib/
APP_NAME-APP_VSN/
ebin/
include/
priv/
releases/
RELEASE_VSN/
consolidated/
elixir
elixir.bat
env.bat
env.sh
iex
iex.bat
remote.vm.args
runtime.exs
start.boot
start.script
start_clean.boot
start_clean.script
sys.config
vm.args
COOKIE
start_erl.data
tmp/
```
We document this structure for completeness. In practice, developers
should not modify any of those files after the release is assembled.
Instead use env scripts, custom config provider, overlays, and all
other mechanisms described in this guide to configure how your release
works.
## Environment variables
The system sets different environment variables. The following variables
are set early on and can only be read by `env.sh` and `env.bat`:
* `RELEASE_ROOT` - points to the root of the release. If the system
includes ERTS, then it is the same as `:code.root_dir/0`. This
variable is always computed and it cannot be set to a custom value
* `RELEASE_COMMAND` - the command given to the release, such as `"start"`,
`"remote"`, `"eval"`, and so on. This is typically accessed inside `env.sh`
and `env.bat` to set different environment variables under different
conditions. Note, however, that `RELEASE_COMMAND` has not been
validated by the time `env.sh` and `env.bat` are called, so it may
be empty or contain invalid values. This variable is always computed
and it cannot be set to a custom value
* `RELEASE_NAME` - the name of the release. It can be set to a custom
value when invoking the release
* `RELEASE_VSN` - the version of the release, otherwise the latest
version is used. It can be set to a custom value when invoking the
release. The custom value must be an existing release version in
the `releases/` directory
* `RELEASE_PROG` - the command line executable used to start the release
The following variables can be set before you invoke the release or
inside `env.sh` and `env.bat`:
* `RELEASE_COOKIE` - the release cookie. By default uses the value
in `releases/COOKIE`. It can be set to a custom value
* `RELEASE_NODE` - the release node name, in the format `name` or
optionally `name@host` if running in distributed mode. It can be
set to a custom value. The name part must be made only of letters,
digits, underscores, and hyphens
* `RELEASE_SYS_CONFIG` - the location of the sys.config file. It can
be set to a custom path and it must not include the `.config` extension
* `RELEASE_VM_ARGS` - the location of the vm.args file. It can be set
to a custom path
* `RELEASE_REMOTE_VM_ARGS` - the location of the remote.vm.args file.
It can be set to a custom path
* `RELEASE_TMP` - the directory in the release to write temporary
files to. It can be set to a custom directory. It defaults to
`$RELEASE_ROOT/tmp`
* `RELEASE_MODE` - if the release should load code on demand (interactive)
or preload it (embedded). Defaults to "embedded", which increases boot
time but it means the runtime will respond faster as it doesn't have to
load code. Choose interactive if you need to decrease boot time and reduce
memory usage on boot. It applies only to start/daemon/install commands
* `RELEASE_DISTRIBUTION` - how do we want to run the distribution.
May be `name` (long names), `sname` (short names) or `none`
(distribution is not started automatically). Defaults to
`sname` which allows access only within the current system.
`name` allows external connections
* `RELEASE_BOOT_SCRIPT` - the name of the boot script to use when starting
the release. This script is used when running commands such as `start` and
`daemon`. The boot script is expected to be located at the
path `releases/RELEASE_VSN/RELEASE_BOOT_SCRIPT.boot`. Defaults to `start`
* `RELEASE_BOOT_SCRIPT_CLEAN` - the name of the boot script used when
starting the release clean, without your application or its dependencies.
This script is used by commands such as `eval`, `rpc`, and `remote`.
The boot script is expected to be located at the path
`releases/RELEASE_VSN/RELEASE_BOOT_SCRIPT_CLEAN.boot`. Defaults
to `start_clean`
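
  For example, to allow connections from other machines, a hedged sketch of
  what could go into `rel/env.sh.eex` (the node name is illustrative):

  ```bash
  export RELEASE_DISTRIBUTION=name
  export RELEASE_NODE="my_app@172.16.1.10"
  ```
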
## Umbrellas
Releases are well integrated with umbrella projects, allowing you to
release one or more subsets of your umbrella children. The only difference
between performing a release in the umbrella project compared to a
regular application is that umbrellas require you to explicitly list
your release and the starting point for each release. For example,
  imagine this umbrella project:
```text
my_app_umbrella/
apps/
my_app_core/
my_app_event_processing/
my_app_web/
```
where both `my_app_event_processing` and `my_app_web` depend on
`my_app_core` but they do not depend on each other.
Inside your umbrella, you can define multiple releases:
releases: [
web_and_event_processing: [
applications: [
my_app_event_processing: :permanent,
my_app_web: :permanent
]
],
web_only: [
applications: [my_app_web: :permanent]
],
event_processing_only: [
applications: [my_app_event_processing: :permanent]
]
]
Note you don't need to define all applications in `:applications`,
only the entry points. Also remember that the recommended mode
for all applications in the system is `:permanent`.
Finally, keep in mind it is not required for you to assemble the
release from the umbrella root. You can also assemble the release
from each child application individually. Doing it from the root,
however, allows you to include two applications that do not depend
on each other as part of the same release.
## Hot Code Upgrades
Erlang and Elixir are sometimes known for the capability of upgrading
a node that is running in production without shutting down that node.
However, this feature is not supported out of the box by Elixir releases.
The reason we don't provide hot code upgrades is because they are very
complicated to perform in practice, as they require careful coding of
your processes and applications as well as extensive testing. Given most
teams can use other techniques that are language agnostic to upgrade
their systems, such as Blue/Green deployments, Canary deployments,
Rolling deployments, and others, hot upgrades are rarely a viable
option. Let's understand why.
In a hot code upgrade, you want to update a node from version A to
version B. To do so, the first step is to write recipes for every application
that changed between those two releases, telling exactly how the application
changed between versions. Those recipes are called `.appup` files.
While some of the steps in building `.appup` files can be automated,
not all of them can. Furthermore, each process in the application needs
to be explicitly coded with hot code upgrades in mind. Let's see an example.
Imagine your application has a counter process as a GenServer:
defmodule Counter do
use GenServer
def start_link(_) do
GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
end
def bump do
GenServer.call(__MODULE__, :bump)
end
## Callbacks
def init(:ok) do
{:ok, 0}
end
def handle_call(:bump, _from, counter) do
{:reply, :ok, counter + 1}
end
end
You add this process as part of your supervision tree and ship version
0.1.0 of your system. Now let's imagine that on version 0.2.0 you added
two changes: instead of `bump/0`, which always increments the counter by
one, you introduce `bump/1`, which passes the exact value to bump the
counter. You also change the state, because you want to store the maximum
bump value:
defmodule Counter do
use GenServer
def start_link(_) do
GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
end
def bump(by) do
GenServer.call(__MODULE__, {:bump, by})
end
## Callbacks
def init(:ok) do
{:ok, {0, 0}}
end
def handle_call({:bump, by}, _from, {counter, max}) do
{:reply, :ok, {counter + by, max(max, by)}}
end
end
If you were to perform a hot code upgrade in such an application, it would
crash, because in the initial version the state was just a counter
but in the new version the state is a tuple. Furthermore, you changed
the format of the `call` message from `:bump` to `{:bump, by}` and
the process may have both old and new messages temporarily mixed, so
we need to handle both. The final version would be:
defmodule Counter do
use GenServer
def start_link(_) do
GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
end
def bump(by) do
GenServer.call(__MODULE__, {:bump, by})
end
## Callbacks
def init(:ok) do
{:ok, {0, 0}}
end
def handle_call(:bump, _from, {counter, max}) do
{:reply, :ok, {counter + 1, max(max, 1)}}
end
def handle_call({:bump, by}, _from, {counter, max}) do
{:reply, :ok, {counter + by, max(max, by)}}
end
def code_change(_, counter, _) do
{:ok, {counter, 0}}
end
end
Now you can proceed to list this process in the `.appup` file and
hot code upgrade it. This is one of the many steps necessary
to perform hot code upgrades and it must be taken into account by
every process and application being upgraded in the system.
The [`.appup` cookbook](https://www.erlang.org/doc/design_principles/appup_cookbook.html)
provides a good reference and more examples.
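
For illustration only, the upgrade instructions for the `Counter` module above
could look like the terms below (written here as Elixir literals; actual
`.appup` files contain the equivalent Erlang terms), assuming an upgrade from
version 0.1.0 to 0.2.0 and using `:advanced` so that `code_change/3` is invoked:

    {'0.2.0',
     [{'0.1.0', [{:update, Counter, {:advanced, []}}]}],
     [{'0.1.0', [{:update, Counter, {:advanced, []}}]}]}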
Once `.appup`s are created, the next step is to create a `.relup`
file with all instructions necessary to update the release itself.
Erlang documentation does provide a chapter on
[Creating and upgrading a target system](https://www.erlang.org/doc/system_principles/create_target.html).
[Learn You Some Erlang has a chapter on hot code upgrades](https://learnyousomeerlang.com/relups).
Overall, there are many steps, complexities and assumptions made
during hot code upgrades, which is ultimately why they are not
provided by Elixir out of the box. However, hot code upgrades can
still be achieved by teams who desire to implement those steps
on top of `mix release` in their projects or as separate libraries.
## Command line options
* `--force` - forces recompilation
* `--no-archives-check` - does not check archives
* `--no-deps-check` - does not check dependencies
* `--no-elixir-version-check` - does not check Elixir version
* `--no-compile` - does not compile before assembling the release
* `--overwrite` - if there is an existing release version, overwrite it
* `--path` - the path of the release
* `--quiet` - does not write progress to the standard output
* `--version` - the version of the release
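
For example (illustrative invocation, where `my_app` is a placeholder release
name):

    MIX_ENV=prod mix release my_app --overwrite --path /opt/my_app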
"""
import Mix.Generator
@switches [
overwrite: :boolean,
force: :boolean,
quiet: :boolean,
path: :string,
version: :string,
compile: :boolean,
deps_check: :boolean,
archives_check: :boolean,
elixir_version_check: :boolean
]
@aliases [
f: :force
]
@impl true
def run(args) do
Mix.Project.get!()
Mix.Task.run("compile", args)
config = Mix.Project.config()
release =
case OptionParser.parse!(args, strict: @switches, aliases: @aliases) do
{overrides, [name]} -> Mix.Release.from_config!(String.to_atom(name), config, overrides)
{overrides, []} -> Mix.Release.from_config!(nil, config, overrides)
{_, _} -> Mix.raise("Expected \"mix release\" or \"mix release NAME\"")
end
if not File.exists?(release.version_path) or
yes?(release, "Release #{release.name}-#{release.version} already exists. Overwrite?") do
run_steps(release)
end
end
defp yes?(release, message) do
release.options[:overwrite] or Mix.shell().yes?(message)
end
defp run_steps(%{steps: [step | steps]} = release) when is_function(step) do
case step.(%{release | steps: steps}) do
%Mix.Release{} = release ->
run_steps(release)
other ->
Mix.raise(
"Expected step #{inspect(step)} to return a Mix.Release, got: #{inspect(other)}"
)
end
end
defp run_steps(%{steps: [:tar | steps]} = release) do
%{release | steps: steps} |> make_tar() |> run_steps()
end
defp run_steps(%{steps: [:assemble | steps]} = release) do
%{release | steps: steps} |> assemble() |> run_steps()
end
defp run_steps(%{steps: []} = release) do
announce(release)
end
defp assemble(release) do
config = Mix.Project.config()
message = "#{release.name}-#{release.version} on MIX_ENV=#{Mix.env()}"
info(release, [:green, "* assembling ", :reset, message])
# releases/
# VERSION/
# consolidated/
# NAME.rel
# start.boot
# start.script
# start_clean.boot
# start_clean.script
# sys.config
# releases/
# COOKIE
# start_erl.data
consolidation_path = build_rel(release, config)
[
# erts-VSN/
:erts,
# releases/VERSION/consolidated
{:consolidated, consolidation_path},
# bin/
# RELEASE_NAME
# RELEASE_NAME.bat
# start
# start.bat
# releases/
# VERSION/
# elixir
# elixir.bat
# iex
# iex.bat
{:executables, Keyword.get(release.options, :include_executables_for, [:unix, :windows])}
# lib/APP_NAME-APP_VSN/
| Map.keys(release.applications)
]
|> Task.async_stream(©(&1, release), ordered: false, timeout: :infinity)
|> Stream.run()
copy_overlays(release)
end
defp make_tar(release) do
build_path = Mix.Project.build_path()
dir_path =
if release.path == Path.join([build_path, "rel", Atom.to_string(release.name)]) do
build_path
else
release.path
end
out_path = Path.join(dir_path, "#{release.name}-#{release.version}.tar.gz")
info(release, [:green, "* building ", :reset, out_path])
lib_dirs =
Enum.reduce(release.applications, [], fn {name, app_config}, acc ->
vsn = Keyword.fetch!(app_config, :vsn)
[Path.join("lib", "#{name}-#{vsn}") | acc]
end)
erts_dir =
case release.erts_source do
nil -> []
_ -> ["erts-#{release.erts_version}"]
end
release_files =
for basename <- File.ls!(Path.join(release.path, "releases")),
not File.dir?(Path.join([release.path, "releases", basename])),
do: Path.join("releases", basename)
dirs =
["bin", Path.join("releases", release.version)] ++
erts_dir ++ lib_dirs ++ release_files
files =
dirs
|> Enum.filter(&File.exists?(Path.join(release.path, &1)))
|> Kernel.++(release.overlays)
|> Enum.map(&{String.to_charlist(&1), String.to_charlist(Path.join(release.path, &1))})
File.rm(out_path)
:ok = :erl_tar.create(String.to_charlist(out_path), files, [:dereference, :compressed])
release
end
# build_rel
defp build_rel(release, config) do
version_path = release.version_path
File.rm_rf!(version_path)
File.mkdir_p!(version_path)
release = maybe_add_config_reader_provider(config, release, version_path)
consolidation_path =
if config[:consolidate_protocols] do
Mix.Project.consolidation_path(config)
end
sys_config =
if File.regular?(config[:config_path]) do
config[:config_path] |> Config.Reader.read!(env: Mix.env(), target: Mix.target())
else
[]
end
vm_args_path = Path.join(version_path, "vm.args")
remote_vm_args_path = Path.join(version_path, "remote.vm.args")
cookie_path = Path.join(release.path, "releases/COOKIE")
start_erl_path = Path.join(release.path, "releases/start_erl.data")
config_provider_path = {:system, "RELEASE_SYS_CONFIG", ".config"}
with :ok <- make_boot_scripts(release, version_path, consolidation_path),
:ok <- make_vm_args(release, vm_args_path),
:ok <- make_vm_args(release, remote_vm_args_path),
:ok <- Mix.Release.make_sys_config(release, sys_config, config_provider_path),
:ok <- Mix.Release.make_cookie(release, cookie_path),
:ok <- Mix.Release.make_start_erl(release, start_erl_path) do
consolidation_path
else
{:error, message} ->
File.rm_rf!(version_path)
Mix.raise(message)
end
end
defp maybe_add_config_reader_provider(config, %{options: opts} = release, version_path) do
default_path = config[:config_path] |> Path.dirname() |> Path.join("runtime.exs")
deprecated_path = config[:config_path] |> Path.dirname() |> Path.join("releases.exs")
{path, reboot?} =
cond do
path = opts[:runtime_config_path] ->
{path, false}
File.exists?(default_path) ->
if File.exists?(deprecated_path) do
IO.warn(
"both #{inspect(default_path)} and #{inspect(deprecated_path)} have been " <>
"found, but only #{inspect(default_path)} will be used"
)
end
{default_path, false}
File.exists?(deprecated_path) ->
IO.warn(
"config/releases.exs is deprecated, use config/runtime.exs or set :runtime_config_path in your release configuration instead"
)
{deprecated_path, true}
true ->
{nil, false}
end
cond do
path ->
msg = "#{path} to configure the release at runtime"
Mix.shell().info([:green, "* using ", :reset, msg])
File.cp!(path, Path.join(version_path, "runtime.exs"))
init = {:system, "RELEASE_ROOT", "/releases/#{release.version}/runtime.exs"}
opts = [path: init, env: Mix.env(), target: Mix.target(), imports: :disabled]
release = update_in(release.config_providers, &[{Config.Reader, opts} | &1])
update_in(release.options, &Keyword.put_new(&1, :reboot_system_after_config, reboot?))
release.config_providers == [] ->
skipping("runtime configuration (#{default_path} not found)")
release
true ->
release
end
end
defp make_boot_scripts(release, version_path, consolidation_path) do
prepend_paths =
if consolidation_path do
["$RELEASE_LIB/../releases/#{release.version}/consolidated"]
else
[]
end
results =
for {boot_name, modes} <- release.boot_scripts do
sys_path = Path.join(version_path, Atom.to_string(boot_name))
with :ok <- Mix.Release.make_boot_script(release, sys_path, modes, prepend_paths) do
if boot_name == :start do
rel_path = Path.join(Path.dirname(sys_path), "#{release.name}.rel")
File.rename!(sys_path <> ".rel", rel_path)
else
File.rm(sys_path <> ".rel")
end
:ok
end
end
Enum.find(results, :ok, &(&1 != :ok))
end
defp make_vm_args(release, path) do
vm_args_template = Mix.Release.rel_templates_path(release, "#{Path.basename(path)}.eex")
if File.exists?(vm_args_template) do
copy_template(vm_args_template, path, [release: release], force: true)
else
File.write!(path, vm_args_template(release: release))
end
:ok
end
defp announce(release) do
path = Path.relative_to_cwd(release.path)
cmd = "#{path}/bin/#{release.name}"
info(release, """
Release created at #{path}
# To start your system
#{cmd} start
Once the release is running:
# To connect to it remotely
#{cmd} remote
# To stop it gracefully (you may also send SIGINT/SIGTERM)
#{cmd} stop
To list all commands:
#{cmd}
""")
end
defp info(release, message) do
unless release.options[:quiet] do
Mix.shell().info(message)
end
end
defp skipping(message) do
Mix.shell().info([:yellow, "* skipping ", :reset, message])
end
## Overlays
defp copy_overlays(release) do
target = release.path
default = Mix.Release.rel_templates_path(release, "overlays")
overlays =
if File.dir?(default) do
[default | List.wrap(release.options[:overlays])]
else
List.wrap(release.options[:overlays])
end
relative =
overlays
|> Enum.flat_map(&File.cp_r!(&1, target))
|> Enum.uniq()
|> List.delete(target)
|> Enum.map(&Path.relative_to(&1, target))
update_in(release.overlays, &(relative ++ &1))
end
## Copy operations
defp copy(:erts, release) do
_ = Mix.Release.copy_erts(release)
:ok
end
defp copy(app, release) when is_atom(app) do
Mix.Release.copy_app(release, app)
end
defp copy({:consolidated, consolidation_path}, release) do
if consolidation_path do
consolidation_target = Path.join(release.version_path, "consolidated")
_ = Mix.Release.copy_ebin(release, consolidation_path, consolidation_target)
end
:ok
end
defp copy({:executables, include_executables_for}, release) do
elixir_bin_path = Application.app_dir(:elixir, "../../bin")
bin_path = Path.join(release.path, "bin")
File.mkdir_p!(bin_path)
for os <- include_executables_for do
{env, env_fun, clis} = cli_for(os, release)
env_path = Path.join(release.version_path, env)
env_template_path = Mix.Release.rel_templates_path(release, env <> ".eex")
if File.exists?(env_template_path) do
copy_template(env_template_path, env_path, [release: release], force: true)
else
File.write!(env_path, env_fun.(release))
end
for {filename, contents} <- clis do
target = Path.join(bin_path, filename)
File.write!(target, contents)
executable!(target)
end
for {filename, contents_fun} <- elixir_cli_for(os, release) do
source = Path.join(elixir_bin_path, filename)
if File.regular?(source) do
target = Path.join(release.version_path, filename)
File.write!(target, contents_fun.(source))
executable!(target)
else
skipping("#{filename} for #{os} (bin/#{filename} not found in the Elixir installation)")
end
end
end
end
defp cli_for(:unix, release) do
{"env.sh", &env_template(release: &1), [{"#{release.name}", cli_template(release: release)}]}
end
defp cli_for(:windows, release) do
{"env.bat", &env_bat_template(release: &1),
[{"#{release.name}.bat", cli_bat_template(release: release)}]}
end
defp elixir_cli_for(:unix, release) do
[
{"elixir",
&(&1
|> File.read!()
|> String.replace(~s[ -pa "$SCRIPT_PATH"/../lib/*/ebin], "")
|> replace_erts_bin(release, ~s["$SCRIPT_PATH"/../../erts-#{release.erts_version}/bin/]))},
{"iex", &File.read!/1}
]
end
defp elixir_cli_for(:windows, release) do
[
{"elixir.bat",
&(&1
|> File.read!()
|> String.replace(~s[goto expand_erl_libs], ~s[goto run])
|> replace_erts_bin(release, ~s[%~dp0\\..\\..\\erts-#{release.erts_version}\\bin\\]))},
{"iex.bat", &File.read!/1}
]
end
@erts_bin [~s[ERTS_BIN="$ERTS_BIN"], ~s[ERTS_BIN=!ERTS_BIN!]]
defp replace_erts_bin(contents, release, new_path) do
if release.erts_source do
String.replace(contents, @erts_bin, ~s[ERTS_BIN=#{new_path}])
else
contents
end
end
defp executable!(path), do: File.chmod!(path, 0o755)
# Helper functions
defp release_mode(release, env_var) do
reboot? = Keyword.get(release.options, :reboot_system_after_config, false)
if reboot? and release.config_providers != [] do
"-elixir -config_provider_reboot_mode #{env_var}"
else
"-mode #{env_var}"
end
end
embed_template(:vm_args, Mix.Tasks.Release.Init.vm_args_text())
embed_template(:env, Mix.Tasks.Release.Init.env_text())
embed_template(:cli, Mix.Tasks.Release.Init.cli_text())
embed_template(:env_bat, Mix.Tasks.Release.Init.env_bat_text())
embed_template(:cli_bat, Mix.Tasks.Release.Init.cli_bat_text())
end
| 39.394309 | 137 | 0.698208 |
03498c815a018a185ab2b9712bbf30ea751ec0fe | 954 | exs | Elixir | test/vintage_net/interface/output_logger_test.exs | axelson/vintage_net | 68866ff223fa40e7ad6fd4d9fb2f7960cf51b1ca | [
"Apache-2.0"
] | 85 | 2019-05-09T14:54:38.000Z | 2022-02-08T16:52:04.000Z | test/vintage_net/interface/output_logger_test.exs | axelson/vintage_net | 68866ff223fa40e7ad6fd4d9fb2f7960cf51b1ca | [
"Apache-2.0"
] | 132 | 2019-05-09T15:57:59.000Z | 2022-02-28T16:31:22.000Z | test/vintage_net/interface/output_logger_test.exs | axelson/vintage_net | 68866ff223fa40e7ad6fd4d9fb2f7960cf51b1ca | [
"Apache-2.0"
] | 14 | 2019-07-08T19:18:23.000Z | 2022-02-08T16:52:05.000Z | defmodule VintageNet.Interface.OutputLoggerTest do
use ExUnit.Case
import ExUnit.CaptureLog
alias VintageNet.Interface.OutputLogger
test "logs each item" do
log =
capture_log(fn ->
Enum.into(["one", "two", "three"], OutputLogger.new(""))
end)
assert log =~ "[debug] one"
assert log =~ "[debug] two"
assert log =~ "[debug] three"
end
test "adds a prefix" do
log =
capture_log(fn ->
Enum.into(["one", "two", "three"], OutputLogger.new("prefix:"))
end)
assert log =~ "[debug] prefix:one"
assert log =~ "[debug] prefix:two"
assert log =~ "[debug] prefix:three"
end
test "handles multiple lines passed at the same time" do
log =
capture_log(fn ->
Enum.into(["one\ntwo\nthree"], OutputLogger.new("prefix:"))
end)
assert log =~ "[debug] prefix:one"
assert log =~ "[debug] prefix:two"
assert log =~ "[debug] prefix:three"
end
end
| 24.461538 | 71 | 0.604822 |
0349a8b1b3b835f9ede62be3eeb27b192cf1c547 | 499 | ex | Elixir | lib/user_service_web/views/error_view.ex | sb8244/pow_starter_pack | 82e16c3e4139ecc85295078b54024f58f95ab794 | [
"MIT"
] | 13 | 2020-03-31T21:45:40.000Z | 2021-02-13T12:17:22.000Z | lib/user_service_web/views/error_view.ex | sb8244/pow_starter_pack | 82e16c3e4139ecc85295078b54024f58f95ab794 | [
"MIT"
] | 1 | 2021-03-10T12:57:21.000Z | 2021-03-10T12:57:21.000Z | lib/user_service_web/views/error_view.ex | sb8244/pow_starter_pack | 82e16c3e4139ecc85295078b54024f58f95ab794 | [
"MIT"
] | 1 | 2020-04-28T15:41:56.000Z | 2020-04-28T15:41:56.000Z | defmodule UserServiceWeb.ErrorView do
use UserServiceWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
def template_not_found(template, _assigns) do
Phoenix.Controller.status_message_from_template(template)
end
end
| 29.352941 | 61 | 0.739479 |
0349b40a0c3d5e3d73c5e2e8cafad35b5ae5b61a | 2,042 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/list_bid_response_errors_response.ex | ericrwolfe/elixir-google-api | 3dc0f17edd5e2d6843580c16ddae3bf84b664ffd | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/list_bid_response_errors_response.ex | ericrwolfe/elixir-google-api | 3dc0f17edd5e2d6843580c16ddae3bf84b664ffd | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/list_bid_response_errors_response.ex | ericrwolfe/elixir-google-api | 3dc0f17edd5e2d6843580c16ddae3bf84b664ffd | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AdExchangeBuyer.V2beta1.Model.ListBidResponseErrorsResponse do
@moduledoc """
Response message for listing all reasons that bid responses resulted in an error.
## Attributes
- calloutStatusRows (List[CalloutStatusRow]): List of rows, with counts of bid responses aggregated by callout status. Defaults to: `null`.
- nextPageToken (String): A token to retrieve the next page of results. Pass this value in the ListBidResponseErrorsRequest.pageToken field in the subsequent call to the accounts.filterSets.bidResponseErrors.list method to retrieve the next page of results. Defaults to: `null`.
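A minimal decoding sketch (the JSON payload below is illustrative):

    json = ~s({"nextPageToken": "abc", "calloutStatusRows": []})
    Poison.decode!(json, as: %GoogleApi.AdExchangeBuyer.V2beta1.Model.ListBidResponseErrorsResponse{})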
"""
defstruct [
:calloutStatusRows,
:nextPageToken
]
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.ListBidResponseErrorsResponse do
import GoogleApi.AdExchangeBuyer.V2beta1.Deserializer
def decode(value, options) do
value
|> deserialize(
:calloutStatusRows,
:list,
GoogleApi.AdExchangeBuyer.V2beta1.Model.CalloutStatusRow,
options
)
end
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.ListBidResponseErrorsResponse do
def encode(value, options) do
GoogleApi.AdExchangeBuyer.V2beta1.Deserializer.serialize_non_nil(value, options)
end
end
| 37.814815 | 280 | 0.772772 |
034a0ef9662b414c1a67e15f52e4aac1e724ef4f | 1,541 | ex | Elixir | lib/rfx_cli/main/execute_command.ex | andyl/rfxi | 9007c75693d643555c45a20e9634dd4b3867deba | [
"MIT"
] | 1 | 2021-08-10T14:46:10.000Z | 2021-08-10T14:46:10.000Z | lib/rfx_cli/main/execute_command.ex | andyl/rfxi | 9007c75693d643555c45a20e9634dd4b3867deba | [
"MIT"
] | 2 | 2021-06-22T14:12:37.000Z | 2021-06-28T05:06:23.000Z | lib/rfx_cli/main/execute_command.ex | andyl/rfxi | 9007c75693d643555c45a20e9634dd4b3867deba | [
"MIT"
] | null | null | null | defmodule RfxCli.Main.ExecuteCommand do
@moduledoc false
alias RfxCli.State
def run({:error, msg}) do
{:error, msg}
end
def run(state) do
case execute(state.command_args) do
{:error, msg} -> {:error, msg}
result -> State.assign(state, :changeset, result)
end
end
def execute(cmd_args) do
case cmd_args[:launch_cmd] do
:repl ->
RfxCli.Repl.start()
:server ->
RfxCli.Server.start()
_ ->
run_subcmd(cmd_args)
|> run_convert(cmd_args)
|> run_apply(cmd_args)
end
end
def run_subcmd(cmd_args) do
mod = cmd_args[:op_module]
fun = cmd_args[:op_scope]
arg = [cmd_args[:op_target], cmd_args[:op_args]]
apply(mod, fun, arg)
end
def run_convert(changeset, cmd_args) do
case cmd_args[:op_convert] do
[] -> changeset
_ -> perform_convert(changeset, cmd_args)
end
end
def run_apply(changeset, cmd_args) do
case cmd_args[:op_apply] do
true -> changeset |> Rfx.Change.Set.apply!()
false -> changeset
end
end
defp perform_convert(changeset, cmd_args) when is_list(cmd_args) do
cmd_args[:op_convert]
|> Enum.reduce(changeset, fn(el, acc) -> xconvert(el, acc) end)
end
defp xconvert(type, changelist) do
mod = Rfx.Change.Set
fun = :convert
arg = [changelist, to_atom(type)]
apply(mod, fun, arg)
end
defp to_atom(item) when is_binary(item) do
String.to_atom(item)
end
defp to_atom(item) when is_atom(item) do
item
end
end
| 21.402778 | 69 | 0.631408 |
034a17543d2f1fb95de3d43ee2e13b8164d06816 | 10,159 | exs | Elixir | lib/ex_unit/test/ex_unit/formatter_test.exs | irisTa56/elixir | 0a953d75fb2cbdc6f3d33040aa60738d85512a1f | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/test/ex_unit/formatter_test.exs | irisTa56/elixir | 0a953d75fb2cbdc6f3d33040aa60738d85512a1f | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/test/ex_unit/formatter_test.exs | irisTa56/elixir | 0a953d75fb2cbdc6f3d33040aa60738d85512a1f | [
"Apache-2.0"
] | null | null | null | Code.require_file("../test_helper.exs", __DIR__)
defmodule ExUnit.FormatterTest do
use ExUnit.Case
import ExUnit.Formatter
doctest ExUnit.Formatter
defmacrop catch_assertion(expr) do
quote do
try do
unquote(expr)
rescue
ex -> ex
end
end
end
defp test_module do
%ExUnit.TestModule{name: Hello}
end
defp test do
%ExUnit.Test{name: :world, module: Hello, tags: %{file: __ENV__.file, line: 1}}
end
def falsy() do
false
end
defp formatter(_kind, message) do
message
end
test "formats test case filters" do
filters = [run: true, slow: false]
assert format_filters(filters, :exclude) =~ "Excluding tags: [run: true, slow: false]"
assert format_filters(filters, :include) =~ "Including tags: [run: true, slow: false]"
end
test "formats test errors" do
failure = [{:error, catch_error(raise "oops"), []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (RuntimeError) oops
"""
end
test "formats test exits" do
failure = [{:exit, 1, []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) == """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (exit) 1
"""
end
test "formats test exits with mfa" do
failure = [{:exit, {:bye, {:mod, :fun, []}}, []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) == """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (exit) exited in: :mod.fun()
** (EXIT) :bye
"""
end
test "formats test throws" do
failure = [{:throw, 1, []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) == """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (throw) 1
"""
end
test "formats test EXITs" do
failure = [{{:EXIT, self()}, 1, []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) == """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (EXIT from #{inspect(self())}) 1
"""
end
test "formats test errors with test_location_relative_path" do
Application.put_env(:ex_unit, :test_location_relative_path, "apps/sample")
failure = [{:error, catch_error(raise "oops"), []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
apps/sample/test/ex_unit/formatter_test.exs:1
** (RuntimeError) oops
"""
after
Application.delete_env(:ex_unit, :test_location_relative_path)
end
test "formats test errors with code snippets" do
stack = {Hello, :world, 1, [file: __ENV__.file, line: 3]}
failure = [{:error, catch_error(raise "oops"), [stack]}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (RuntimeError) oops
code: defmodule ExUnit.FormatterTest do
"""
end
test "formats stacktraces" do
stacktrace = [{Oops, :wrong, 1, [file: "formatter_test.exs", line: 1]}]
failure = [{:error, catch_error(raise "oops"), stacktrace}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (RuntimeError) oops
stacktrace:
formatter_test.exs:1: Oops.wrong/1
"""
end
test "formats assertions" do
failure = [{:error, catch_assertion(assert ExUnit.FormatterTest.falsy()), []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
Expected truthy, got false
code: assert ExUnit.FormatterTest.falsy()
"""
end
test "formats multiple assertions" do
failure = [
{:error, catch_assertion(assert ExUnit.FormatterTest.falsy()), []},
{:error, catch_assertion(assert 1 == 2), []}
]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
Failure #1
Expected truthy, got false
code: assert ExUnit.FormatterTest.falsy()
Failure #2
Assertion with == failed
code: assert 1 == 2
left: 1
right: 2
"""
end
defp trim_multiline_whitespace(string) do
String.replace(string, ~r"\n\s+\n", "\n\n")
end
test "blames function clause error" do
{error, stack} =
try do
Access.fetch(:foo, :bar)
rescue
exception -> {exception, __STACKTRACE__}
end
failure = format_test_failure(test(), [{:error, error, [hd(stack)]}], 1, 80, &formatter/2)
assert trim_multiline_whitespace(failure) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (FunctionClauseError) no function clause matching in Access.fetch/2
The following arguments were given to Access.fetch/2:
# 1
:foo
# 2
:bar
Attempted function clauses (showing 5 out of 5):
def fetch(%module{} = container, key)
"""
assert failure =~ ~r"\(elixir #{System.version()}\) lib/access\.ex:\d+: Access\.fetch/2"
end
test "formats setup_all errors" do
failure = [{:error, catch_error(raise "oops"), []}]
assert format_test_all_failure(test_module(), failure, 1, 80, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
** (RuntimeError) oops
"""
end
test "formats assertions with operators with no limit" do
failure = [{:error, catch_assertion(assert [1, 2, 3] == [4, 5, 6]), []}]
assert format_test_all_failure(test_module(), failure, 1, :infinity, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
Assertion with == failed
code: assert [1, 2, 3] == [4, 5, 6]
left: [1, 2, 3]
right: [4, 5, 6]
"""
end
test "formats assertions with operators with column limit" do
failure = [{:error, catch_assertion(assert [1, 2, 3] == [4, 5, 6]), []}]
assert format_test_all_failure(test_module(), failure, 1, 15, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
Assertion with == failed
code: assert [1, 2, 3] == [4, 5, 6]
left: [1, 2, 3]
right: [4,
5,
6]
"""
end
test "formats assertions with complex function call arguments" do
failure = [{:error, catch_assertion(assert is_list(List.to_tuple([1, 2, 3]))), []}]
assert format_test_all_failure(test_module(), failure, 1, 80, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
Expected truthy, got false
code: assert is_list(List.to_tuple([1, 2, 3]))
arguments:
# 1
{1, 2, 3}
"""
failure = [{:error, catch_assertion(assert is_list({1, 2})), []}]
assert format_test_all_failure(test_module(), failure, 1, 80, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
Expected truthy, got false
code: assert is_list({1, 2})
"""
end
test "formats assertions with message with multiple lines" do
message = "Some meaningful error:\nuseful info\nanother useful info"
failure = [{:error, catch_assertion(assert(false, message)), []}]
assert format_test_all_failure(test_module(), failure, 1, :infinity, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
Some meaningful error:
useful info
another useful info
"""
end
defmodule BadInspect do
defstruct key: 0
defimpl Inspect do
def inspect(struct, opts) when is_atom(opts) do
struct.unknown
end
end
end
test "inspect failure" do
failure = [{:error, catch_assertion(assert :will_fail == %BadInspect{}), []}]
message =
"got FunctionClauseError with message \"no function clause matching " <>
"in Inspect.ExUnit.FormatterTest.BadInspect.inspect/2\" while inspecting " <>
"%{__struct__: ExUnit.FormatterTest.BadInspect, key: 0}"
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
Assertion with == failed
code: assert :will_fail == %BadInspect{}
left: :will_fail
right: %Inspect.Error{
message: #{inspect(message)}
}
"""
end
defmodule BadMessage do
defexception key: 0
@impl true
def message(_message) do
raise "oops"
end
end
test "message failure" do
failure = [{:error, catch_error(raise BadMessage), []}]
message =
"got RuntimeError with message \"oops\" while retrieving Exception.message/1 " <>
"for %ExUnit.FormatterTest.BadMessage{key: 0}"
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (ExUnit.FormatterTest.BadMessage) #{message}
"""
end
end
| 31.746875 | 94 | 0.558913 |
034a36fc929810ed5b9167f5778cd6614ec327c9 | 2,060 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_list_intents_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_list_intents_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_list_intents_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2ListIntentsResponse do
@moduledoc """
The response message for Intents.ListIntents.
## Attributes
* `intents` (*type:* `list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2Intent.t)`, *default:* `nil`) - The list of agent intents. There will be a maximum number of items returned based on the page_size field in the request.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Token to retrieve the next page of results, or empty if there are no more results in the list.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:intents => list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2Intent.t()) | nil,
:nextPageToken => String.t() | nil
}
field(:intents, as: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2Intent, type: :list)
field(:nextPageToken)
end
defimpl Poison.Decoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2ListIntentsResponse do
def decode(value, options) do
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2ListIntentsResponse.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2ListIntentsResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.454545 | 236 | 0.749029 |
034a3dd8fb2f213c51e0e5d42958e6c90202eb17 | 14,152 | ex | Elixir | lib/ex_admin/helpers.ex | jcarlos7121/ex_admin | 60a07320efaf549814d29f3593715543aef51e6b | [
"MIT"
] | 1,347 | 2015-10-05T18:23:49.000Z | 2022-01-09T18:38:36.000Z | lib/ex_admin/helpers.ex | jcarlos7121/ex_admin | 60a07320efaf549814d29f3593715543aef51e6b | [
"MIT"
] | 402 | 2015-10-03T13:53:32.000Z | 2021-07-08T09:52:22.000Z | lib/ex_admin/helpers.ex | jcarlos7121/ex_admin | 60a07320efaf549814d29f3593715543aef51e6b | [
"MIT"
] | 333 | 2015-10-12T22:56:57.000Z | 2021-05-26T18:40:24.000Z | Code.ensure_compiled(ExAdmin.Utils)
defmodule ExAdmin.Helpers do
@moduledoc false
require Logger
require Integer
use Xain
import Kernel, except: [to_string: 1]
import ExAdmin.Utils
import ExAdmin.Render
def build_fieldset_legend(nil), do: []
def build_fieldset_legend(""), do: []
def build_fieldset_legend(name) do
[
legend ".inputs" do
span(name)
end
]
end
def build_link(nil, _, _, _, _), do: ""
def build_link("", _, _, _, _), do: ""
def build_link(contents, _conn, %{link: false}, _resource, _field_name), do: contents
def build_link(contents, conn, _, resource, field_name) do
case Map.get(resource, field_name) do
nil ->
contents
res when is_map(res) ->
if ExAdmin.Utils.authorized_action?(conn, :index, res.__struct__) do
path = admin_resource_path(res, :index)
"<a href='#{path}'>#{contents}</a>"
else
contents
end
end
end
def model_name(%{__struct__: name}), do: model_name(name)
def model_name(resource) when is_atom(resource) do
if has_function?(resource, :model_name, 0) do
resource.model_name()
else
resource |> ExAdmin.Utils.base_name() |> Inflex.underscore()
end
end
def build_link_for({:safe, _} = safe_contents, d, a, b, c) do
safe_contents
|> Phoenix.HTML.safe_to_string()
|> build_link_for(d, a, b, c)
end
def build_link_for("", _, _, _, _), do: ""
def build_link_for(nil, _, _, _, _), do: ""
def build_link_for(contents, _, %{link: false}, _, _), do: contents
def build_link_for(contents, conn, opts, resource, field_name) do
case Map.get(resource, field_name) do
nil ->
contents
%{__meta__: _} = res ->
build_content_link(true, conn, res, contents)
_ ->
build_content_link(opts[:link], conn, resource, contents)
end
end
defp build_content_link(link?, conn, resource, contents) do
if link? && ExAdmin.Utils.authorized_action?(conn, :show, resource) do
path = admin_resource_path(resource, :show)
"<a href='#{path}'>#{contents}</a>"
else
contents
end
end
def build_header_field(field, fun) do
case field do
{f_name, _} -> f_name
f_name -> f_name
end
|> fun.()
end
def get_relationship(resource, field_name) do
Map.get(resource, field_name, %{})
end
def map_relationship_fields(resource, fields, separator \\ " ")
def map_relationship_fields(nil, _fields, _separator), do: ""
def map_relationship_fields(resource, fields, separator) do
Enum.map(fields, &get_resource_field(resource, &1))
|> Enum.join(separator)
end
def get_association_fields(%{fields: fields}), do: fields
def get_association_fields(%{}), do: [:name]
def get_association_owner_key(resource, association) when is_binary(association),
do: get_association_owner_key(resource, String.to_atom(association))
def get_association_owner_key(resource, association) do
resource.__struct__.__schema__(:association, association).owner_key
end
defp get_field_type(%{__struct__: resource_struct, __meta__: _}, field) do
resource_struct.__schema__(:type, field)
end
defp get_field_type(_resource, _field), do: nil
@doc """
Builds a web field.
Handles parsing relationships, linking to the relationship, and passing a
concatenated string built from the given fields to the provided callback.
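
## Example

A sketch (assuming `post` is an Ecto struct with a `category` association and
`conn` is the current connection); the callback receives the rendered contents
and the field name:

    build_field(post, conn, {:category, %{fields: [:name]}}, fn contents, field_name ->
      "<td class='td-" <> to_string(field_name) <> "'>" <> contents <> "</td>"
    end)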
"""
def build_field(resource, conn, field_name, fun) do
case field_name do
{f_name, %{has_many: _} = map2} ->
_build_field(map2, conn, resource, f_name)
|> fun.(f_name)
{f_name, %{} = opts} ->
f_name =
case get_field_type(resource, f_name) do
nil -> f_name
type -> {type, f_name}
end
build_single_field(resource, conn, f_name, opts)
|> fun.(f_name)
{f_name, []} ->
build_single_field(resource, conn, f_name, %{})
|> fun.(f_name)
_ ->
fun.("", :none)
end
end
def build_single_field(resource, conn, {_, f_name}, opts) do
build_single_field(resource, conn, f_name, opts)
end
def build_single_field(resource, conn, f_name, %{fun: fun, image: true} = opts) do
attributes =
opts
|> Map.delete(:fun)
|> Map.delete(:image)
|> build_attributes
"<img src='#{fun.(resource)}'#{attributes} />"
|> build_link_for(conn, opts, resource, f_name)
end
def build_single_field(resource, conn, f_name, %{toggle: true}) do
build_single_field(resource, conn, f_name, %{toggle: ~w(YES NO)})
end
def build_single_field(resource, _conn, f_name, %{toggle: [yes, no]}) do
path = fn attr_value ->
admin_resource_path(resource, :toggle_attr, [[attr_name: f_name, attr_value: attr_value]])
end
current_value = Map.get(resource, f_name)
[yes_btn_css, no_btn_css] =
case current_value do
true ->
["btn-primary", "btn-default"]
false ->
["btn-default", "btn-primary"]
value ->
raise ArgumentError.exception(
"`toggle` option could be used only with columns of boolean type.\nBut `#{
f_name
}` is #{inspect(IEx.Info.info(value))}\nwith value == #{inspect(value)}"
)
end
[
~s(<a id="#{f_name}_true_#{resource.id}" class="toggle btn btn-sm #{yes_btn_css}" href="#{
path.(true)
}" data-remote="true" data-method="put" #{if !!current_value, do: "disabled"}>#{yes}</a>),
~s(<a id="#{f_name}_false_#{resource.id}" class="toggle btn btn-sm #{no_btn_css}" href="#{
path.(false)
}" data-remote="true" data-method="put" #{if !current_value, do: "disabled"}>#{no}</a>)
]
|> Enum.join()
end
def build_single_field(resource, conn, f_name, %{fun: fun} = opts) do
markup :nested do
case fun.(resource) do
[{_, list}] -> list
other -> other
end
end
|> build_link_for(conn, opts, resource, f_name)
end
def build_single_field(%{__struct__: resource_struct} = resource, conn, f_name, opts) do
resource_struct.__schema__(:type, f_name)
|> build_single_field_type(resource, conn, f_name, opts)
end
def build_single_field(%{} = resource, conn, f_name, opts) do
build_single_field_type(:array_map, resource, conn, f_name, opts)
end
defp build_single_field_type({:array, type}, resource, conn, f_name, opts)
when type in [:string, :integer] do
case get_resource_field(resource, f_name, opts) do
list when is_list(list) ->
Enum.map(list, &to_string(&1))
|> Enum.join(", ")
other ->
to_string(other)
end
|> build_link_for(conn, opts, resource, f_name)
end
defp build_single_field_type(:array_map, resource, conn, f_name, opts) do
Map.get(resource, to_string(f_name), "")
|> build_link_for(conn, opts, resource, f_name)
end
defp build_single_field_type(_, resource, conn, f_name, opts) do
get_resource_field(resource, f_name, opts)
|> format_contents
|> build_link_for(conn, opts, resource, f_name)
end
defp format_contents(contents) when is_list(contents) do
contents
|> Enum.map(&format_contents/1)
|> to_string
end
defp format_contents(%{__struct__: _} = contents), do: to_string(contents)
defp format_contents(%{} = contents) do
Enum.reduce(contents, [], fn {k, v}, acc ->
value = ExAdmin.Render.to_string(v)
["#{k}: #{value}" | acc]
end)
|> Enum.reverse()
|> Enum.join(", ")
end
defp format_contents(contents), do: to_string(contents)
def get_resource_model(resources) do
case resources do
[] ->
""
[resource | _] ->
get_resource_model(resource)
%{__struct__: name} ->
name |> base_name |> Inflex.underscore()
%{} ->
:map
end
end
defp _build_field(%{fields: fields} = map, conn, resource, field_name) do
get_relationship(resource, field_name)
|> map_relationship_fields(fields)
|> build_link(conn, map, resource, field_name)
end
defp _build_field(%{}, _, _resource, _field_name), do: []
def get_resource_field2(resource, field_name) do
case Map.get(resource, field_name) do
nil -> []
%Ecto.Association.NotLoaded{} -> []
other -> other
end
end
def get_resource_field(resource, field, opts \\ %{}) when is_map(resource) do
opts = Enum.into(opts, %{})
case resource do
%{__struct__: struct_name} ->
cond do
field in struct_name.__schema__(:fields) ->
Map.get(resource, field)
field in struct_name.__schema__(:associations) ->
get_relationship(resource, field)
|> map_relationship_fields(get_association_fields(opts))
has_function?(struct_name, field, 1) ->
try_function(struct_name, resource, field, fn _error ->
raise ExAdmin.RuntimeError,
message: "Could not call resource function #{:field} on #{struct_name}"
end)
function_exported?(
ExAdmin.get_registered(resource.__struct__).__struct__,
:display_name,
1
) ->
apply(ExAdmin.get_registered(resource.__struct__).__struct__, :display_name, [
resource
])
function_exported?(resource.__struct__, :display_name, 1) ->
apply(resource.__struct__, :display_name, [resource])
true ->
case resource.__struct__.__schema__(:fields) do
[_, first | _] ->
Map.get(resource, first)
[id | _] ->
Map.get(resource, id)
_ ->
raise ExAdmin.RuntimeError,
message: "Could not find field #{inspect(field)} in #{inspect(resource)}"
end
end
_ ->
raise ExAdmin.RuntimeError, message: "Resource must be a struct"
end
end
def get_name_field(resource_model) do
fields = resource_model.__schema__(:fields)
name_field = fields |> Enum.find(fn field -> field == :name || field == :title end)
if name_field do
name_field
else
fields |> Enum.find(fn field -> resource_model.__schema__(:type, field) == :string end)
end
end
def display_name(resource) do
defn = ExAdmin.get_registered(resource.__struct__)
cond do
is_nil(defn) ->
get_name_column_field(resource)
function_exported?(defn.__struct__, :display_name, 1) ->
apply(defn.__struct__, :display_name, [resource])
function_exported?(resource.__struct__, :display_name, 1) ->
apply(resource.__struct__, :display_name, [resource])
true ->
case defn.name_column do
nil -> get_name_column_field(resource)
name_field -> resource |> Map.get(name_field) |> to_string
end
end
end
defp get_name_column_field(resource) do
case get_name_field(resource.__struct__) do
nil -> inspect(resource)
field -> Map.get(resource, field)
end
end
def resource_identity(resource, field \\ :name)
def resource_identity(resource, field) when is_map(resource) do
case Map.get(resource, field) do
nil ->
case resource do
%{__struct__: struct_name} ->
if {field, 1} in struct_name.__info__(:functions) do
try do
apply(struct_name, field, [resource])
rescue
_ ->
struct_name |> base_name |> titleize
end
else
struct_name |> base_name |> titleize
end
_ ->
""
end
name ->
name
end
end
def resource_identity(_, _), do: ""
def has_function?(struct_name, function, arity) do
{function, arity} in struct_name.__info__(:functions)
end
def try_function(struct_name, resource, function, rescue_fun \\ nil) do
try do
apply(struct_name, function, [resource])
rescue
error ->
if rescue_fun, do: rescue_fun.(error)
end
end
def timestamp do
:os.timestamp() |> Tuple.to_list() |> Enum.join() |> String.to_integer()
end
def group_by(collection, fun) do
list =
Enum.map(collection, fun)
|> Enum.uniq_by(& &1)
|> Enum.map(&{&1, []})
Enum.reduce(collection, list, fn item, acc ->
key = fun.(item)
{_, val} = List.keyfind(acc, key, 0)
List.keyreplace(acc, key, 0, {key, val ++ [item]})
end)
end
def group_reduce_by_reverse(collection) do
empty =
Keyword.keys(collection)
|> Enum.reduce([], &Keyword.put(&2, &1, []))
Enum.reduce(collection, empty, fn {k, v}, acc ->
Keyword.put(acc, k, [v | acc[k]])
end)
end
def group_reduce_by(collection) do
group_reduce_by_reverse(collection)
|> Enum.reduce([], fn {k, v}, acc ->
Keyword.put(acc, k, Enum.reverse(v))
end)
end
def to_class(prefix, field_name), do: prefix <> to_class(field_name)
def to_class({_, field_name}), do: to_class(field_name)
def to_class(field_name) when is_binary(field_name),
do: field_name_to_class(Inflex.parameterize(field_name, "_"))
def to_class(field_name) when is_atom(field_name),
do: field_name_to_class(Atom.to_string(field_name))
def build_attributes(%{} = opts) do
build_attributes(Map.to_list(opts))
end
def build_attributes(opts) do
Enum.reduce(opts, "", fn {k, v}, acc ->
acc <> " #{k}='#{v}'"
end)
end
def translate_field(defn, field) do
case Regex.scan(~r/(.+)_id$/, Atom.to_string(field)) do
[[_, assoc]] ->
assoc = String.to_atom(assoc)
if assoc in defn.resource_model.__schema__(:associations), do: assoc, else: field
_ ->
case defn.resource_model.__schema__(:type, field) do
:map -> {:map, field}
{:array, :map} -> {:maps, field}
_ -> field
end
end
end
def field_name_to_class(field_name) do
parameterize(String.replace_suffix(field_name, "?", ""))
end
end
| 28.191235 | 96 | 0.616945 |
034a47a594deb607c6a0be6bc7c55888b22939bd | 5,543 | ex | Elixir | tools/astarte_e2e/lib/astarte_e2e/service_notifier.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 191 | 2018-03-30T13:23:08.000Z | 2022-03-02T12:05:32.000Z | tools/astarte_e2e/lib/astarte_e2e/service_notifier.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 402 | 2018-03-30T13:37:00.000Z | 2022-03-31T16:47:10.000Z | tools/astarte_e2e/lib/astarte_e2e/service_notifier.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 24 | 2018-03-30T13:29:48.000Z | 2022-02-28T11:10:26.000Z | #
# This file is part of Astarte.
#
# Copyright 2020 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule AstarteE2E.ServiceNotifier do
@behaviour :gen_statem
require Logger
alias AstarteE2E.ServiceNotifier.{Email, Mailer}
alias AstarteE2E.Config
@default_failure_id "unknown"
# API
def start_link(args) do
with {:ok, pid} <- :gen_statem.start_link({:local, __MODULE__}, __MODULE__, args, []) do
Logger.info("Started process with pid #{inspect(pid)}.", tag: "process_started")
{:ok, pid}
end
end
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :worker,
restart: :permanent,
shutdown: 500
}
end
def notify_service_down(reason) do
:gen_statem.call(__MODULE__, {:notify_service_down, reason})
end
def notify_service_up do
:gen_statem.call(__MODULE__, :notify_service_up)
end
defp deliver(%Bamboo.Email{} = email) do
service_notifier_config = Config.service_notifier_config()
configured_email =
email
|> Bamboo.ConfigAdapter.Email.put_config(service_notifier_config)
with %Bamboo.Email{} = sent_email <- Mailer.deliver_later(configured_email) do
{:ok, sent_email}
end
end
# Callbacks
@impl true
def callback_mode() do
:state_functions
end
@impl true
def init(_) do
data = %{
failures_before_alert: Config.failures_before_alert!(),
failure_id: @default_failure_id
}
{:ok, :starting, data, [{:state_timeout, 60_000, nil}]}
end
def starting(:state_timeout, _content, data) do
reason = "Timeout at startup"
event_id = Hukai.generate("%a-%A")
# Setting failures_before_alert to -1 prevents the system from sending two
# identical email alerts.
updated_data =
data
|> Map.put(:failures_before_alert, -1)
|> Map.put(:failure_id, event_id)
reason
|> Email.service_down_email(event_id)
|> deliver()
Logger.warn(
"Service down. The user has been notified. Reason: #{reason}.",
tag: "service_down_notified",
failure_id: event_id
)
{:next_state, :service_down, updated_data}
end
def starting({:call, from}, :notify_service_up, data) do
actions = [{:reply, from, :ok}]
updated_data = Map.put(data, :failures_before_alert, Config.failures_before_alert!())
Logger.info("Service up.", tag: "service_up")
{:next_state, :service_up, updated_data, actions}
end
def starting({:call, from}, {:notify_service_down, _reason}, _data) do
actions = [{:reply, from, :not_started_yet}]
{:keep_state_and_data, actions}
end
def service_down({:call, from}, :notify_service_up, %{failure_id: failure_id} = data) do
actions = [{:reply, from, :ok}]
updated_data =
data
|> Map.put(:failures_before_alert, Config.failures_before_alert!())
|> Map.put(:failure_id, failure_id)
Email.service_up_email(failure_id)
|> deliver()
Logger.info("Service up. The user has been notified.",
tag: "service_up_notified",
failure_id: failure_id
)
{:next_state, :service_up, updated_data, actions}
end
def service_down(
{:call, from},
{:notify_service_down, _reason},
_data
) do
actions = [{:reply, from, :nothing_to_do}]
{:keep_state_and_data, actions}
end
def service_up({:call, from}, :notify_service_up, data) do
actions = [{:reply, from, :ok}]
updated_data = Map.put(data, :failures_before_alert, Config.failures_before_alert!())
{:keep_state, updated_data, actions}
end
def service_up(
{:call, from},
{:notify_service_down, "Client disconnected" = reason},
data
) do
event_id = Hukai.generate("%a-%A")
updated_data =
data
|> Map.put(:failure_id, event_id)
reason
|> Email.service_down_email(event_id)
|> deliver()
Logger.warn(
"Service down. The user has been notified. Reason: #{reason}.",
tag: "service_down_notified",
failure_id: event_id
)
actions = [{:reply, from, :mail_sent}]
{:next_state, :service_down, updated_data, actions}
end
def service_up(
{:call, from},
{:notify_service_down, reason},
%{failures_before_alert: 0} = data
) do
event_id = Hukai.generate("%a-%A")
updated_data =
data
|> Map.put(:failure_id, event_id)
reason
|> Email.service_down_email(event_id)
|> deliver()
Logger.warn(
"Service down. The user has been notified. Reason: #{reason}.",
tag: "service_down_notified",
failure_id: event_id
)
actions = [{:reply, from, :mail_sent}]
{:next_state, :service_down, updated_data, actions}
end
def service_up({:call, from}, {:notify_service_down, _reason}, data) do
actions = [{:reply, from, :nothing_to_do}]
updated_data = Map.put(data, :failures_before_alert, data.failures_before_alert - 1)
{:keep_state, updated_data, actions}
end
end
| 25.081448 | 92 | 0.662096 |
034a5265aa826fb4258805f2ee5e78a4a3b1208f | 378 | ex | Elixir | api_sample/web/views/error_view.ex | saiidalhalawi/phoenix-ansible | d03843256c20b74fd69bb48f058825785dd7fea2 | [
"Apache-2.0"
] | null | null | null | api_sample/web/views/error_view.ex | saiidalhalawi/phoenix-ansible | d03843256c20b74fd69bb48f058825785dd7fea2 | [
"Apache-2.0"
] | null | null | null | api_sample/web/views/error_view.ex | saiidalhalawi/phoenix-ansible | d03843256c20b74fd69bb48f058825785dd7fea2 | [
"Apache-2.0"
] | null | null | null | defmodule ApiSample.ErrorView do
use ApiSample.Web, :view
def render("404.html", _assigns) do
"Page not found"
end
def render("500.html", _assigns) do
"Server internal error"
end
# In case no render clause matches or no
# template is found, let's render it as 500
def template_not_found(_template, assigns) do
render "500.html", assigns
end
end
| 21 | 47 | 0.701058 |
034a6d34669c3426f4fbddd3bb1666e71ed28eb3 | 2,203 | exs | Elixir | test/base_test.exs | norbertskakste/Elibuf | 6976eddfbbe9498a15fc6bfb587d7b53fb8845ce | [
"Apache-2.0"
] | 4 | 2017-04-11T14:31:23.000Z | 2019-06-04T20:08:02.000Z | test/base_test.exs | norbertskakste/Elibuf | 6976eddfbbe9498a15fc6bfb587d7b53fb8845ce | [
"Apache-2.0"
] | null | null | null | test/base_test.exs | norbertskakste/Elibuf | 6976eddfbbe9498a15fc6bfb587d7b53fb8845ce | [
"Apache-2.0"
] | null | null | null | defmodule ElibufTest.Base do
use ExUnit.Case
alias Elibuf.Primitives.Base
alias Elibuf.Primitives
test "Primitive creation (check ? methods)" do
test_primitive = Primitives.int32()
assert Base.repeating?(test_primitive) == false
assert Base.has_default?(test_primitive) == false
assert Base.has_name?(test_primitive) == false
assert Base.has_order?(test_primitive) == false
assert Base.valid?(test_primitive) == false
end
test "Primitive creation (valid primitive)" do
test_primitive = Primitives.int32()
|> Base.set_name("test_primitive")
|> Base.set_order(1)
|> Base.set_repeating(false)
|> Base.set_default(nil)
assert Base.valid?(test_primitive) == true
end
test "Primitive creation (invalid primitive)" do
test_primitive = Primitives.int32()
|> Base.set_name("test_primitive")
|> Base.set_repeating(false)
|> Base.set_default(nil)
assert Base.valid?(test_primitive) == false
end
test "Primitive type test (valid)" do
test_int32 = %Primitives.Base{type: :int32}
assert Base.valid_type(test_int32) == true
end
test "Primitive type test (invalid)" do
test_false_primitive = %Primitives.Base{type: :abc}
assert Base.valid_type(test_false_primitive) == false
end
test "Primitive generation" do
int32 = Primitives.int32()
|> Base.set_order(1)
|> Base.set_name("TestPrimitive")
|> Base.set_repeating(true)
generated_value = Primitives.Base.generate(int32)
assert generated_value == ~s(repeated int32 TestPrimitive = 1; // %Elibuf.Primitives.Base{default: nil, imports: [], name: "TestPrimitive", order: 1, repeating: true, type: :int32}\n)
value_without_repeating = int32
|> Base.set_repeating(false)
generated_value_without_repeating = Primitives.Base.generate(value_without_repeating)
assert generated_value_without_repeating == ~s(int32 TestPrimitive = 1; // %Elibuf.Primitives.Base{default: nil, imports: [], name: "TestPrimitive", order: 1, repeating: false, type: :int32}\n)
end
end
| 33.892308 | 201 | 0.664548 |
034a6decd43b1de8f08d29df9c1124f3e44d8bf6 | 2,473 | ex | Elixir | lib/resty/associations.ex | paulhenri-l/resty | b6aec738569355bab53fbc732bfd323c63348b85 | [
"MIT"
] | 3 | 2018-11-17T11:11:47.000Z | 2019-09-13T16:13:43.000Z | lib/resty/associations.ex | paulhenri-l/resty | b6aec738569355bab53fbc732bfd323c63348b85 | [
"MIT"
] | 38 | 2018-11-11T01:28:41.000Z | 2019-04-01T21:28:02.000Z | lib/resty/associations.ex | paulhenri-l/resty | b6aec738569355bab53fbc732bfd323c63348b85 | [
"MIT"
] | 1 | 2019-01-10T12:41:48.000Z | 2019-01-10T12:41:48.000Z | defmodule Resty.Associations do
alias Resty.Associations.NotLoaded
alias Resty.Associations.LoadError
@moduledoc """
Resty supports associations between resources. The best way to learn how to
use an association is to go check its doc.
## Supported associations
- `Resty.Resource.Base.belongs_to/3`
- `Resty.Resource.Base.has_one/3`
- `Resty.Resource.Base.has_many/3`
## Load error
If loading an association results in an error its value will be replaced by
the `Resty.Associations.LoadError` struct. The `:error` attribute will be
loaded with the error that has been returned by the `Resty.Repo.find/2` call.
## Not loaded
If an association has not been loaded, its value will be of the type
`Resty.Associations.NotLoaded`.
What may cause an association to not be loaded is that the foreign key of the
relation was set to null or that the relation automatic loading has been
disabled.
"""
@doc """
Load all of the associations of the given resource that should be eager
loaded.
"""
def eager_load(resource = %{__struct__: resource_module}) do
associations =
resource_module.associations()
|> Enum.filter(& &1.eager_load)
load(resource, associations)
end
@doc """
Load all of the associations of the given resource whether they should be
eager loaded or not.
  *This function will not recursively load non-eager-loaded associations.*
"""
def load(resource = %{__struct__: resource_module}) do
# Parallel loading could easily be done.
load(resource, resource_module.associations())
end
@doc false
def load(resource, []), do: resource
def load(resource, [association | next_associations]) do
resource = load(resource, association)
load(resource, next_associations)
end
def load(resource, association = %{__struct__: association_module}) do
case association_module.fetch(association, resource) do
nil ->
Map.put(resource, association.attribute, %NotLoaded{})
{:ok, related_resource} ->
Map.put(resource, association.attribute, related_resource)
{:error, error} ->
Map.put(resource, association.attribute, %LoadError{error: error})
end
end
@doc false
def list(%{__struct__: resource_module}, type) do
list(resource_module, type)
end
def list(resource_module, type) when is_atom(resource_module) do
    resource_module.associations()
|> Enum.filter(&(&1.__struct__ == type))
end
end
| 29.094118 | 79 | 0.717347 |
034a95271d7d893a55542f797828ffed6c5892ac | 1,637 | ex | Elixir | lib/ninescraft_web.ex | drobertduke/ninescraft | 00a08e4f3433b1ffd8f0a8dc77927d043b4e3ce8 | [
"Apache-2.0"
] | null | null | null | lib/ninescraft_web.ex | drobertduke/ninescraft | 00a08e4f3433b1ffd8f0a8dc77927d043b4e3ce8 | [
"Apache-2.0"
] | null | null | null | lib/ninescraft_web.ex | drobertduke/ninescraft | 00a08e4f3433b1ffd8f0a8dc77927d043b4e3ce8 | [
"Apache-2.0"
] | null | null | null | defmodule NinescraftWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use NinescraftWeb, :controller
use NinescraftWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: NinescraftWeb
import Plug.Conn
import NinescraftWeb.Router.Helpers
import NinescraftWeb.Gettext
end
end
def view do
quote do
use Phoenix.View, root: "lib/ninescraft_web/templates",
namespace: NinescraftWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import NinescraftWeb.Router.Helpers
import NinescraftWeb.ErrorHelpers
import NinescraftWeb.Gettext
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
end
end
def channel do
quote do
use Phoenix.Channel
import NinescraftWeb.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
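  # Illustrative example: a hypothetical `NinescraftWeb.PageController` would
  # pull in the controller stack with
  #
  #     use NinescraftWeb, :controller
  #
  # which calls `__using__(:controller)` and therefore `controller/0` above.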
end
| 24.073529 | 69 | 0.693341 |
034abbc318e9dc2efca2925a62d569c4a29cc116 | 1,607 | exs | Elixir | config/test.exs | mindvalley/mv-opentelemetry | 90d654c023e6912f74c2cce518530878b46dd20b | [
"MIT"
] | 9 | 2021-10-05T08:01:18.000Z | 2022-03-30T07:19:38.000Z | config/test.exs | mindvalley/mv-opentelemetry | 90d654c023e6912f74c2cce518530878b46dd20b | [
"MIT"
] | 29 | 2021-10-04T05:46:47.000Z | 2022-03-31T18:47:10.000Z | config/test.exs | mindvalley/mv-opentelemetry | 90d654c023e6912f74c2cce518530878b46dd20b | [
"MIT"
] | 2 | 2021-10-04T05:50:38.000Z | 2021-10-05T05:01:34.000Z | use Mix.Config
config :mv_opentelemetry_harness,
ecto_repos: [MvOpentelemetryHarness.Repo]
# Configures the endpoint
config :mv_opentelemetry_harness, MvOpentelemetryHarnessWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "yu69b2fyPQLDba6EWwyNe2xAXAkcAQT68owg8KhGL/Hfosl3QuYOffSE+eFvqeuX",
live_view: [signing_salt: "FqZ8F1CaCmC4SQIB"],
render_errors: [
view: MvOpentelemetryHarnessWeb.ErrorView,
accepts: ~w(html json),
layout: false
],
pubsub_server: MvOpentelemetryHarness.PubSub,
live_view: [signing_salt: "n4EZeui4"],
http: [port: 4002],
server: false
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
config :opentelemetry, processors: [otel_batch_processor: %{scheduled_delay_ms: 1}]
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
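# For example (illustrative), a CI job could split the suite in two with:
#
#     MIX_TEST_PARTITION=1 mix test --partitions 2
#     MIX_TEST_PARTITION=2 mix test --partitions 2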
config :mv_opentelemetry_harness, MvOpentelemetryHarness.Repo,
username: System.get_env("POSTGRES_USER") || "postgres",
password:
System.get_env("POSTGRES_ROOT_PASSWORD") || System.get_env("POSTGRES_PASSWORD") || "postgres",
database: "mv_opentelemetry_harness_test#{System.get_env("MIX_TEST_PARTITION")}",
hostname: System.get_env("POSTGRES_HOST") || "localhost",
pool: Ecto.Adapters.SQL.Sandbox
# Print only warnings and errors during test
config :logger, level: :warn
| 34.191489 | 98 | 0.768513 |
034acabe8319e183f60119af2a9bae17c9a8498e | 1,704 | ex | Elixir | clients/content/lib/google_api/content/v21/model/orders_reject_return_line_item_response.ex | kaaboaye/elixir-google-api | 1896784c4342151fd25becd089a5beb323eff567 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v21/model/orders_reject_return_line_item_response.ex | kaaboaye/elixir-google-api | 1896784c4342151fd25becd089a5beb323eff567 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v21/model/orders_reject_return_line_item_response.ex | kaaboaye/elixir-google-api | 1896784c4342151fd25becd089a5beb323eff567 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V21.Model.OrdersRejectReturnLineItemResponse do
@moduledoc """
## Attributes
* `executionStatus` (*type:* `String.t`, *default:* `nil`) - The status of the execution.
* `kind` (*type:* `String.t`, *default:* `content#ordersRejectReturnLineItemResponse`) - Identifies what kind of resource this is. Value: the fixed string "content#ordersRejectReturnLineItemResponse".
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:executionStatus => String.t(),
:kind => String.t()
}
field(:executionStatus)
field(:kind)
end
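# Illustrative only: the generated struct carries the two fields above, e.g.
#
#     %GoogleApi.Content.V21.Model.OrdersRejectReturnLineItemResponse{
#       executionStatus: "executed",
#       kind: "content#ordersRejectReturnLineItemResponse"
#     }
#
# ("executed" is a made-up example value, not one defined by this file.)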
defimpl Poison.Decoder, for: GoogleApi.Content.V21.Model.OrdersRejectReturnLineItemResponse do
def decode(value, options) do
GoogleApi.Content.V21.Model.OrdersRejectReturnLineItemResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V21.Model.OrdersRejectReturnLineItemResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.08 | 204 | 0.744131 |
034ae41f43741b79ebf715b36f9a5349a09cc448 | 170 | exs | Elixir | apps/customer/test/test_helper.exs | JaiMali/job_search-1 | 5fe1afcd80aa5d55b92befed2780cd6721837c88 | [
"MIT"
] | 102 | 2017-05-21T18:24:04.000Z | 2022-03-10T12:53:20.000Z | apps/customer/test/test_helper.exs | JaiMali/job_search-1 | 5fe1afcd80aa5d55b92befed2780cd6721837c88 | [
"MIT"
] | 2 | 2017-05-21T01:53:30.000Z | 2017-12-01T00:27:06.000Z | apps/customer/test/test_helper.exs | JaiMali/job_search-1 | 5fe1afcd80aa5d55b92befed2780cd6721837c88 | [
"MIT"
] | 18 | 2017-05-22T09:51:36.000Z | 2021-09-24T00:57:01.000Z | {:ok, _} = Application.ensure_all_started(:ex_machina)
ExUnit.start
Pact.put(:google_strategy, FakeGoogleStrategy)
Ecto.Adapters.SQL.Sandbox.mode(Customer.Repo, :manual)
| 34 | 54 | 0.805882 |
034b19ec86721a2ae802a1652ea235e8d524f485 | 497 | ex | Elixir | test/factories/course/sourcecast_factory.ex | Hou-Rui/cadet | f9036d76005bf3b267b632dce176067ae1a19f71 | [
"Apache-2.0"
] | null | null | null | test/factories/course/sourcecast_factory.ex | Hou-Rui/cadet | f9036d76005bf3b267b632dce176067ae1a19f71 | [
"Apache-2.0"
] | 10 | 2022-02-24T17:57:38.000Z | 2022-03-31T07:43:05.000Z | test/factories/course/sourcecast_factory.ex | Hou-Rui/cadet | f9036d76005bf3b267b632dce176067ae1a19f71 | [
"Apache-2.0"
] | 1 | 2020-06-01T03:26:02.000Z | 2020-06-01T03:26:02.000Z | defmodule Cadet.Course.SourcecastFactory do
@moduledoc """
Factory for Sourcecast entity
"""
defmacro __using__(_opts) do
quote do
alias Cadet.Course.Sourcecast
def sourcecast_factory do
%Sourcecast{
title: Faker.StarWars.character(),
description: Faker.StarWars.planet(),
audio: build(:upload),
playbackData: Faker.StarWars.planet(),
uploader: build(:user, %{role: :staff})
}
end
end
end
end
| 22.590909 | 49 | 0.615694 |
034b55c2a723e199fe8a48742bf2d125d1d67f08 | 2,100 | ex | Elixir | apps/ewallet/lib/ewallet/fetchers/transaction_consumption_fetcher.ex | saturnial/ewallet | a0a6a7604fca0891d495b3aebaead37fe02aae2c | [
"Apache-2.0"
] | null | null | null | apps/ewallet/lib/ewallet/fetchers/transaction_consumption_fetcher.ex | saturnial/ewallet | a0a6a7604fca0891d495b3aebaead37fe02aae2c | [
"Apache-2.0"
] | null | null | null | apps/ewallet/lib/ewallet/fetchers/transaction_consumption_fetcher.ex | saturnial/ewallet | a0a6a7604fca0891d495b3aebaead37fe02aae2c | [
"Apache-2.0"
] | null | null | null | defmodule EWallet.TransactionConsumptionFetcher do
@moduledoc """
Handles any kind of retrieval/fetching for the TransactionConsumptionGate.
All functions here are only meant to load and format data related to
transaction consumptions.
"""
alias EWalletDB.{TransactionConsumption, Transaction}
@spec get(String.t()) ::
{:ok, %TransactionConsumption{}}
| {:error, :transaction_consumption_not_found}
def get(nil), do: {:error, :transaction_consumption_not_found}
def get(id) do
%{id: id}
|> get_by()
|> return_consumption()
end
defp return_consumption(nil), do: {:error, :transaction_consumption_not_found}
defp return_consumption(consumption), do: {:ok, consumption}
@spec idempotent_fetch(String.t()) ::
{:ok, nil}
| {:idempotent_call, %TransactionConsumption{}}
| {:error, %TransactionConsumption{}, atom(), String.t()}
| {:error, %TransactionConsumption{}, String.t(), String.t()}
def idempotent_fetch(idempotency_token) do
%{idempotency_token: idempotency_token}
|> get_by()
|> return_idempotent()
end
defp get_by(attrs) do
TransactionConsumption.get_by(
attrs,
preload: [
:account,
:user,
:wallet,
:token,
:transaction_request,
:transaction,
:exchange_account,
:exchange_wallet
]
)
end
defp return_idempotent(nil), do: {:ok, nil}
defp return_idempotent(%TransactionConsumption{transaction: nil} = consumption) do
{:idempotent_call, consumption}
end
defp return_idempotent(%TransactionConsumption{transaction: transaction} = consumption) do
return_transaction_result(consumption, failed_transaction: Transaction.failed?(transaction))
end
defp return_transaction_result(consumption, failed_transaction: true) do
{code, description} = Transaction.get_error(consumption.transaction)
{:error, consumption, code, description}
end
defp return_transaction_result(consumption, failed_transaction: false) do
{:idempotent_call, consumption}
end
end
| 30 | 96 | 0.695238 |
034b793bd8b986a62a45185af9cd6b40d3b70641 | 88,841 | ex | Elixir | lib/resources.ex | bitchef/exocco | b0e93b01794d71c0458f52907b2f66b03e3f207e | [
"MIT"
] | null | null | null | lib/resources.ex | bitchef/exocco | b0e93b01794d71c0458f52907b2f66b03e3f207e | [
"MIT"
] | 1 | 2016-02-09T12:27:42.000Z | 2016-02-09T12:27:42.000Z | lib/resources.ex | bitchef/exocco | b0e93b01794d71c0458f52907b2f66b03e3f207e | [
"MIT"
] | null | null | null | defmodule Resources do
# CSS stylesheet.
defmacro css do
quote do
"""
/*--------------------- Layout and Typography ----------------------------*/
body {
font-family: 'Palatino Linotype', 'Book Antiqua', Palatino, FreeSerif, serif;
font-size: 14px;
line-height: 16px;
color: #252519;
margin: 0; padding: 0;
}
a { color: #4183C4; }
a:visited { color: #3773AA; }
p, ul, ol { margin: 0 0 15px; }
h1, h2, h3, h4, h5, h6 { margin: 30px 0 15px 0; }
h1 { margin-top: 40px; }
hr {
border: 0 none;
border-top: 1px solid #e5e5ee;
height: 1px;
margin: 20px 0;
}
pre, tt, code {
font-size: 12px; line-height: 16px;
font-family: Menlo, Monaco, Consolas, "Lucida Console", monospace;
margin: 0; padding: 0;
}
ul {
list-style: none;
padding:0;
}
ul.sections {
padding:0 0 5px 0;
margin:0;
}
ul.sections > li > div.content:empty { display: none; }
/*
Force border-box so that % widths fit the parent
container without overlap because of margin/padding.
More Info : http://www.quirksmode.org/css/box.html
*/
ul.sections > li > div {
-moz-box-sizing: border-box; /* firefox */
-ms-box-sizing: border-box; /* ie */
-webkit-box-sizing: border-box; /* webkit */
-khtml-box-sizing: border-box; /* konqueror */
box-sizing: border-box; /* css3 */
}
/*---------------------- Dropdown Menu -----------------------------*/
.menu, .dropdown {
margin: 0;
font: 16px Arial;
cursor: pointer;
text-align: right;
list-style: none;
}
.dropdown { background: #e62e00; }
.menu a {
text-decoration: none;
color: white;
}
.dropdown span.large {
display: none;
font-weight: bold;
}
.dropdown span.small {
font-size: 22px;
font-weight: bold;
}
.menu, .dropdown, .dropdown-menu {
position: fixed;
right: 0; top: 0;
padding: 10px 15px;
margin:0;
}
.dropdown-menu {
display: none;
padding:0;
z-index: 1000;
}
.dropdown:hover .dropdown-menu {
display: block;
}
.dropdown-menu li {
padding: 5px 0 3px;
background-color: #5b5a51 !important;
}
.dropdown-menu li a {
display: block;
padding: 15px;
text-decoration: none;
text-align: left;
}
.dropdown-menu li a:hover {
background: #f5f5ff;
color: #261a3b;
}
/*---------------------- Low resolutions (> 320px) ---------------------*/
@media only screen and (min-width: 320px) {
.pilwrap { display: none; }
ul.sections > li > div {
display: block;
padding:5px 10px 0 10px;
}
ul.sections > li > div.annotation { background: #fff; }
ul.sections > li > div.annotation ul, ul.sections > li > div.annotation ol {
padding-left: 30px;
}
ul.sections > li > div.content {
overflow-x:auto;
-webkit-box-shadow: inset 0 0 5px #e5e5ee;
box-shadow: inset 0 0 5px #e5e5ee;
border: 1px solid #dedede;
margin:5px 10px 5px 10px;
padding-bottom: 5px;
background-color: #49483e;
}
ul.sections > li > div.annotation pre {
margin: 7px 0 7px;
padding-left: 15px;
}
ul.sections > li > div.annotation p tt, .annotation code {
background: #f8f8ff;
border: 1px solid #dedede;
font-size: 12px;
padding: 0 0.2em;
}
}
/*---------------------- (> 768px) ---------------------*/
@media only screen and (min-width: 768px) {
body {
background-color: #49483e;
font-size: 15px;
line-height: 22px;
}
pre, tt, code { line-height: 18px; }
.menu, .dropdown {
padding: 5px 10px;
font: 10px Arial;
text-transform: uppercase;
}
.dropdown-menu li a {
padding: 5px 10px;
text-align: left;
}
.dropdown span.large { display: inline-block; }
.dropdown span.small { display: none; }
ul.sections > li > div.annotation ul, ul.sections > li > div.annotation ol {
padding-left: 40px;
}
ul.sections > li { white-space: nowrap; }
ul.sections > li > div { display: inline-block; }
ul.sections > li > div.annotation {
max-width: 350px;
min-width: 350px;
min-height: 5px;
padding: 13px;
overflow-x: hidden;
white-space: normal;
vertical-align: top;
text-align: left;
display: table-cell;
}
ul.sections > li > div.annotation pre {
margin: 15px 0 15px;
padding-left: 15px;
}
ul.sections > li > div.content {
padding: 13px;
vertical-align: top;
border: none;
-webkit-box-shadow: none;
box-shadow: none;
display: table-cell;
}
.pilwrap {
position: relative;
display: inline;
}
.pilcrow {
font: 12px Arial;
text-decoration: none;
color: #454545;
position: absolute;
top: 3px; left: -20px;
padding: 1px 2px;
opacity: 0;
-webkit-transition: opacity 0.2s linear;
}
}
/*---------------------- (> 1025px) ---------------------*/
@media only screen and (min-width: 1025px) {
ul.sections > li > div.annotation {
max-width: 525px;
min-width: 525px;
padding: 10px 25px 1px 50px;
}
ul.sections > li > div.content {
padding: 9px 15px 16px 25px;
}
}
/*---------------------- Syntax Highlighting (Monokai)-----------------------------*/
.highlight { background-color: #49483e }
.c { color: #75715e } /* Comment */
.err { color: #960050; background-color: #1e0010 } /* Error */
.k { color: #66d9ef } /* Keyword */
.l { color: #ae81ff } /* Literal */
.n { color: #f8f8f2 } /* Name */
.o { color: #f92672 } /* Operator */
.p { color: #f8f8f2 } /* Punctuation */
.cm { color: #75715e } /* Comment.Multiline */
.cp { color: #75715e } /* Comment.Preproc */
.c1 { color: #75715e } /* Comment.Single */
.cs { color: #75715e } /* Comment.Special */
.ge { font-style: italic } /* Generic.Emph */
.gs { font-weight: bold } /* Generic.Strong */
.kc { color: #66d9ef } /* Keyword.Constant */
.kd { color: #66d9ef } /* Keyword.Declaration */
.kn { color: #f92672 } /* Keyword.Namespace */
.kp { color: #66d9ef } /* Keyword.Pseudo */
.kr { color: #66d9ef } /* Keyword.Reserved */
.kt { color: #66d9ef } /* Keyword.Type */
.ld { color: #e6db74 } /* Literal.Date */
.m { color: #ae81ff } /* Literal.Number */
.s { color: #e6db74 } /* Literal.String */
.na { color: #a6e22e } /* Name.Attribute */
.nb { color: #f8f8f2 } /* Name.Builtin */
.nc { color: #a6e22e } /* Name.Class */
.no { color: #66d9ef } /* Name.Constant */
.nd { color: #a6e22e } /* Name.Decorator */
.ni { color: #f8f8f2 } /* Name.Entity */
.ne { color: #a6e22e } /* Name.Exception */
.nf { color: #a6e22e } /* Name.Function */
.nl { color: #f8f8f2 } /* Name.Label */
.nn { color: #f8f8f2 } /* Name.Namespace */
.nx { color: #a6e22e } /* Name.Other */
.py { color: #f8f8f2 } /* Name.Property */
.nt { color: #f92672 } /* Name.Tag */
.nv { color: #f8f8f2 } /* Name.Variable */
.ow { color: #f92672 } /* Operator.Word */
.w { color: #f8f8f2 } /* Text.Whitespace */
.mf { color: #ae81ff } /* Literal.Number.Float */
.mh { color: #ae81ff } /* Literal.Number.Hex */
.mi { color: #ae81ff } /* Literal.Number.Integer */
.mo { color: #ae81ff } /* Literal.Number.Oct */
.sb { color: #e6db74 } /* Literal.String.Backtick */
.sc { color: #e6db74 } /* Literal.String.Char */
.sd { color: #e6db74 } /* Literal.String.Doc */
.s2 { color: #e6db74 } /* Literal.String.Double */
.se { color: #ae81ff } /* Literal.String.Escape */
.sh { color: #e6db74 } /* Literal.String.Heredoc */
.si { color: #e6db74 } /* Literal.String.Interpol */
.sx { color: #e6db74 } /* Literal.String.Other */
.sr { color: #e6db74 } /* Literal.String.Regex */
.s1 { color: #e6db74 } /* Literal.String.Single */
.ss { color: #e6db74 } /* Literal.String.Symbol */
.bp { color: #f8f8f2 } /* Name.Builtin.Pseudo */
.vc { color: #f8f8f2 } /* Name.Variable.Class */
.vg { color: #f8f8f2 } /* Name.Variable.Global */
.vi { color: #f8f8f2 } /* Name.Variable.Instance */
.il { color: #ae81ff } /* Literal.Number.Integer.Long */
"""
end
end #defmacro css
# Documentation file HTML template.
defmacro htmlHeader do
quote do
"""
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=0">
<title>{{title}}</title>
<link rel="stylesheet" href="{{css}}" type="text/css" media="screen"/>
</head>
<body>
<page>
{{navigation}}
<content>
<ul class="sections">
<li id="title">
<div class="annotation">
<h1>{{title}}</h1>
</div>
</li><!-- end #title -->
"""
end
end #defmacro htmlHeader
defmacro htmlJumpBegin do
quote do
"""
<nav role="navigation">
<ul class="menu">
<li class="dropdown">
<a href="javascript:void(0);"><span class="large">Jump To…</span><span class="small">+</span></a>
<ul class="dropdown-menu" role="menu">
"""
end
end #defmacro htmlJumpBegin
defmacro htmlJumpLink do
quote do
"<li><a href=\"{{url}}\">{{filename}}</a></li>"
end
end #defmacro htmlJumpLink
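  # Illustrative only (not exocco's actual rendering code): the `{{...}}`
  # markers in these templates are plain placeholders, so a caller could fill
  # them with ordinary string substitution, e.g.
  #
  #     require Resources
  #     Resources.htmlJumpLink()
  #     |> String.replace("{{url}}", "docs/foo.html")
  #     |> String.replace("{{filename}}", "foo.ex")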
defmacro htmlJumpEnd do
quote do
"""
</ul><!-- end dropdown-menu -->
</li><!-- end dropdown -->
</ul><!-- end menu -->
</nav>
"""
end
end #defmacro htmlJumpEnd
defmacro htmlBody do
quote do
"""
<li id="section-{{index}} ">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-{{index}}">¶</a>
</div>
{{docs}}
</div> <!-- end annotation -->
<div class="content">{{code}}</div><!-- end content -->
</li> <!-- end #section-{{index}} -->
"""
end
end #defmacro htmlBody
defmacro htmlFooter do
quote do
"""
</ul> <!-- end sections -->
</content>
</page>
{{scripts}}
</body>
</html>
"""
end
end #defmacro htmlFooter
defmacro jsLink do
quote do
"<script type=\"text/javascript\" src=\"{{js}}\"></script>"
end
end #defmacro jsLink
# Javascript for animating long dropdown menus.
defmacro dropdownjs do
quote do
"""
/* Credits to http://css-tricks.com/long-dropdowns-solution/ */
var maxHeight = 400;
$(function(){
$(".dropdown").hover(function() {
var $container = $(this),
$list = $container.find("ul"),
height = $list.height() * 1.1, // make sure there is enough room at the bottom
multiplier = height / maxHeight; // needs to move faster if list is taller
// need to save height here so it can revert on mouseout
$container.data("origHeight", $container.height());
// make sure dropdown appears directly below parent list item
$list
.show()
.css({
paddingTop: $container.data("origHeight")
});
// don't do any animation if list shorter than max
if (multiplier > 1) {
$container
.css({
overflow: "hidden"
})
.mousemove(function(e) {
var offset = $container.offset();
var relativeY = ((e.pageY - offset.top) * multiplier) - ($container.data("origHeight") * multiplier);
if (relativeY > $container.data("origHeight")) {
$list.css("top", -relativeY + $container.data("origHeight"));
};
});
}
}, function() {
var $el = $(this);
// put things back to normal
$el
.height($(this).data("origHeight"))
.find("ul")
.css({ top: 0 })
.hide();
});
});
"""
end
end #defmacro dropdownjs
defmacro jquery do
quote do
"""
/*! jQuery v2.0.3 -wrap,-ajax,-ajax/script,-ajax/jsonp,-ajax/xhr,-deprecated | (c) 2005, 2013 jQuery Foundation, Inc. | jquery.org/license
//@ sourceMappingURL=jquery.min.map
*/
(function(e,undefined){var t,n,r=typeof undefined,i=e.location,o=e.document,s=o.documentElement,a=e.jQuery,u=e.$,l={},c=[],f="2.0.3 -wrap,-ajax,-ajax/script,-ajax/jsonp,-ajax/xhr,-deprecated",p=c.concat,h=c.push,d=c.slice,g=c.indexOf,m=l.toString,y=l.hasOwnProperty,v=f.trim,x=function(e,n){return new x.fn.init(e,n,t)},b=/[+-]?(?:\\d*\\.|)\\d+(?:[eE][+-]?\\d+|)/.source,w=/\\S+/g,T=/^(?:\\s*(<[\\w\\W]+>)[^>]*|#([\\w-]*))$/,C=/^<(\\w+)\\s*\\/?>(?:<\\/\\1>|)$/,N=/^-ms-/,k=/-([\\da-z])/gi,E=function(e,t){return t.toUpperCase()},D=function(){o.removeEventListener("DOMContentLoaded",D,!1),e.removeEventListener("load",D,!1),x.ready()};x.fn=x.prototype={jquery:f,constructor:x,init:function(e,t,n){var r,i;if(!e)return this;if("string"==typeof e){if(r="<"===e.charAt(0)&&">"===e.charAt(e.length-1)&&e.length>=3?[null,e,null]:T.exec(e),!r||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof x?t[0]:t,x.merge(this,x.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:o,!0)),C.test(r[1])&&x.isPlainObject(t))for(r in t)x.isFunction(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return i=o.getElementById(r[2]),i&&i.parentNode&&(this.length=1,this[0]=i),this.context=o,this.selector=e,this}return e.nodeType?(this.context=this[0]=e,this.length=1,this):x.isFunction(e)?n.ready(e):(e.selector!==undefined&&(this.selector=e.selector,this.context=e.context),x.makeArray(e,this))},selector:"",length:0,toArray:function(){return d.call(this)},get:function(e){return null==e?this.toArray():0>e?this[this.length+e]:this[e]},pushStack:function(e){var t=x.merge(this.constructor(),e);return t.prevObject=this,t.context=this.context,t},each:function(e,t){return x.each(this,e,t)},ready:function(e){return x.ready.promise().done(e),this},slice:function(){return this.pushStack(d.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(e){var t=this.length,n=+e+(0>e?t:0);return this.pushStack(n>=0&&t>n?[this[n]]:[])},map:function(e){return this.pushStack(x.map(this,function(t,n){return e.call(t,n,t)}))},end:function(){return this.prevObject||this.constructor(null)},push:h,sort:[].sort,splice:[].splice},x.fn.init.prototype=x.fn,x.extend=x.fn.extend=function(){var e,t,n,r,i,o,s=arguments[0]||{},a=1,u=arguments.length,l=!1;for("boolean"==typeof s&&(l=s,s=arguments[1]||{},a=2),"object"==typeof s||x.isFunction(s)||(s={}),u===a&&(s=this,--a);u>a;a++)if(null!=(e=arguments[a]))for(t in e)n=s[t],r=e[t],s!==r&&(l&&r&&(x.isPlainObject(r)||(i=x.isArray(r)))?(i?(i=!1,o=n&&x.isArray(n)?n:[]):o=n&&x.isPlainObject(n)?n:{},s[t]=x.extend(l,o,r)):r!==undefined&&(s[t]=r));return s},x.extend({expando:"jQuery"+(f+Math.random()).replace(/\\D/g,""),noConflict:function(t){return e.$===x&&(e.$=u),t&&e.jQuery===x&&(e.jQuery=a),x},isReady:!1,readyWait:1,holdReady:function(e){e?x.readyWait++:x.ready(!0)},ready:function(e){(e===!0?--x.readyWait:x.isReady)||(x.isReady=!0,e!==!0&&--x.readyWait>0||(n.resolveWith(o,[x]),x.fn.trigger&&x(o).trigger("ready").off("ready")))},isFunction:function(e){return"function"===x.type(e)},isArray:Array.isArray,isWindow:function(e){return null!=e&&e===e.window},isNumeric:function(e){return!isNaN(parseFloat(e))&&isFinite(e)},type:function(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?l[m.call(e)]||"object":typeof 
e},isPlainObject:function(e){if("object"!==x.type(e)||e.nodeType||x.isWindow(e))return!1;try{if(e.constructor&&!y.call(e.constructor.prototype,"isPrototypeOf"))return!1}catch(t){return!1}return!0},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},error:function(e){throw Error(e)},parseHTML:function(e,t,n){if(!e||"string"!=typeof e)return null;"boolean"==typeof t&&(n=t,t=!1),t=t||o;var r=C.exec(e),i=!n&&[];return r?[t.createElement(r[1])]:(r=x.buildFragment([e],t,i),i&&x(i).remove(),x.merge([],r.childNodes))},parseJSON:JSON.parse,parseXML:function(e){var t,n;if(!e||"string"!=typeof e)return null;try{n=new DOMParser,t=n.parseFromString(e,"text/xml")}catch(r){t=undefined}return(!t||t.getElementsByTagName("parsererror").length)&&x.error("Invalid XML: "+e),t},noop:function(){},globalEval:function(e){var t,n=eval;e=x.trim(e),e&&(1===e.indexOf("use strict")?(t=o.createElement("script"),t.text=e,o.head.appendChild(t).parentNode.removeChild(t)):n(e))},camelCase:function(e){return e.replace(N,"ms-").replace(k,E)},nodeName:function(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()},each:function(e,t,n){var r,i=0,o=e.length,s=A(e);if(n){if(s){for(;o>i;i++)if(r=t.apply(e[i],n),r===!1)break}else for(i in e)if(r=t.apply(e[i],n),r===!1)break}else if(s){for(;o>i;i++)if(r=t.call(e[i],i,e[i]),r===!1)break}else for(i in e)if(r=t.call(e[i],i,e[i]),r===!1)break;return e},trim:function(e){return null==e?"":v.call(e)},makeArray:function(e,t){var n=t||[];return null!=e&&(A(Object(e))?x.merge(n,"string"==typeof e?[e]:e):h.call(n,e)),n},inArray:function(e,t,n){return null==t?-1:g.call(t,e,n)},merge:function(e,t){var n=t.length,r=e.length,i=0;if("number"==typeof n)for(;n>i;i++)e[r++]=t[i];else while(t[i]!==undefined)e[r++]=t[i++];return e.length=r,e},grep:function(e,t,n){var r,i=[],o=0,s=e.length;for(n=!!n;s>o;o++)r=!!t(e[o],o),n!==r&&i.push(e[o]);return i},map:function(e,t,n){var r,i=0,o=e.length,s=A(e),a=[];if(s)for(;o>i;i++)r=t(e[i],i,n),null!=r&&(a[a.length]=r);else for(i in e)r=t(e[i],i,n),null!=r&&(a[a.length]=r);return p.apply([],a)},guid:1,proxy:function(e,t){var n,r,i;return"string"==typeof t&&(n=e[t],t=e,e=n),x.isFunction(e)?(r=d.call(arguments,2),i=function(){return e.apply(t||this,r.concat(d.call(arguments)))},i.guid=e.guid=e.guid||x.guid++,i):undefined},access:function(e,t,n,r,i,o,s){var a=0,u=e.length,l=null==n;if("object"===x.type(n)){i=!0;for(a in n)x.access(e,t,a,n[a],!0,o,s)}else if(r!==undefined&&(i=!0,x.isFunction(r)||(s=!0),l&&(s?(t.call(e,r),t=null):(l=t,t=function(e,t,n){return l.call(x(e),n)})),t))for(;u>a;a++)t(e[a],n,s?r:r.call(e[a],a,t(e[a],n)));return i?e:l?t.call(e):u?t(e[0],n):o},now:Date.now,swap:function(e,t,n,r){var i,o,s={};for(o in t)s[o]=e.style[o],e.style[o]=t[o];i=n.apply(e,r||[]);for(o in t)e.style[o]=s[o];return i}}),x.ready.promise=function(t){return n||(n=x.Deferred(),"complete"===o.readyState?setTimeout(x.ready):(o.addEventListener("DOMContentLoaded",D,!1),e.addEventListener("load",D,!1))),n.promise(t)},x.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(e,t){l["[object "+t+"]"]=t.toLowerCase()});function A(e){var t=e.length,n=x.type(e);return x.isWindow(e)?!1:1===e.nodeType&&t?!0:"array"===n||"function"!==n&&(0===t||"number"==typeof t&&t>0&&t-1 in e)}t=x(o),function(e,undefined){var t,n,r,i,o,s,a,u,l,c,f,p,h,d,g,m,y,v="sizzle"+-new Date,b=e.document,w=0,T=0,C=st(),N=st(),k=st(),E=!1,D=function(e,t){return e===t?(E=!0,0):0},A=typeof 
undefined,S=1<<31,L={}.hasOwnProperty,q=[],j=q.pop,H=q.push,O=q.push,P=q.slice,F=q.indexOf||function(e){var t=0,n=this.length;for(;n>t;t++)if(this[t]===e)return t;return-1},W="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",R="[\\\\x20\\\\t\\\\r\\\\n\\\\f]",M="(?:\\\\\\\\.|[\\\\w-]|[^\\\\x00-\\\\xa0])+",B=M.replace("w","w#"),$="\\\\["+R+"*("+M+")"+R+"*(?:([*^$|!~]?=)"+R+"*(?:(['\\"])((?:\\\\\\\\.|[^\\\\\\\\])*?)\\\\3|("+B+")|)|)"+R+"*\\\\]",I=":("+M+")(?:\\\\(((['\\"])((?:\\\\\\\\.|[^\\\\\\\\])*?)\\\\3|((?:\\\\\\\\.|[^\\\\\\\\()[\\\\]]|"+$.replace(3,8)+")*)|.*)\\\\)|)",z=RegExp("^"+R+"+|((?:^|[^\\\\\\\\])(?:\\\\\\\\.)*)"+R+"+$","g"),_=RegExp("^"+R+"*,"+R+"*"),X=RegExp("^"+R+"*([>+~]|"+R+")"+R+"*"),U=RegExp(R+"*[+~]"),Y=RegExp("="+R+"*([^\\\\]'\\"]*)"+R+"*\\\\]","g"),V=RegExp(I),Q=RegExp("^"+B+"$"),G={ID:RegExp("^#("+M+")"),CLASS:RegExp("^\\\\.("+M+")"),TAG:RegExp("^("+M.replace("w","w*")+")"),ATTR:RegExp("^"+$),PSEUDO:RegExp("^"+I),CHILD:RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\\\("+R+"*(even|odd|(([+-]|)(\\\\d*)n|)"+R+"*(?:([+-]|)"+R+"*(\\\\d+)|))"+R+"*\\\\)|)","i"),bool:RegExp("^(?:"+W+")$","i"),needsContext:RegExp("^"+R+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\\\("+R+"*((?:-\\\\d)?\\\\d*)"+R+"*\\\\)|)(?=[^-]|$)","i")},K=/^[^{]+\\{\\s*\\[native \\w/,J=/^(?:#([\\w-]+)|(\\w+)|\\.([\\w-]+))$/,Z=/^(?:input|select|textarea|button)$/i,et=/^h\\d$/i,tt=/'|\\\\/g,nt=RegExp("\\\\\\\\([\\\\da-f]{1,6}"+R+"?|("+R+")|.)","ig"),rt=function(e,t,n){var r="0x"+t-65536;return r!==r||n?t:0>r?String.fromCharCode(r+65536):String.fromCharCode(55296|r>>10,56320|1023&r)};try{O.apply(q=P.call(b.childNodes),b.childNodes),q[b.childNodes.length].nodeType}catch(it){O={apply:q.length?function(e,t){H.apply(e,P.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function ot(e,t,r,i){var o,s,a,u,l,p,g,m,x,w;if((t?t.ownerDocument||t:b)!==f&&c(t),t=t||f,r=r||[],!e||"string"!=typeof e)return r;if(1!==(u=t.nodeType)&&9!==u)return[];if(h&&!i){if(o=J.exec(e))if(a=o[1]){if(9===u){if(s=t.getElementById(a),!s||!s.parentNode)return r;if(s.id===a)return r.push(s),r}else if(t.ownerDocument&&(s=t.ownerDocument.getElementById(a))&&y(t,s)&&s.id===a)return r.push(s),r}else{if(o[2])return O.apply(r,t.getElementsByTagName(e)),r;if((a=o[3])&&n.getElementsByClassName&&t.getElementsByClassName)return O.apply(r,t.getElementsByClassName(a)),r}if(n.qsa&&(!d||!d.test(e))){if(m=g=v,x=t,w=9===u&&e,1===u&&"object"!==t.nodeName.toLowerCase()){p=gt(e),(g=t.getAttribute("id"))?m=g.replace(tt,"\\\\$&"):t.setAttribute("id",m),m="[id='"+m+"'] ",l=p.length;while(l--)p[l]=m+mt(p[l]);x=U.test(e)&&t.parentNode||t,w=p.join(",")}if(w)try{return O.apply(r,x.querySelectorAll(w)),r}catch(T){}finally{g||t.removeAttribute("id")}}}return Nt(e.replace(z,"$1"),t,r,i)}function st(){var e=[];function t(n,r){return e.push(n+=" ")>i.cacheLength&&delete t[e.shift()],t[n]=r}return t}function at(e){return e[v]=!0,e}function ut(e){var t=f.createElement("div");try{return!!e(t)}catch(n){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function lt(e,t){var n=e.split("|"),r=e.length;while(r--)i.attrHandle[n[r]]=t}function ct(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&(~t.sourceIndex||S)-(~e.sourceIndex||S);if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function ft(e){return function(t){var n=t.nodeName.toLowerCase();return"input"===n&&t.type===e}}function pt(e){return function(t){var 
n=t.nodeName.toLowerCase();return("input"===n||"button"===n)&&t.type===e}}function ht(e){return at(function(t){return t=+t,at(function(n,r){var i,o=e([],n.length,t),s=o.length;while(s--)n[i=o[s]]&&(n[i]=!(r[i]=n[i]))})})}s=ot.isXML=function(e){var t=e&&(e.ownerDocument||e).documentElement;return t?"HTML"!==t.nodeName:!1},n=ot.support={},c=ot.setDocument=function(e){var t=e?e.ownerDocument||e:b,r=t.defaultView;return t!==f&&9===t.nodeType&&t.documentElement?(f=t,p=t.documentElement,h=!s(t),r&&r.attachEvent&&r!==r.top&&r.attachEvent("onbeforeunload",function(){c()}),n.attributes=ut(function(e){return e.className="i",!e.getAttribute("className")}),n.getElementsByTagName=ut(function(e){return e.appendChild(t.createComment("")),!e.getElementsByTagName("*").length}),n.getElementsByClassName=ut(function(e){return e.innerHTML="<div class='a'></div><div class='a i'></div>",e.firstChild.className="i",2===e.getElementsByClassName("i").length}),n.getById=ut(function(e){return p.appendChild(e).id=v,!t.getElementsByName||!t.getElementsByName(v).length}),n.getById?(i.find.ID=function(e,t){if(typeof t.getElementById!==A&&h){var n=t.getElementById(e);return n&&n.parentNode?[n]:[]}},i.filter.ID=function(e){var t=e.replace(nt,rt);return function(e){return e.getAttribute("id")===t}}):(delete i.find.ID,i.filter.ID=function(e){var t=e.replace(nt,rt);return function(e){var n=typeof e.getAttributeNode!==A&&e.getAttributeNode("id");return n&&n.value===t}}),i.find.TAG=n.getElementsByTagName?function(e,t){return typeof t.getElementsByTagName!==A?t.getElementsByTagName(e):undefined}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},i.find.CLASS=n.getElementsByClassName&&function(e,t){return typeof t.getElementsByClassName!==A&&h?t.getElementsByClassName(e):undefined},g=[],d=[],(n.qsa=K.test(t.querySelectorAll))&&(ut(function(e){e.innerHTML="<select><option selected=''></option></select>",e.querySelectorAll("[selected]").length||d.push("\\\\["+R+"*(?:value|"+W+")"),e.querySelectorAll(":checked").length||d.push(":checked")}),ut(function(e){var n=t.createElement("input");n.setAttribute("type","hidden"),e.appendChild(n).setAttribute("t",""),e.querySelectorAll("[t^='']").length&&d.push("[*^$]="+R+"*(?:''|\\"\\")"),e.querySelectorAll(":enabled").length||d.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),d.push(",.*:")})),(n.matchesSelector=K.test(m=p.webkitMatchesSelector||p.mozMatchesSelector||p.oMatchesSelector||p.msMatchesSelector))&&ut(function(e){n.disconnectedMatch=m.call(e,"div"),m.call(e,"[s!='']:x"),g.push("!=",I)}),d=d.length&&RegExp(d.join("|")),g=g.length&&RegExp(g.join("|")),y=K.test(p.contains)||p.compareDocumentPosition?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},D=p.compareDocumentPosition?function(e,r){if(e===r)return E=!0,0;var i=r.compareDocumentPosition&&e.compareDocumentPosition&&e.compareDocumentPosition(r);return i?1&i||!n.sortDetached&&r.compareDocumentPosition(e)===i?e===t||y(b,e)?-1:r===t||y(b,r)?1:l?F.call(l,e)-F.call(l,r):0:4&i?-1:1:e.compareDocumentPosition?-1:1}:function(e,n){var r,i=0,o=e.parentNode,s=n.parentNode,a=[e],u=[n];if(e===n)return E=!0,0;if(!o||!s)return e===t?-1:n===t?1:o?-1:s?1:l?F.call(l,e)-F.call(l,n):0;if(o===s)return 
ct(e,n);r=e;while(r=r.parentNode)a.unshift(r);r=n;while(r=r.parentNode)u.unshift(r);while(a[i]===u[i])i++;return i?ct(a[i],u[i]):a[i]===b?-1:u[i]===b?1:0},t):f},ot.matches=function(e,t){return ot(e,null,null,t)},ot.matchesSelector=function(e,t){if((e.ownerDocument||e)!==f&&c(e),t=t.replace(Y,"='$1']"),!(!n.matchesSelector||!h||g&&g.test(t)||d&&d.test(t)))try{var r=m.call(e,t);if(r||n.disconnectedMatch||e.document&&11!==e.document.nodeType)return r}catch(i){}return ot(t,f,null,[e]).length>0},ot.contains=function(e,t){return(e.ownerDocument||e)!==f&&c(e),y(e,t)},ot.attr=function(e,t){(e.ownerDocument||e)!==f&&c(e);var r=i.attrHandle[t.toLowerCase()],o=r&&L.call(i.attrHandle,t.toLowerCase())?r(e,t,!h):undefined;return o===undefined?n.attributes||!h?e.getAttribute(t):(o=e.getAttributeNode(t))&&o.specified?o.value:null:o},ot.error=function(e){throw Error("Syntax error, unrecognized expression: "+e)},ot.uniqueSort=function(e){var t,r=[],i=0,o=0;if(E=!n.detectDuplicates,l=!n.sortStable&&e.slice(0),e.sort(D),E){while(t=e[o++])t===e[o]&&(i=r.push(o));while(i--)e.splice(r[i],1)}return e},o=ot.getText=function(e){var t,n="",r=0,i=e.nodeType;if(i){if(1===i||9===i||11===i){if("string"==typeof e.textContent)return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=o(e)}else if(3===i||4===i)return e.nodeValue}else for(;t=e[r];r++)n+=o(t);return n},i=ot.selectors={cacheLength:50,createPseudo:at,match:G,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(nt,rt),e[3]=(e[4]||e[5]||"").replace(nt,rt),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||ot.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&ot.error(e[0]),e},PSEUDO:function(e){var t,n=!e[5]&&e[2];return G.CHILD.test(e[0])?null:(e[3]&&e[4]!==undefined?e[2]=e[4]:n&&V.test(n)&&(t=gt(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(nt,rt).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=C[e+" "];return t||(t=RegExp("(^|"+R+")"+e+"("+R+"|$)"))&&C(e,function(e){return t.test("string"==typeof e.className&&e.className||typeof e.getAttribute!==A&&e.getAttribute("class")||"")})},ATTR:function(e,t,n){return function(r){var i=ot.attr(r,e);return null==i?"!="===t:t?(i+="","="===t?i===n:"!="===t?i!==n:"^="===t?n&&0===i.indexOf(n):"*="===t?n&&i.indexOf(n)>-1:"$="===t?n&&i.slice(-n.length)===n:"~="===t?(" "+i+" ").indexOf(n)>-1:"|="===t?i===n||i.slice(0,n.length+1)===n+"-":!1):!0}},CHILD:function(e,t,n,r,i){var o="nth"!==e.slice(0,3),s="last"!==e.slice(-4),a="of-type"===t;return 1===r&&0===i?function(e){return!!e.parentNode}:function(t,n,u){var l,c,f,p,h,d,g=o!==s?"nextSibling":"previousSibling",m=t.parentNode,y=a&&t.nodeName.toLowerCase(),x=!u&&!a;if(m){if(o){while(g){f=t;while(f=f[g])if(a?f.nodeName.toLowerCase()===y:1===f.nodeType)return!1;d=g="only"===e&&!d&&"nextSibling"}return!0}if(d=[s?m.firstChild:m.lastChild],s&&x){c=m[v]||(m[v]={}),l=c[e]||[],h=l[0]===w&&l[1],p=l[0]===w&&l[2],f=h&&m.childNodes[h];while(f=++h&&f&&f[g]||(p=h=0)||d.pop())if(1===f.nodeType&&++p&&f===t){c[e]=[w,h,p];break}}else if(x&&(l=(t[v]||(t[v]={}))[e])&&l[0]===w)p=l[1];else 
while(f=++h&&f&&f[g]||(p=h=0)||d.pop())if((a?f.nodeName.toLowerCase()===y:1===f.nodeType)&&++p&&(x&&((f[v]||(f[v]={}))[e]=[w,p]),f===t))break;return p-=i,p===r||0===p%r&&p/r>=0}}},PSEUDO:function(e,t){var n,r=i.pseudos[e]||i.setFilters[e.toLowerCase()]||ot.error("unsupported pseudo: "+e);return r[v]?r(t):r.length>1?(n=[e,e,"",t],i.setFilters.hasOwnProperty(e.toLowerCase())?at(function(e,n){var i,o=r(e,t),s=o.length;while(s--)i=F.call(e,o[s]),e[i]=!(n[i]=o[s])}):function(e){return r(e,0,n)}):r}},pseudos:{not:at(function(e){var t=[],n=[],r=a(e.replace(z,"$1"));return r[v]?at(function(e,t,n,i){var o,s=r(e,null,i,[]),a=e.length;while(a--)(o=s[a])&&(e[a]=!(t[a]=o))}):function(e,i,o){return t[0]=e,r(t,null,o,n),!n.pop()}}),has:at(function(e){return function(t){return ot(e,t).length>0}}),contains:at(function(e){return function(t){return(t.textContent||t.innerText||o(t)).indexOf(e)>-1}}),lang:at(function(e){return Q.test(e||"")||ot.error("unsupported lang: "+e),e=e.replace(nt,rt).toLowerCase(),function(t){var n;do if(n=h?t.lang:t.getAttribute("xml:lang")||t.getAttribute("lang"))return n=n.toLowerCase(),n===e||0===n.indexOf(e+"-");while((t=t.parentNode)&&1===t.nodeType);return!1}}),target:function(t){var n=e.location&&e.location.hash;return n&&n.slice(1)===t.id},root:function(e){return e===p},focus:function(e){return e===f.activeElement&&(!f.hasFocus||f.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},enabled:function(e){return e.disabled===!1},disabled:function(e){return e.disabled===!0},checked:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&!!e.checked||"option"===t&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,e.selected===!0},empty:function(e){for(e=e.firstChild;e;e=e.nextSibling)if(e.nodeName>"@"||3===e.nodeType||4===e.nodeType)return!1;return!0},parent:function(e){return!i.pseudos.empty(e)},header:function(e){return et.test(e.nodeName)},input:function(e){return Z.test(e.nodeName)},button:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&"button"===e.type||"button"===t},text:function(e){var t;return"input"===e.nodeName.toLowerCase()&&"text"===e.type&&(null==(t=e.getAttribute("type"))||t.toLowerCase()===e.type)},first:ht(function(){return[0]}),last:ht(function(e,t){return[t-1]}),eq:ht(function(e,t,n){return[0>n?n+t:n]}),even:ht(function(e,t){var n=0;for(;t>n;n+=2)e.push(n);return e}),odd:ht(function(e,t){var n=1;for(;t>n;n+=2)e.push(n);return e}),lt:ht(function(e,t,n){var r=0>n?n+t:n;for(;--r>=0;)e.push(r);return e}),gt:ht(function(e,t,n){var r=0>n?n+t:n;for(;t>++r;)e.push(r);return e})}},i.pseudos.nth=i.pseudos.eq;for(t in{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})i.pseudos[t]=ft(t);for(t in{submit:!0,reset:!0})i.pseudos[t]=pt(t);function dt(){}dt.prototype=i.filters=i.pseudos,i.setFilters=new dt;function gt(e,t){var n,r,o,s,a,u,l,c=N[e+" "];if(c)return t?0:c.slice(0);a=e,u=[],l=i.preFilter;while(a){(!n||(r=_.exec(a)))&&(r&&(a=a.slice(r[0].length)||a),u.push(o=[])),n=!1,(r=X.exec(a))&&(n=r.shift(),o.push({value:n,type:r[0].replace(z," ")}),a=a.slice(n.length));for(s in i.filter)!(r=G[s].exec(a))||l[s]&&!(r=l[s](r))||(n=r.shift(),o.push({value:n,type:s,matches:r}),a=a.slice(n.length));if(!n)break}return t?a.length:a?ot.error(e):N(e,u).slice(0)}function mt(e){var t=0,n=e.length,r="";for(;n>t;t++)r+=e[t].value;return r}function yt(e,t,n){var i=t.dir,o=n&&"parentNode"===i,s=T++;return t.first?function(t,n,r){while(t=t[i])if(1===t.nodeType||o)return e(t,n,r)}:function(t,n,a){var u,l,c,f=w+" 
"+s;if(a){while(t=t[i])if((1===t.nodeType||o)&&e(t,n,a))return!0}else while(t=t[i])if(1===t.nodeType||o)if(c=t[v]||(t[v]={}),(l=c[i])&&l[0]===f){if((u=l[1])===!0||u===r)return u===!0}else if(l=c[i]=[f],l[1]=e(t,n,a)||r,l[1]===!0)return!0}}function vt(e){return e.length>1?function(t,n,r){var i=e.length;while(i--)if(!e[i](t,n,r))return!1;return!0}:e[0]}function xt(e,t,n,r,i){var o,s=[],a=0,u=e.length,l=null!=t;for(;u>a;a++)(o=e[a])&&(!n||n(o,r,i))&&(s.push(o),l&&t.push(a));return s}function bt(e,t,n,r,i,o){return r&&!r[v]&&(r=bt(r)),i&&!i[v]&&(i=bt(i,o)),at(function(o,s,a,u){var l,c,f,p=[],h=[],d=s.length,g=o||Ct(t||"*",a.nodeType?[a]:a,[]),m=!e||!o&&t?g:xt(g,p,e,a,u),y=n?i||(o?e:d||r)?[]:s:m;if(n&&n(m,y,a,u),r){l=xt(y,h),r(l,[],a,u),c=l.length;while(c--)(f=l[c])&&(y[h[c]]=!(m[h[c]]=f))}if(o){if(i||e){if(i){l=[],c=y.length;while(c--)(f=y[c])&&l.push(m[c]=f);i(null,y=[],l,u)}c=y.length;while(c--)(f=y[c])&&(l=i?F.call(o,f):p[c])>-1&&(o[l]=!(s[l]=f))}}else y=xt(y===s?y.splice(d,y.length):y),i?i(null,s,y,u):O.apply(s,y)})}function wt(e){var t,n,r,o=e.length,s=i.relative[e[0].type],a=s||i.relative[" "],l=s?1:0,c=yt(function(e){return e===t},a,!0),f=yt(function(e){return F.call(t,e)>-1},a,!0),p=[function(e,n,r){return!s&&(r||n!==u)||((t=n).nodeType?c(e,n,r):f(e,n,r))}];for(;o>l;l++)if(n=i.relative[e[l].type])p=[yt(vt(p),n)];else{if(n=i.filter[e[l].type].apply(null,e[l].matches),n[v]){for(r=++l;o>r;r++)if(i.relative[e[r].type])break;return bt(l>1&&vt(p),l>1&&mt(e.slice(0,l-1).concat({value:" "===e[l-2].type?"*":""})).replace(z,"$1"),n,r>l&&wt(e.slice(l,r)),o>r&&wt(e=e.slice(r)),o>r&&mt(e))}p.push(n)}return vt(p)}function Tt(e,t){var n=0,o=t.length>0,s=e.length>0,a=function(a,l,c,p,h){var d,g,m,y=[],v=0,x="0",b=a&&[],T=null!=h,C=u,N=a||s&&i.find.TAG("*",h&&l.parentNode||l),k=w+=null==C?1:Math.random()||.1;for(T&&(u=l!==f&&l,r=n);null!=(d=N[x]);x++){if(s&&d){g=0;while(m=e[g++])if(m(d,l,c)){p.push(d);break}T&&(w=k,r=++n)}o&&((d=!m&&d)&&v--,a&&b.push(d))}if(v+=x,o&&x!==v){g=0;while(m=t[g++])m(b,y,l,c);if(a){if(v>0)while(x--)b[x]||y[x]||(y[x]=j.call(p));y=xt(y)}O.apply(p,y),T&&!a&&y.length>0&&v+t.length>1&&ot.uniqueSort(p)}return T&&(w=k,u=C),b};return o?at(a):a}a=ot.compile=function(e,t){var n,r=[],i=[],o=k[e+" "];if(!o){t||(t=gt(e)),n=t.length;while(n--)o=wt(t[n]),o[v]?r.push(o):i.push(o);o=k(e,Tt(i,r))}return o};function Ct(e,t,n){var r=0,i=t.length;for(;i>r;r++)ot(e,t[r],n);return n}function Nt(e,t,r,o){var s,u,l,c,f,p=gt(e);if(!o&&1===p.length){if(u=p[0]=p[0].slice(0),u.length>2&&"ID"===(l=u[0]).type&&n.getById&&9===t.nodeType&&h&&i.relative[u[1].type]){if(t=(i.find.ID(l.matches[0].replace(nt,rt),t)||[])[0],!t)return r;e=e.slice(u.shift().value.length)}s=G.needsContext.test(e)?0:u.length;while(s--){if(l=u[s],i.relative[c=l.type])break;if((f=i.find[c])&&(o=f(l.matches[0].replace(nt,rt),U.test(u[0].type)&&t.parentNode||t))){if(u.splice(s,1),e=o.length&&mt(u),!e)return O.apply(r,o),r;break}}}return a(e,p)(o,t,!h,r,U.test(e)),r}n.sortStable=v.split("").sort(D).join("")===v,n.detectDuplicates=E,c(),n.sortDetached=ut(function(e){return 1&e.compareDocumentPosition(f.createElement("div"))}),ut(function(e){return e.innerHTML="<a href='#'></a>","#"===e.firstChild.getAttribute("href")})||lt("type|href|height|width",function(e,t,n){return n?undefined:e.getAttribute(t,"type"===t.toLowerCase()?1:2)}),n.attributes&&ut(function(e){return e.innerHTML="<input/>",e.firstChild.setAttribute("value",""),""===e.firstChild.getAttribute("value")})||lt("value",function(e,t,n){return 
n||"input"!==e.nodeName.toLowerCase()?undefined:e.defaultValue}),ut(function(e){return null==e.getAttribute("disabled")})||lt(W,function(e,t,n){var r;return n?undefined:(r=e.getAttributeNode(t))&&r.specified?r.value:e[t]===!0?t.toLowerCase():null}),x.find=ot,x.expr=ot.selectors,x.expr[":"]=x.expr.pseudos,x.unique=ot.uniqueSort,x.text=ot.getText,x.isXMLDoc=ot.isXML,x.contains=ot.contains}(e);var S={};function L(e){var t=S[e]={};return x.each(e.match(w)||[],function(e,n){t[n]=!0}),t}x.Callbacks=function(e){e="string"==typeof e?S[e]||L(e):x.extend({},e);var t,n,r,i,o,s,a=[],u=!e.once&&[],l=function(f){for(t=e.memory&&f,n=!0,s=i||0,i=0,o=a.length,r=!0;a&&o>s;s++)if(a[s].apply(f[0],f[1])===!1&&e.stopOnFalse){t=!1;break}r=!1,a&&(u?u.length&&l(u.shift()):t?a=[]:c.disable())},c={add:function(){if(a){var n=a.length;(function s(t){x.each(t,function(t,n){var r=x.type(n);"function"===r?e.unique&&c.has(n)||a.push(n):n&&n.length&&"string"!==r&&s(n)})})(arguments),r?o=a.length:t&&(i=n,l(t))}return this},remove:function(){return a&&x.each(arguments,function(e,t){var n;while((n=x.inArray(t,a,n))>-1)a.splice(n,1),r&&(o>=n&&o--,s>=n&&s--)}),this},has:function(e){return e?x.inArray(e,a)>-1:!(!a||!a.length)},empty:function(){return a=[],o=0,this},disable:function(){return a=u=t=undefined,this},disabled:function(){return!a},lock:function(){return u=undefined,t||c.disable(),this},locked:function(){return!u},fireWith:function(e,t){return!a||n&&!u||(t=t||[],t=[e,t.slice?t.slice():t],r?u.push(t):l(t)),this},fire:function(){return c.fireWith(this,arguments),this},fired:function(){return!!n}};return c},x.extend({Deferred:function(e){var t=[["resolve","done",x.Callbacks("once memory"),"resolved"],["reject","fail",x.Callbacks("once memory"),"rejected"],["notify","progress",x.Callbacks("memory")]],n="pending",r={state:function(){return n},always:function(){return i.done(arguments).fail(arguments),this},then:function(){var e=arguments;return x.Deferred(function(n){x.each(t,function(t,o){var s=o[0],a=x.isFunction(e[t])&&e[t];i[o[1]](function(){var e=a&&a.apply(this,arguments);e&&x.isFunction(e.promise)?e.promise().done(n.resolve).fail(n.reject).progress(n.notify):n[s+"With"](this===r?n.promise():this,a?[e]:arguments)})}),e=null}).promise()},promise:function(e){return null!=e?x.extend(e,r):r}},i={};return r.pipe=r.then,x.each(t,function(e,o){var s=o[2],a=o[3];r[o[1]]=s.add,a&&s.add(function(){n=a},t[1^e][2].disable,t[2][2].lock),i[o[0]]=function(){return i[o[0]+"With"](this===i?r:this,arguments),this},i[o[0]+"With"]=s.fireWith}),r.promise(i),e&&e.call(i,i),i},when:function(e){var t=0,n=d.call(arguments),r=n.length,i=1!==r||e&&x.isFunction(e.promise)?r:0,o=1===i?e:x.Deferred(),s=function(e,t,n){return function(r){t[e]=this,n[e]=arguments.length>1?d.call(arguments):r,n===a?o.notifyWith(t,n):--i||o.resolveWith(t,n)}},a,u,l;if(r>1)for(a=Array(r),u=Array(r),l=Array(r);r>t;t++)n[t]&&x.isFunction(n[t].promise)?n[t].promise().done(s(t,l,n)).fail(o.reject).progress(s(t,u,a)):--i;return i||o.resolveWith(l,n),o.promise()}}),x.support=function(t){var n=o.createElement("input"),r=o.createDocumentFragment(),i=o.createElement("div"),s=o.createElement("select"),a=s.appendChild(o.createElement("option"));return 
n.type?(n.type="checkbox",t.checkOn=""!==n.value,t.optSelected=a.selected,t.reliableMarginRight=!0,t.boxSizingReliable=!0,t.pixelPosition=!1,n.checked=!0,t.noCloneChecked=n.cloneNode(!0).checked,s.disabled=!0,t.optDisabled=!a.disabled,n=o.createElement("input"),n.value="t",n.type="radio",t.radioValue="t"===n.value,n.setAttribute("checked","t"),n.setAttribute("name","t"),r.appendChild(n),t.checkClone=r.cloneNode(!0).cloneNode(!0).lastChild.checked,t.focusinBubbles="onfocusin"in e,i.style.backgroundClip="content-box",i.cloneNode(!0).style.backgroundClip="",t.clearCloneStyle="content-box"===i.style.backgroundClip,x(function(){var n,r,s="padding:0;margin:0;border:0;display:block;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box",a=o.getElementsByTagName("body")[0];a&&(n=o.createElement("div"),n.style.cssText="border:0;width:0;height:0;position:absolute;top:0;left:-9999px;margin-top:1px",a.appendChild(n).appendChild(i),i.innerHTML="",i.style.cssText="-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%",x.swap(a,null!=a.style.zoom?{zoom:1}:{},function(){t.boxSizing=4===i.offsetWidth}),e.getComputedStyle&&(t.pixelPosition="1%"!==(e.getComputedStyle(i,null)||{}).top,t.boxSizingReliable="4px"===(e.getComputedStyle(i,null)||{width:"4px"}).width,r=i.appendChild(o.createElement("div")),r.style.cssText=i.style.cssText=s,r.style.marginRight=r.style.width="0",i.style.width="1px",t.reliableMarginRight=!parseFloat((e.getComputedStyle(r,null)||{}).marginRight)),a.removeChild(n))}),t):t}({});var q,j,H=/(?:\\{[\\s\\S]*\\}|\\[[\\s\\S]*\\])$/,O=/([A-Z])/g;function P(){Object.defineProperty(this.cache={},0,{get:function(){return{}}}),this.expando=x.expando+Math.random()}P.uid=1,P.accepts=function(e){return e.nodeType?1===e.nodeType||9===e.nodeType:!0},P.prototype={key:function(e){if(!P.accepts(e))return 0;var t={},n=e[this.expando];if(!n){n=P.uid++;try{t[this.expando]={value:n},Object.defineProperties(e,t)}catch(r){t[this.expando]=n,x.extend(e,t)}}return this.cache[n]||(this.cache[n]={}),n},set:function(e,t,n){var r,i=this.key(e),o=this.cache[i];if("string"==typeof t)o[t]=n;else if(x.isEmptyObject(o))x.extend(this.cache[i],t);else for(r in t)o[r]=t[r];return o},get:function(e,t){var n=this.cache[this.key(e)];return t===undefined?n:n[t]},access:function(e,t,n){var r;return t===undefined||t&&"string"==typeof t&&n===undefined?(r=this.get(e,t),r!==undefined?r:this.get(e,x.camelCase(t))):(this.set(e,t,n),n!==undefined?n:t)},remove:function(e,t){var n,r,i,o=this.key(e),s=this.cache[o];if(t===undefined)this.cache[o]={};else{x.isArray(t)?r=t.concat(t.map(x.camelCase)):(i=x.camelCase(t),t in s?r=[t,i]:(r=i,r=r in s?[r]:r.match(w)||[])),n=r.length;while(n--)delete s[r[n]]}},hasData:function(e){return!x.isEmptyObject(this.cache[e[this.expando]]||{})},discard:function(e){e[this.expando]&&delete this.cache[e[this.expando]]}},q=new P,j=new P,x.extend({acceptData:P.accepts,hasData:function(e){return q.hasData(e)||j.hasData(e)},data:function(e,t,n){return q.access(e,t,n)},removeData:function(e,t){q.remove(e,t)},_data:function(e,t,n){return j.access(e,t,n)},_removeData:function(e,t){j.remove(e,t)}}),x.fn.extend({data:function(e,t){var 
n,r,i=this[0],o=0,s=null;if(e===undefined){if(this.length&&(s=q.get(i),1===i.nodeType&&!j.get(i,"hasDataAttrs"))){for(n=i.attributes;n.length>o;o++)r=n[o].name,0===r.indexOf("data-")&&(r=x.camelCase(r.slice(5)),F(i,r,s[r]));j.set(i,"hasDataAttrs",!0)}return s}return"object"==typeof e?this.each(function(){q.set(this,e)}):x.access(this,function(t){var n,r=x.camelCase(e);if(i&&t===undefined){if(n=q.get(i,e),n!==undefined)return n;if(n=q.get(i,r),n!==undefined)return n;if(n=F(i,r,undefined),n!==undefined)return n}else this.each(function(){var n=q.get(this,r);q.set(this,r,t),-1!==e.indexOf("-")&&n!==undefined&&q.set(this,e,t)})},null,t,arguments.length>1,null,!0)},removeData:function(e){return this.each(function(){q.remove(this,e)})}});function F(e,t,n){var r;if(n===undefined&&1===e.nodeType)if(r="data-"+t.replace(O,"-$1").toLowerCase(),n=e.getAttribute(r),"string"==typeof n){try{n="true"===n?!0:"false"===n?!1:"null"===n?null:+n+""===n?+n:H.test(n)?JSON.parse(n):n}catch(i){}q.set(e,t,n)}else n=undefined;return n}x.extend({queue:function(e,t,n){var r;return e?(t=(t||"fx")+"queue",r=j.get(e,t),n&&(!r||x.isArray(n)?r=j.access(e,t,x.makeArray(n)):r.push(n)),r||[]):undefined},dequeue:function(e,t){t=t||"fx";var n=x.queue(e,t),r=n.length,i=n.shift(),o=x._queueHooks(e,t),s=function(){x.dequeue(e,t)
};"inprogress"===i&&(i=n.shift(),r--),i&&("fx"===t&&n.unshift("inprogress"),delete o.stop,i.call(e,s,o)),!r&&o&&o.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return j.get(e,n)||j.access(e,n,{empty:x.Callbacks("once memory").add(function(){j.remove(e,[t+"queue",n])})})}}),x.fn.extend({queue:function(e,t){var n=2;return"string"!=typeof e&&(t=e,e="fx",n--),n>arguments.length?x.queue(this[0],e):t===undefined?this:this.each(function(){var n=x.queue(this,e,t);x._queueHooks(this,e),"fx"===e&&"inprogress"!==n[0]&&x.dequeue(this,e)})},dequeue:function(e){return this.each(function(){x.dequeue(this,e)})},delay:function(e,t){return e=x.fx?x.fx.speeds[e]||e:e,t=t||"fx",this.queue(t,function(t,n){var r=setTimeout(t,e);n.stop=function(){clearTimeout(r)}})},clearQueue:function(e){return this.queue(e||"fx",[])},promise:function(e,t){var n,r=1,i=x.Deferred(),o=this,s=this.length,a=function(){--r||i.resolveWith(o,[o])};"string"!=typeof e&&(t=e,e=undefined),e=e||"fx";while(s--)n=j.get(o[s],e+"queueHooks"),n&&n.empty&&(r++,n.empty.add(a));return a(),i.promise(t)}});var W,R,M=/[\\t\\r\\n\\f]/g,B=/\\r/g,$=/^(?:input|select|textarea|button)$/i;x.fn.extend({attr:function(e,t){return x.access(this,x.attr,e,t,arguments.length>1)},removeAttr:function(e){return this.each(function(){x.removeAttr(this,e)})},prop:function(e,t){return x.access(this,x.prop,e,t,arguments.length>1)},removeProp:function(e){return this.each(function(){delete this[x.propFix[e]||e]})},addClass:function(e){var t,n,r,i,o,s=0,a=this.length,u="string"==typeof e&&e;if(x.isFunction(e))return this.each(function(t){x(this).addClass(e.call(this,t,this.className))});if(u)for(t=(e||"").match(w)||[];a>s;s++)if(n=this[s],r=1===n.nodeType&&(n.className?(" "+n.className+" ").replace(M," "):" ")){o=0;while(i=t[o++])0>r.indexOf(" "+i+" ")&&(r+=i+" ");n.className=x.trim(r)}return this},removeClass:function(e){var t,n,r,i,o,s=0,a=this.length,u=0===arguments.length||"string"==typeof e&&e;if(x.isFunction(e))return this.each(function(t){x(this).removeClass(e.call(this,t,this.className))});if(u)for(t=(e||"").match(w)||[];a>s;s++)if(n=this[s],r=1===n.nodeType&&(n.className?(" "+n.className+" ").replace(M," "):"")){o=0;while(i=t[o++])while(r.indexOf(" "+i+" ")>=0)r=r.replace(" "+i+" "," ");n.className=e?x.trim(r):""}return this},toggleClass:function(e,t){var n=typeof e;return"boolean"==typeof t&&"string"===n?t?this.addClass(e):this.removeClass(e):x.isFunction(e)?this.each(function(n){x(this).toggleClass(e.call(this,n,this.className,t),t)}):this.each(function(){if("string"===n){var t,i=0,o=x(this),s=e.match(w)||[];while(t=s[i++])o.hasClass(t)?o.removeClass(t):o.addClass(t)}else(n===r||"boolean"===n)&&(this.className&&j.set(this,"__className__",this.className),this.className=this.className||e===!1?"":j.get(this,"__className__")||"")})},hasClass:function(e){var t=" "+e+" ",n=0,r=this.length;for(;r>n;n++)if(1===this[n].nodeType&&(" "+this[n].className+" ").replace(M," ").indexOf(t)>=0)return!0;return!1},val:function(e){var t,n,r,i=this[0];{if(arguments.length)return r=x.isFunction(e),this.each(function(n){var i;1===this.nodeType&&(i=r?e.call(this,n,x(this).val()):e,null==i?i="":"number"==typeof i?i+="":x.isArray(i)&&(i=x.map(i,function(e){return null==e?"":e+""})),t=x.valHooks[this.type]||x.valHooks[this.nodeName.toLowerCase()],t&&"set"in t&&t.set(this,i,"value")!==undefined||(this.value=i))});if(i)return t=x.valHooks[i.type]||x.valHooks[i.nodeName.toLowerCase()],t&&"get"in t&&(n=t.get(i,"value"))!==undefined?n:(n=i.value,"string"==typeof 
n?n.replace(B,""):null==n?"":n)}}}),x.extend({valHooks:{option:{get:function(e){var t=e.attributes.value;return!t||t.specified?e.value:e.text}},select:{get:function(e){var t,n,r=e.options,i=e.selectedIndex,o="select-one"===e.type||0>i,s=o?null:[],a=o?i+1:r.length,u=0>i?a:o?i:0;for(;a>u;u++)if(n=r[u],!(!n.selected&&u!==i||(x.support.optDisabled?n.disabled:null!==n.getAttribute("disabled"))||n.parentNode.disabled&&x.nodeName(n.parentNode,"optgroup"))){if(t=x(n).val(),o)return t;s.push(t)}return s},set:function(e,t){var n,r,i=e.options,o=x.makeArray(t),s=i.length;while(s--)r=i[s],(r.selected=x.inArray(x(r).val(),o)>=0)&&(n=!0);return n||(e.selectedIndex=-1),o}}},attr:function(e,t,n){var i,o,s=e.nodeType;if(e&&3!==s&&8!==s&&2!==s)return typeof e.getAttribute===r?x.prop(e,t,n):(1===s&&x.isXMLDoc(e)||(t=t.toLowerCase(),i=x.attrHooks[t]||(x.expr.match.bool.test(t)?R:W)),n===undefined?i&&"get"in i&&null!==(o=i.get(e,t))?o:(o=x.find.attr(e,t),null==o?undefined:o):null!==n?i&&"set"in i&&(o=i.set(e,n,t))!==undefined?o:(e.setAttribute(t,n+""),n):(x.removeAttr(e,t),undefined))},removeAttr:function(e,t){var n,r,i=0,o=t&&t.match(w);if(o&&1===e.nodeType)while(n=o[i++])r=x.propFix[n]||n,x.expr.match.bool.test(n)&&(e[r]=!1),e.removeAttribute(n)},attrHooks:{type:{set:function(e,t){if(!x.support.radioValue&&"radio"===t&&x.nodeName(e,"input")){var n=e.value;return e.setAttribute("type",t),n&&(e.value=n),t}}}},propFix:{"for":"htmlFor","class":"className"},prop:function(e,t,n){var r,i,o,s=e.nodeType;if(e&&3!==s&&8!==s&&2!==s)return o=1!==s||!x.isXMLDoc(e),o&&(t=x.propFix[t]||t,i=x.propHooks[t]),n!==undefined?i&&"set"in i&&(r=i.set(e,n,t))!==undefined?r:e[t]=n:i&&"get"in i&&null!==(r=i.get(e,t))?r:e[t]},propHooks:{tabIndex:{get:function(e){return e.hasAttribute("tabindex")||$.test(e.nodeName)||e.href?e.tabIndex:-1}}}}),R={set:function(e,t,n){return t===!1?x.removeAttr(e,n):e.setAttribute(n,n),n}},x.each(x.expr.match.bool.source.match(/\\w+/g),function(e,t){var n=x.expr.attrHandle[t]||x.find.attr;x.expr.attrHandle[t]=function(e,t,r){var i=x.expr.attrHandle[t],o=r?undefined:(x.expr.attrHandle[t]=undefined)!=n(e,t,r)?t.toLowerCase():null;return x.expr.attrHandle[t]=i,o}}),x.support.optSelected||(x.propHooks.selected={get:function(e){var t=e.parentNode;return t&&t.parentNode&&t.parentNode.selectedIndex,null}}),x.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){x.propFix[this.toLowerCase()]=this}),x.each(["radio","checkbox"],function(){x.valHooks[this]={set:function(e,t){return x.isArray(t)?e.checked=x.inArray(x(e).val(),t)>=0:undefined}},x.support.checkOn||(x.valHooks[this].get=function(e){return null===e.getAttribute("value")?"on":e.value})});var I=/^key/,z=/^(?:mouse|contextmenu)|click/,_=/^(?:focusinfocus|focusoutblur)$/,X=/^([^.]*)(?:\\.(.+)|)$/;function U(){return!0}function Y(){return!1}function V(){try{return o.activeElement}catch(e){}}x.event={global:{},add:function(e,t,n,i,o){var s,a,u,l,c,f,p,h,d,g,m,y=j.get(e);if(y){n.handler&&(s=n,n=s.handler,o=s.selector),n.guid||(n.guid=x.guid++),(l=y.events)||(l=y.events={}),(a=y.handle)||(a=y.handle=function(e){return typeof 
x===r||e&&x.event.triggered===e.type?undefined:x.event.dispatch.apply(a.elem,arguments)},a.elem=e),t=(t||"").match(w)||[""],c=t.length;while(c--)u=X.exec(t[c])||[],d=m=u[1],g=(u[2]||"").split(".").sort(),d&&(p=x.event.special[d]||{},d=(o?p.delegateType:p.bindType)||d,p=x.event.special[d]||{},f=x.extend({type:d,origType:m,data:i,handler:n,guid:n.guid,selector:o,needsContext:o&&x.expr.match.needsContext.test(o),namespace:g.join(".")},s),(h=l[d])||(h=l[d]=[],h.delegateCount=0,p.setup&&p.setup.call(e,i,g,a)!==!1||e.addEventListener&&e.addEventListener(d,a,!1)),p.add&&(p.add.call(e,f),f.handler.guid||(f.handler.guid=n.guid)),o?h.splice(h.delegateCount++,0,f):h.push(f),x.event.global[d]=!0);e=null}},remove:function(e,t,n,r,i){var o,s,a,u,l,c,f,p,h,d,g,m=j.hasData(e)&&j.get(e);if(m&&(u=m.events)){t=(t||"").match(w)||[""],l=t.length;while(l--)if(a=X.exec(t[l])||[],h=g=a[1],d=(a[2]||"").split(".").sort(),h){f=x.event.special[h]||{},h=(r?f.delegateType:f.bindType)||h,p=u[h]||[],a=a[2]&&RegExp("(^|\\\\.)"+d.join("\\\\.(?:.*\\\\.|)")+"(\\\\.|$)"),s=o=p.length;while(o--)c=p[o],!i&&g!==c.origType||n&&n.guid!==c.guid||a&&!a.test(c.namespace)||r&&r!==c.selector&&("**"!==r||!c.selector)||(p.splice(o,1),c.selector&&p.delegateCount--,f.remove&&f.remove.call(e,c));s&&!p.length&&(f.teardown&&f.teardown.call(e,d,m.handle)!==!1||x.removeEvent(e,h,m.handle),delete u[h])}else for(h in u)x.event.remove(e,h+t[l],n,r,!0);x.isEmptyObject(u)&&(delete m.handle,j.remove(e,"events"))}},trigger:function(t,n,r,i){var s,a,u,l,c,f,p,h=[r||o],d=y.call(t,"type")?t.type:t,g=y.call(t,"namespace")?t.namespace.split("."):[];if(a=u=r=r||o,3!==r.nodeType&&8!==r.nodeType&&!_.test(d+x.event.triggered)&&(d.indexOf(".")>=0&&(g=d.split("."),d=g.shift(),g.sort()),c=0>d.indexOf(":")&&"on"+d,t=t[x.expando]?t:new x.Event(d,"object"==typeof t&&t),t.isTrigger=i?2:3,t.namespace=g.join("."),t.namespace_re=t.namespace?RegExp("(^|\\\\.)"+g.join("\\\\.(?:.*\\\\.|)")+"(\\\\.|$)"):null,t.result=undefined,t.target||(t.target=r),n=null==n?[t]:x.makeArray(n,[t]),p=x.event.special[d]||{},i||!p.trigger||p.trigger.apply(r,n)!==!1)){if(!i&&!p.noBubble&&!x.isWindow(r)){for(l=p.delegateType||d,_.test(l+d)||(a=a.parentNode);a;a=a.parentNode)h.push(a),u=a;u===(r.ownerDocument||o)&&h.push(u.defaultView||u.parentWindow||e)}s=0;while((a=h[s++])&&!t.isPropagationStopped())t.type=s>1?l:p.bindType||d,f=(j.get(a,"events")||{})[t.type]&&j.get(a,"handle"),f&&f.apply(a,n),f=c&&a[c],f&&x.acceptData(a)&&f.apply&&f.apply(a,n)===!1&&t.preventDefault();return t.type=d,i||t.isDefaultPrevented()||p._default&&p._default.apply(h.pop(),n)!==!1||!x.acceptData(r)||c&&x.isFunction(r[d])&&!x.isWindow(r)&&(u=r[c],u&&(r[c]=null),x.event.triggered=d,r[d](),x.event.triggered=undefined,u&&(r[c]=u)),t.result}},dispatch:function(e){e=x.event.fix(e);var t,n,r,i,o,s=[],a=d.call(arguments),u=(j.get(this,"events")||{})[e.type]||[],l=x.event.special[e.type]||{};if(a[0]=e,e.delegateTarget=this,!l.preDispatch||l.preDispatch.call(this,e)!==!1){s=x.event.handlers.call(this,e,u),t=0;while((i=s[t++])&&!e.isPropagationStopped()){e.currentTarget=i.elem,n=0;while((o=i.handlers[n++])&&!e.isImmediatePropagationStopped())(!e.namespace_re||e.namespace_re.test(o.namespace))&&(e.handleObj=o,e.data=o.data,r=((x.event.special[o.origType]||{}).handle||o.handler).apply(i.elem,a),r!==undefined&&(e.result=r)===!1&&(e.preventDefault(),e.stopPropagation()))}return l.postDispatch&&l.postDispatch.call(this,e),e.result}},handlers:function(e,t){var 
n,r,i,o,s=[],a=t.delegateCount,u=e.target;if(a&&u.nodeType&&(!e.button||"click"!==e.type))for(;u!==this;u=u.parentNode||this)if(u.disabled!==!0||"click"!==e.type){for(r=[],n=0;a>n;n++)o=t[n],i=o.selector+" ",r[i]===undefined&&(r[i]=o.needsContext?x(i,this).index(u)>=0:x.find(i,this,null,[u]).length),r[i]&&r.push(o);r.length&&s.push({elem:u,handlers:r})}return t.length>a&&s.push({elem:this,handlers:t.slice(a)}),s},props:"altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(e,t){return null==e.which&&(e.which=null!=t.charCode?t.charCode:t.keyCode),e}},mouseHooks:{props:"button buttons clientX clientY offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(e,t){var n,r,i,s=t.button;return null==e.pageX&&null!=t.clientX&&(n=e.target.ownerDocument||o,r=n.documentElement,i=n.body,e.pageX=t.clientX+(r&&r.scrollLeft||i&&i.scrollLeft||0)-(r&&r.clientLeft||i&&i.clientLeft||0),e.pageY=t.clientY+(r&&r.scrollTop||i&&i.scrollTop||0)-(r&&r.clientTop||i&&i.clientTop||0)),e.which||s===undefined||(e.which=1&s?1:2&s?3:4&s?2:0),e}},fix:function(e){if(e[x.expando])return e;var t,n,r,i=e.type,s=e,a=this.fixHooks[i];a||(this.fixHooks[i]=a=z.test(i)?this.mouseHooks:I.test(i)?this.keyHooks:{}),r=a.props?this.props.concat(a.props):this.props,e=new x.Event(s),t=r.length;while(t--)n=r[t],e[n]=s[n];return e.target||(e.target=o),3===e.target.nodeType&&(e.target=e.target.parentNode),a.filter?a.filter(e,s):e},special:{load:{noBubble:!0},focus:{trigger:function(){return this!==V()&&this.focus?(this.focus(),!1):undefined},delegateType:"focusin"},blur:{trigger:function(){return this===V()&&this.blur?(this.blur(),!1):undefined},delegateType:"focusout"},click:{trigger:function(){return"checkbox"===this.type&&this.click&&x.nodeName(this,"input")?(this.click(),!1):undefined},_default:function(e){return x.nodeName(e.target,"a")}},beforeunload:{postDispatch:function(e){e.result!==undefined&&(e.originalEvent.returnValue=e.result)}}},simulate:function(e,t,n,r){var i=x.extend(new x.Event,n,{type:e,isSimulated:!0,originalEvent:{}});r?x.event.trigger(i,null,t):x.event.dispatch.call(t,i),i.isDefaultPrevented()&&n.preventDefault()}},x.removeEvent=function(e,t,n){e.removeEventListener&&e.removeEventListener(t,n,!1)},x.Event=function(e,t){return this instanceof x.Event?(e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented||e.getPreventDefault&&e.getPreventDefault()?U:Y):this.type=e,t&&x.extend(this,t),this.timeStamp=e&&e.timeStamp||x.now(),this[x.expando]=!0,undefined):new x.Event(e,t)},x.Event.prototype={isDefaultPrevented:Y,isPropagationStopped:Y,isImmediatePropagationStopped:Y,preventDefault:function(){var e=this.originalEvent;this.isDefaultPrevented=U,e&&e.preventDefault&&e.preventDefault()},stopPropagation:function(){var e=this.originalEvent;this.isPropagationStopped=U,e&&e.stopPropagation&&e.stopPropagation()},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=U,this.stopPropagation()}},x.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(e,t){x.event.special[e]={delegateType:t,bindType:t,handle:function(e){var n,r=this,i=e.relatedTarget,o=e.handleObj;return(!i||i!==r&&!x.contains(r,i))&&(e.type=o.origType,n=o.handler.apply(this,arguments),e.type=t),n}}}),x.support.focusinBubbles||x.each({focus:"focusin",blur:"focusout"},function(e,t){var 
n=0,r=function(e){x.event.simulate(t,e.target,x.event.fix(e),!0)};x.event.special[t]={setup:function(){0===n++&&o.addEventListener(e,r,!0)},teardown:function(){0===--n&&o.removeEventListener(e,r,!0)}}}),x.fn.extend({on:function(e,t,n,r,i){var o,s;if("object"==typeof e){"string"!=typeof t&&(n=n||t,t=undefined);for(s in e)this.on(s,t,n,e[s],i);return this}if(null==n&&null==r?(r=t,n=t=undefined):null==r&&("string"==typeof t?(r=n,n=undefined):(r=n,n=t,t=undefined)),r===!1)r=Y;else if(!r)return this;return 1===i&&(o=r,r=function(e){return x().off(e),o.apply(this,arguments)},r.guid=o.guid||(o.guid=x.guid++)),this.each(function(){x.event.add(this,e,r,n,t)})},one:function(e,t,n,r){return this.on(e,t,n,r,1)},off:function(e,t,n){var r,i;if(e&&e.preventDefault&&e.handleObj)return r=e.handleObj,x(e.delegateTarget).off(r.namespace?r.origType+"."+r.namespace:r.origType,r.selector,r.handler),this;if("object"==typeof e){for(i in e)this.off(i,t,e[i]);return this}return(t===!1||"function"==typeof t)&&(n=t,t=undefined),n===!1&&(n=Y),this.each(function(){x.event.remove(this,e,n,t)})},trigger:function(e,t){return this.each(function(){x.event.trigger(e,t,this)})},triggerHandler:function(e,t){var n=this[0];return n?x.event.trigger(e,t,n,!0):undefined}});var Q=/^.[^:#\\[\\.,]*$/,G=/^(?:parents|prev(?:Until|All))/,K=x.expr.match.needsContext,J={children:!0,contents:!0,next:!0,prev:!0};x.fn.extend({find:function(e){var t,n=[],r=this,i=r.length;if("string"!=typeof e)return this.pushStack(x(e).filter(function(){for(t=0;i>t;t++)if(x.contains(r[t],this))return!0}));for(t=0;i>t;t++)x.find(e,r[t],n);return n=this.pushStack(i>1?x.unique(n):n),n.selector=this.selector?this.selector+" "+e:e,n},has:function(e){var t=x(e,this),n=t.length;return this.filter(function(){var e=0;for(;n>e;e++)if(x.contains(this,t[e]))return!0})},not:function(e){return this.pushStack(et(this,e||[],!0))},filter:function(e){return this.pushStack(et(this,e||[],!1))},is:function(e){return!!et(this,"string"==typeof e&&K.test(e)?x(e):e||[],!1).length},closest:function(e,t){var n,r=0,i=this.length,o=[],s=K.test(e)||"string"!=typeof e?x(e,t||this.context):0;for(;i>r;r++)for(n=this[r];n&&n!==t;n=n.parentNode)if(11>n.nodeType&&(s?s.index(n)>-1:1===n.nodeType&&x.find.matchesSelector(n,e))){n=o.push(n);break}return this.pushStack(o.length>1?x.unique(o):o)},index:function(e){return e?"string"==typeof e?g.call(x(e),this[0]):g.call(this,e.jquery?e[0]:e):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(e,t){var n="string"==typeof e?x(e,t):x.makeArray(e&&e.nodeType?[e]:e),r=x.merge(this.get(),n);return this.pushStack(x.unique(r))},addBack:function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}});function Z(e,t){while((e=e[t])&&1!==e.nodeType);return e}x.each({parent:function(e){var t=e.parentNode;return t&&11!==t.nodeType?t:null},parents:function(e){return x.dir(e,"parentNode")},parentsUntil:function(e,t,n){return x.dir(e,"parentNode",n)},next:function(e){return Z(e,"nextSibling")},prev:function(e){return Z(e,"previousSibling")},nextAll:function(e){return x.dir(e,"nextSibling")},prevAll:function(e){return x.dir(e,"previousSibling")},nextUntil:function(e,t,n){return x.dir(e,"nextSibling",n)},prevUntil:function(e,t,n){return x.dir(e,"previousSibling",n)},siblings:function(e){return x.sibling((e.parentNode||{}).firstChild,e)},children:function(e){return x.sibling(e.firstChild)},contents:function(e){return e.contentDocument||x.merge([],e.childNodes)}},function(e,t){x.fn[e]=function(n,r){var 
i=x.map(this,t,n);return"Until"!==e.slice(-5)&&(r=n),r&&"string"==typeof r&&(i=x.filter(r,i)),this.length>1&&(J[e]||x.unique(i),G.test(e)&&i.reverse()),this.pushStack(i)}}),x.extend({filter:function(e,t,n){var r=t[0];return n&&(e=":not("+e+")"),1===t.length&&1===r.nodeType?x.find.matchesSelector(r,e)?[r]:[]:x.find.matches(e,x.grep(t,function(e){return 1===e.nodeType}))},dir:function(e,t,n){var r=[],i=n!==undefined;while((e=e[t])&&9!==e.nodeType)if(1===e.nodeType){if(i&&x(e).is(n))break;r.push(e)}return r},sibling:function(e,t){var n=[];for(;e;e=e.nextSibling)1===e.nodeType&&e!==t&&n.push(e);return n}});function et(e,t,n){if(x.isFunction(t))return x.grep(e,function(e,r){return!!t.call(e,r,e)!==n});if(t.nodeType)return x.grep(e,function(e){return e===t!==n});if("string"==typeof t){if(Q.test(t))return x.filter(t,e,n);t=x.filter(t,e)}return x.grep(e,function(e){return g.call(t,e)>=0!==n})}var tt=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\\w:]+)[^>]*)\\/>/gi,nt=/<([\\w:]+)/,rt=/<|&#?\\w+;/,it=/<(?:script|style|link)/i,ot=/^(?:checkbox|radio)$/i,st=/checked\\s*(?:[^=]|=\\s*.checked.)/i,at=/^$|\\/(?:java|ecma)script/i,ut=/^true\\/(.*)/,lt=/^\\s*<!(?:\\[CDATA\\[|--)|(?:\\]\\]|--)>\\s*$/g,ct={option:[1,"<select multiple='multiple'>","</select>"],thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};ct.optgroup=ct.option,ct.tbody=ct.tfoot=ct.colgroup=ct.caption=ct.thead,ct.th=ct.td,x.fn.extend({text:function(e){return x.access(this,function(e){return e===undefined?x.text(this):this.empty().append((this[0]&&this[0].ownerDocument||o).createTextNode(e))},null,e,arguments.length)},append:function(){return this.domManip(arguments,function(e){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var t=ft(this,e);t.appendChild(e)}})},prepend:function(){return this.domManip(arguments,function(e){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var t=ft(this,e);t.insertBefore(e,t.firstChild)}})},before:function(){return this.domManip(arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this)})},after:function(){return this.domManip(arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this.nextSibling)})},remove:function(e,t){var n,r=e?x.filter(e,this):this,i=0;for(;null!=(n=r[i]);i++)t||1!==n.nodeType||x.cleanData(mt(n)),n.parentNode&&(t&&x.contains(n.ownerDocument,n)&&dt(mt(n,"script")),n.parentNode.removeChild(n));return this},empty:function(){var e,t=0;for(;null!=(e=this[t]);t++)1===e.nodeType&&(x.cleanData(mt(e,!1)),e.textContent="");return this},clone:function(e,t){return e=null==e?!1:e,t=null==t?e:t,this.map(function(){return x.clone(this,e,t)})},html:function(e){return x.access(this,function(e){var t=this[0]||{},n=0,r=this.length;if(e===undefined&&1===t.nodeType)return t.innerHTML;if("string"==typeof e&&!it.test(e)&&!ct[(nt.exec(e)||["",""])[1].toLowerCase()]){e=e.replace(tt,"<$1></$2>");try{for(;r>n;n++)t=this[n]||{},1===t.nodeType&&(x.cleanData(mt(t,!1)),t.innerHTML=e);t=0}catch(i){}}t&&this.empty().append(e)},null,e,arguments.length)},replaceWith:function(){var e=x.map(this,function(e){return[e.nextSibling,e.parentNode]}),t=0;return this.domManip(arguments,function(n){var r=e[t++],i=e[t++];i&&(r&&r.parentNode!==i&&(r=this.nextSibling),x(this).remove(),i.insertBefore(n,r))},!0),t?this:this.remove()},detach:function(e){return this.remove(e,!0)},domManip:function(e,t,n){e=p.apply([],e);var 
r,i,o,s,a,u,l=0,c=this.length,f=this,h=c-1,d=e[0],g=x.isFunction(d);if(g||!(1>=c||"string"!=typeof d||x.support.checkClone)&&st.test(d))return this.each(function(r){var i=f.eq(r);g&&(e[0]=d.call(this,r,i.html())),i.domManip(e,t,n)});if(c&&(r=x.buildFragment(e,this[0].ownerDocument,!1,!n&&this),i=r.firstChild,1===r.childNodes.length&&(r=i),i)){for(o=x.map(mt(r,"script"),pt),s=o.length;c>l;l++)a=r,l!==h&&(a=x.clone(a,!0,!0),s&&x.merge(o,mt(a,"script"))),t.call(this[l],a,l);if(s)for(u=o[o.length-1].ownerDocument,x.map(o,ht),l=0;s>l;l++)a=o[l],at.test(a.type||"")&&!j.access(a,"globalEval")&&x.contains(u,a)&&(a.src?x._evalUrl(a.src):x.globalEval(a.textContent.replace(lt,"")))}return this}}),x.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(e,t){x.fn[e]=function(e){var n,r=[],i=x(e),o=i.length-1,s=0;for(;o>=s;s++)n=s===o?this:this.clone(!0),x(i[s])[t](n),h.apply(r,n.get());return this.pushStack(r)}}),x.extend({clone:function(e,t,n){var r,i,o,s,a=e.cloneNode(!0),u=x.contains(e.ownerDocument,e);if(!(x.support.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||x.isXMLDoc(e)))for(s=mt(a),o=mt(e),r=0,i=o.length;i>r;r++)yt(o[r],s[r]);if(t)if(n)for(o=o||mt(e),s=s||mt(a),r=0,i=o.length;i>r;r++)gt(o[r],s[r]);else gt(e,a);return s=mt(a,"script"),s.length>0&&dt(s,!u&&mt(e,"script")),a},buildFragment:function(e,t,n,r){var i,o,s,a,u,l,c=0,f=e.length,p=t.createDocumentFragment(),h=[];for(;f>c;c++)if(i=e[c],i||0===i)if("object"===x.type(i))x.merge(h,i.nodeType?[i]:i);else if(rt.test(i)){o=o||p.appendChild(t.createElement("div")),s=(nt.exec(i)||["",""])[1].toLowerCase(),a=ct[s]||ct._default,o.innerHTML=a[1]+i.replace(tt,"<$1></$2>")+a[2],l=a[0];while(l--)o=o.lastChild;x.merge(h,o.childNodes),o=p.firstChild,o.textContent=""}else h.push(t.createTextNode(i));p.textContent="",c=0;while(i=h[c++])if((!r||-1===x.inArray(i,r))&&(u=x.contains(i.ownerDocument,i),o=mt(p.appendChild(i),"script"),u&&dt(o),n)){l=0;while(i=o[l++])at.test(i.type||"")&&n.push(i)}return p},cleanData:function(e){var t,n,r,i,o,s,a=x.event.special,u=0;for(;(n=e[u])!==undefined;u++){if(P.accepts(n)&&(o=n[j.expando],o&&(t=j.cache[o]))){if(r=Object.keys(t.events||{}),r.length)for(s=0;(i=r[s])!==undefined;s++)a[i]?x.event.remove(n,i):x.removeEvent(n,i,t.handle);j.cache[o]&&delete j.cache[o]}delete q.cache[n[q.expando]]}},_evalUrl:function(e){return x.ajax({url:e,type:"GET",dataType:"script",async:!1,global:!1,"throws":!0})}});function ft(e,t){return x.nodeName(e,"table")&&x.nodeName(1===t.nodeType?t:t.firstChild,"tr")?e.getElementsByTagName("tbody")[0]||e.appendChild(e.ownerDocument.createElement("tbody")):e}function pt(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function ht(e){var t=ut.exec(e.type);return t?e.type=t[1]:e.removeAttribute("type"),e}function dt(e,t){var n=e.length,r=0;for(;n>r;r++)j.set(e[r],"globalEval",!t||j.get(t[r],"globalEval"))}function gt(e,t){var n,r,i,o,s,a,u,l;if(1===t.nodeType){if(j.hasData(e)&&(o=j.access(e),s=j.set(t,o),l=o.events)){delete s.handle,s.events={};for(i in l)for(n=0,r=l[i].length;r>n;n++)x.event.add(t,i,l[i][n])}q.hasData(e)&&(a=q.access(e),u=x.extend({},a),q.set(t,u))}}function mt(e,t){var n=e.getElementsByTagName?e.getElementsByTagName(t||"*"):e.querySelectorAll?e.querySelectorAll(t||"*"):[];return t===undefined||t&&x.nodeName(e,t)?x.merge([e],n):n}function yt(e,t){var n=t.nodeName.toLowerCase();"input"===n&&ot.test(e.type)?t.checked=e.checked:("input"===n||"textarea"===n)&&(t.defaultValue=e.defaultValue)}var 
vt,xt,bt=/^(none|table(?!-c[ea]).+)/,wt=/^margin/,Tt=RegExp("^("+b+")(.*)$","i"),Ct=RegExp("^("+b+")(?!px)[a-z%]+$","i"),Nt=RegExp("^([+-])=("+b+")","i"),kt={BODY:"block"},Et={position:"absolute",visibility:"hidden",display:"block"},Dt={letterSpacing:0,fontWeight:400},At=["Top","Right","Bottom","Left"],St=["Webkit","O","Moz","ms"];function Lt(e,t){if(t in e)return t;var n=t.charAt(0).toUpperCase()+t.slice(1),r=t,i=St.length;while(i--)if(t=St[i]+n,t in e)return t;return r}function qt(e,t){return e=t||e,"none"===x.css(e,"display")||!x.contains(e.ownerDocument,e)}function jt(t){return e.getComputedStyle(t,null)}function Ht(e,t){var n,r,i,o=[],s=0,a=e.length;for(;a>s;s++)r=e[s],r.style&&(o[s]=j.get(r,"olddisplay"),n=r.style.display,t?(o[s]||"none"!==n||(r.style.display=""),""===r.style.display&&qt(r)&&(o[s]=j.access(r,"olddisplay",Wt(r.nodeName)))):o[s]||(i=qt(r),(n&&"none"!==n||!i)&&j.set(r,"olddisplay",i?n:x.css(r,"display"))));for(s=0;a>s;s++)r=e[s],r.style&&(t&&"none"!==r.style.display&&""!==r.style.display||(r.style.display=t?o[s]||"":"none"));return e}x.fn.extend({css:function(e,t){return x.access(this,function(e,t,n){var r,i,o={},s=0;if(x.isArray(t)){for(r=jt(e),i=t.length;i>s;s++)o[t[s]]=x.css(e,t[s],!1,r);return o}return n!==undefined?x.style(e,t,n):x.css(e,t)},e,t,arguments.length>1)},show:function(){return Ht(this,!0)},hide:function(){return Ht(this)},toggle:function(e){return"boolean"==typeof e?e?this.show():this.hide():this.each(function(){qt(this)?x(this).show():x(this).hide()})}}),x.extend({cssHooks:{opacity:{get:function(e,t){if(t){var n=vt(e,"opacity");return""===n?"1":n}}}},cssNumber:{columnCount:!0,fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":"cssFloat"},style:function(e,t,n,r){if(e&&3!==e.nodeType&&8!==e.nodeType&&e.style){var i,o,s,a=x.camelCase(t),u=e.style;return t=x.cssProps[a]||(x.cssProps[a]=Lt(u,a)),s=x.cssHooks[t]||x.cssHooks[a],n===undefined?s&&"get"in s&&(i=s.get(e,!1,r))!==undefined?i:u[t]:(o=typeof n,"string"===o&&(i=Nt.exec(n))&&(n=(i[1]+1)*i[2]+parseFloat(x.css(e,t)),o="number"),null==n||"number"===o&&isNaN(n)||("number"!==o||x.cssNumber[a]||(n+="px"),x.support.clearCloneStyle||""!==n||0!==t.indexOf("background")||(u[t]="inherit"),s&&"set"in s&&(n=s.set(e,n,r))===undefined||(u[t]=n)),undefined)}},css:function(e,t,n,r){var i,o,s,a=x.camelCase(t);return t=x.cssProps[a]||(x.cssProps[a]=Lt(e.style,a)),s=x.cssHooks[t]||x.cssHooks[a],s&&"get"in s&&(i=s.get(e,!0,n)),i===undefined&&(i=vt(e,t,r)),"normal"===i&&t in Dt&&(i=Dt[t]),""===n||n?(o=parseFloat(i),n===!0||x.isNumeric(o)?o||0:i):i}}),vt=function(e,t,n){var r,i,o,s=n||jt(e),a=s?s.getPropertyValue(t)||s[t]:undefined,u=e.style;return s&&(""!==a||x.contains(e.ownerDocument,e)||(a=x.style(e,t)),Ct.test(a)&&wt.test(t)&&(r=u.width,i=u.minWidth,o=u.maxWidth,u.minWidth=u.maxWidth=u.width=a,a=s.width,u.width=r,u.minWidth=i,u.maxWidth=o)),a};function Ot(e,t,n){var r=Tt.exec(t);return r?Math.max(0,r[1]-(n||0))+(r[2]||"px"):t}function Pt(e,t,n,r,i){var o=n===(r?"border":"content")?4:"width"===t?1:0,s=0;for(;4>o;o+=2)"margin"===n&&(s+=x.css(e,n+At[o],!0,i)),r?("content"===n&&(s-=x.css(e,"padding"+At[o],!0,i)),"margin"!==n&&(s-=x.css(e,"border"+At[o]+"Width",!0,i))):(s+=x.css(e,"padding"+At[o],!0,i),"padding"!==n&&(s+=x.css(e,"border"+At[o]+"Width",!0,i)));return s}function Ft(e,t,n){var 
r=!0,i="width"===t?e.offsetWidth:e.offsetHeight,o=jt(e),s=x.support.boxSizing&&"border-box"===x.css(e,"boxSizing",!1,o);if(0>=i||null==i){if(i=vt(e,t,o),(0>i||null==i)&&(i=e.style[t]),Ct.test(i))return i;r=s&&(x.support.boxSizingReliable||i===e.style[t]),i=parseFloat(i)||0}return i+Pt(e,t,n||(s?"border":"content"),r,o)+"px"}function Wt(e){var t=o,n=kt[e];return n||(n=Rt(e,t),"none"!==n&&n||(xt=(xt||x("<iframe frameborder='0' width='0' height='0'/>").css("cssText","display:block !important")).appendTo(t.documentElement),t=(xt[0].contentWindow||xt[0].contentDocument).document,t.write("<!doctype html><html><body>"),t.close(),n=Rt(e,t),xt.detach()),kt[e]=n),n}function Rt(e,t){var n=x(t.createElement(e)).appendTo(t.body),r=x.css(n[0],"display");return n.remove(),r}x.each(["height","width"],function(e,t){x.cssHooks[t]={get:function(e,n,r){return n?0===e.offsetWidth&&bt.test(x.css(e,"display"))?x.swap(e,Et,function(){return Ft(e,t,r)}):Ft(e,t,r):undefined},set:function(e,n,r){var i=r&&jt(e);return Ot(e,n,r?Pt(e,t,r,x.support.boxSizing&&"border-box"===x.css(e,"boxSizing",!1,i),i):0)}}}),x(function(){x.support.reliableMarginRight||(x.cssHooks.marginRight={get:function(e,t){return t?x.swap(e,{display:"inline-block"},vt,[e,"marginRight"]):undefined}}),!x.support.pixelPosition&&x.fn.position&&x.each(["top","left"],function(e,t){x.cssHooks[t]={get:function(e,n){return n?(n=vt(e,t),Ct.test(n)?x(e).position()[t]+"px":n):undefined}}})}),x.expr&&x.expr.filters&&(x.expr.filters.hidden=function(e){return 0>=e.offsetWidth&&0>=e.offsetHeight},x.expr.filters.visible=function(e){return!x.expr.filters.hidden(e)}),x.each({margin:"",padding:"",border:"Width"},function(e,t){x.cssHooks[e+t]={expand:function(n){var r=0,i={},o="string"==typeof n?n.split(" "):[n];for(;4>r;r++)i[e+At[r]+t]=o[r]||o[r-2]||o[0];return i}},wt.test(e)||(x.cssHooks[e+t].set=Ot)});var Mt=/%20/g,Bt=/\\[\\]$/,$t=/\\r?\\n/g,It=/^(?:submit|button|image|reset|file)$/i,zt=/^(?:input|select|textarea|keygen)/i;x.fn.extend({serialize:function(){return x.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var e=x.prop(this,"elements");return e?x.makeArray(e):this}).filter(function(){var e=this.type;return this.name&&!x(this).is(":disabled")&&zt.test(this.nodeName)&&!It.test(e)&&(this.checked||!ot.test(e))}).map(function(e,t){var n=x(this).val();return null==n?null:x.isArray(n)?x.map(n,function(e){return{name:t.name,value:e.replace($t,"\\r\\n")}}):{name:t.name,value:n.replace($t,"\\r\\n")}}).get()}}),x.param=function(e,t){var n,r=[],i=function(e,t){t=x.isFunction(t)?t():null==t?"":t,r[r.length]=encodeURIComponent(e)+"="+encodeURIComponent(t)};if(t===undefined&&(t=x.ajaxSettings&&x.ajaxSettings.traditional),x.isArray(e)||e.jquery&&!x.isPlainObject(e))x.each(e,function(){i(this.name,this.value)});else for(n in e)_t(n,e[n],t,i);return r.join("&").replace(Mt,"+")};function _t(e,t,n,r){var i;if(x.isArray(t))x.each(t,function(t,i){n||Bt.test(e)?r(e,i):_t(e+"["+("object"==typeof i?t:"")+"]",i,n,r)});else if(n||"object"!==x.type(t))r(e,t);else for(i in t)_t(e+"["+i+"]",t[i],n,r)}x.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(e,t){x.fn[t]=function(e,n){return arguments.length>0?this.on(t,null,e,n):this.trigger(t)}}),x.fn.extend({hover:function(e,t){return this.mouseenter(e).mouseleave(t||e)},bind:function(e,t,n){return 
this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)}});var Xt,Ut,Yt=/^(?:toggle|show|hide)$/,Vt=RegExp("^(?:([+-])=|)("+b+")([a-z%]*)$","i"),Qt=/queueHooks$/,Gt=[nn],Kt={"*":[function(e,t){var n=this.createTween(e,t),r=n.cur(),i=Vt.exec(t),o=i&&i[3]||(x.cssNumber[e]?"":"px"),s=(x.cssNumber[e]||"px"!==o&&+r)&&Vt.exec(x.css(n.elem,e)),a=1,u=20;if(s&&s[3]!==o){o=o||s[3],i=i||[],s=+r||1;do a=a||".5",s/=a,x.style(n.elem,e,s+o);while(a!==(a=n.cur()/r)&&1!==a&&--u)}return i&&(s=n.start=+s||+r||0,n.unit=o,n.end=i[1]?s+(i[1]+1)*i[2]:+i[2]),n}]};function Jt(){return setTimeout(function(){Xt=undefined}),Xt=x.now()}function Zt(e,t,n){var r,i=(Kt[t]||[]).concat(Kt["*"]),o=0,s=i.length;
for(;s>o;o++)if(r=i[o].call(n,t,e))return r}function en(e,t,n){var r,i,o=0,s=Gt.length,a=x.Deferred().always(function(){delete u.elem}),u=function(){if(i)return!1;var t=Xt||Jt(),n=Math.max(0,l.startTime+l.duration-t),r=n/l.duration||0,o=1-r,s=0,u=l.tweens.length;for(;u>s;s++)l.tweens[s].run(o);return a.notifyWith(e,[l,o,n]),1>o&&u?n:(a.resolveWith(e,[l]),!1)},l=a.promise({elem:e,props:x.extend({},t),opts:x.extend(!0,{specialEasing:{}},n),originalProperties:t,originalOptions:n,startTime:Xt||Jt(),duration:n.duration,tweens:[],createTween:function(t,n){var r=x.Tween(e,l.opts,t,n,l.opts.specialEasing[t]||l.opts.easing);return l.tweens.push(r),r},stop:function(t){var n=0,r=t?l.tweens.length:0;if(i)return this;for(i=!0;r>n;n++)l.tweens[n].run(1);return t?a.resolveWith(e,[l,t]):a.rejectWith(e,[l,t]),this}}),c=l.props;for(tn(c,l.opts.specialEasing);s>o;o++)if(r=Gt[o].call(l,e,c,l.opts))return r;return x.map(c,Zt,l),x.isFunction(l.opts.start)&&l.opts.start.call(e,l),x.fx.timer(x.extend(u,{elem:e,anim:l,queue:l.opts.queue})),l.progress(l.opts.progress).done(l.opts.done,l.opts.complete).fail(l.opts.fail).always(l.opts.always)}function tn(e,t){var n,r,i,o,s;for(n in e)if(r=x.camelCase(n),i=t[r],o=e[n],x.isArray(o)&&(i=o[1],o=e[n]=o[0]),n!==r&&(e[r]=o,delete e[n]),s=x.cssHooks[r],s&&"expand"in s){o=s.expand(o),delete e[r];for(n in o)n in e||(e[n]=o[n],t[n]=i)}else t[r]=i}x.Animation=x.extend(en,{tweener:function(e,t){x.isFunction(e)?(t=e,e=["*"]):e=e.split(" ");var n,r=0,i=e.length;for(;i>r;r++)n=e[r],Kt[n]=Kt[n]||[],Kt[n].unshift(t)},prefilter:function(e,t){t?Gt.unshift(e):Gt.push(e)}});function nn(e,t,n){var r,i,o,s,a,u,l=this,c={},f=e.style,p=e.nodeType&&qt(e),h=j.get(e,"fxshow");n.queue||(a=x._queueHooks(e,"fx"),null==a.unqueued&&(a.unqueued=0,u=a.empty.fire,a.empty.fire=function(){a.unqueued||u()}),a.unqueued++,l.always(function(){l.always(function(){a.unqueued--,x.queue(e,"fx").length||a.empty.fire()})})),1===e.nodeType&&("height"in t||"width"in t)&&(n.overflow=[f.overflow,f.overflowX,f.overflowY],"inline"===x.css(e,"display")&&"none"===x.css(e,"float")&&(f.display="inline-block")),n.overflow&&(f.overflow="hidden",l.always(function(){f.overflow=n.overflow[0],f.overflowX=n.overflow[1],f.overflowY=n.overflow[2]}));for(r in t)if(i=t[r],Yt.exec(i)){if(delete t[r],o=o||"toggle"===i,i===(p?"hide":"show")){if("show"!==i||!h||h[r]===undefined)continue;p=!0}c[r]=h&&h[r]||x.style(e,r)}if(!x.isEmptyObject(c)){h?"hidden"in h&&(p=h.hidden):h=j.access(e,"fxshow",{}),o&&(h.hidden=!p),p?x(e).show():l.done(function(){x(e).hide()}),l.done(function(){var t;j.remove(e,"fxshow");for(t in c)x.style(e,t,c[t])});for(r in c)s=Zt(p?h[r]:0,r,l),r in h||(h[r]=s.start,p&&(s.end=s.start,s.start="width"===r||"height"===r?1:0))}}function rn(e,t,n,r,i){return new rn.prototype.init(e,t,n,r,i)}x.Tween=rn,rn.prototype={constructor:rn,init:function(e,t,n,r,i,o){this.elem=e,this.prop=n,this.easing=i||"swing",this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=o||(x.cssNumber[n]?"":"px")},cur:function(){var e=rn.propHooks[this.prop];return e&&e.get?e.get(this):rn.propHooks._default.get(this)},run:function(e){var t,n=rn.propHooks[this.prop];return this.pos=t=this.options.duration?x.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):rn.propHooks._default.set(this),this}},rn.prototype.init.prototype=rn.prototype,rn.propHooks={_default:{get:function(e){var t;return 
null==e.elem[e.prop]||e.elem.style&&null!=e.elem.style[e.prop]?(t=x.css(e.elem,e.prop,""),t&&"auto"!==t?t:0):e.elem[e.prop]},set:function(e){x.fx.step[e.prop]?x.fx.step[e.prop](e):e.elem.style&&(null!=e.elem.style[x.cssProps[e.prop]]||x.cssHooks[e.prop])?x.style(e.elem,e.prop,e.now+e.unit):e.elem[e.prop]=e.now}}},rn.propHooks.scrollTop=rn.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},x.each(["toggle","show","hide"],function(e,t){var n=x.fn[t];x.fn[t]=function(e,r,i){return null==e||"boolean"==typeof e?n.apply(this,arguments):this.animate(on(t,!0),e,r,i)}}),x.fn.extend({fadeTo:function(e,t,n,r){return this.filter(qt).css("opacity",0).show().end().animate({opacity:t},e,n,r)},animate:function(e,t,n,r){var i=x.isEmptyObject(e),o=x.speed(t,n,r),s=function(){var t=en(this,x.extend({},e),o);(i||j.get(this,"finish"))&&t.stop(!0)};return s.finish=s,i||o.queue===!1?this.each(s):this.queue(o.queue,s)},stop:function(e,t,n){var r=function(e){var t=e.stop;delete e.stop,t(n)};return"string"!=typeof e&&(n=t,t=e,e=undefined),t&&e!==!1&&this.queue(e||"fx",[]),this.each(function(){var t=!0,i=null!=e&&e+"queueHooks",o=x.timers,s=j.get(this);if(i)s[i]&&s[i].stop&&r(s[i]);else for(i in s)s[i]&&s[i].stop&&Qt.test(i)&&r(s[i]);for(i=o.length;i--;)o[i].elem!==this||null!=e&&o[i].queue!==e||(o[i].anim.stop(n),t=!1,o.splice(i,1));(t||!n)&&x.dequeue(this,e)})},finish:function(e){return e!==!1&&(e=e||"fx"),this.each(function(){var t,n=j.get(this),r=n[e+"queue"],i=n[e+"queueHooks"],o=x.timers,s=r?r.length:0;for(n.finish=!0,x.queue(this,e,[]),i&&i.stop&&i.stop.call(this,!0),t=o.length;t--;)o[t].elem===this&&o[t].queue===e&&(o[t].anim.stop(!0),o.splice(t,1));for(t=0;s>t;t++)r[t]&&r[t].finish&&r[t].finish.call(this);delete n.finish})}});function on(e,t){var n,r={height:e},i=0;for(t=t?1:0;4>i;i+=2-t)n=At[i],r["margin"+n]=r["padding"+n]=e;return t&&(r.opacity=r.width=e),r}x.each({slideDown:on("show"),slideUp:on("hide"),slideToggle:on("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(e,t){x.fn[e]=function(e,n,r){return this.animate(t,e,n,r)}}),x.speed=function(e,t,n){var r=e&&"object"==typeof e?x.extend({},e):{complete:n||!n&&t||x.isFunction(e)&&e,duration:e,easing:n&&t||t&&!x.isFunction(t)&&t};return r.duration=x.fx.off?0:"number"==typeof r.duration?r.duration:r.duration in x.fx.speeds?x.fx.speeds[r.duration]:x.fx.speeds._default,(null==r.queue||r.queue===!0)&&(r.queue="fx"),r.old=r.complete,r.complete=function(){x.isFunction(r.old)&&r.old.call(this),r.queue&&x.dequeue(this,r.queue)},r},x.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2}},x.timers=[],x.fx=rn.prototype.init,x.fx.tick=function(){var e,t=x.timers,n=0;for(Xt=x.now();t.length>n;n++)e=t[n],e()||t[n]!==e||t.splice(n--,1);t.length||x.fx.stop(),Xt=undefined},x.fx.timer=function(e){e()&&x.timers.push(e)&&x.fx.start()},x.fx.interval=13,x.fx.start=function(){Ut||(Ut=setInterval(x.fx.tick,x.fx.interval))},x.fx.stop=function(){clearInterval(Ut),Ut=null},x.fx.speeds={slow:600,fast:200,_default:400},x.fx.step={},x.expr&&x.expr.filters&&(x.expr.filters.animated=function(e){return x.grep(x.timers,function(t){return e===t.elem}).length}),x.fn.offset=function(e){if(arguments.length)return e===undefined?this:this.each(function(t){x.offset.setOffset(this,e,t)});var t,n,i=this[0],o={top:0,left:0},s=i&&i.ownerDocument;if(s)return t=s.documentElement,x.contains(t,i)?(typeof 
i.getBoundingClientRect!==r&&(o=i.getBoundingClientRect()),n=sn(s),{top:o.top+n.pageYOffset-t.clientTop,left:o.left+n.pageXOffset-t.clientLeft}):o},x.offset={setOffset:function(e,t,n){var r,i,o,s,a,u,l,c=x.css(e,"position"),f=x(e),p={};"static"===c&&(e.style.position="relative"),a=f.offset(),o=x.css(e,"top"),u=x.css(e,"left"),l=("absolute"===c||"fixed"===c)&&(o+u).indexOf("auto")>-1,l?(r=f.position(),s=r.top,i=r.left):(s=parseFloat(o)||0,i=parseFloat(u)||0),x.isFunction(t)&&(t=t.call(e,n,a)),null!=t.top&&(p.top=t.top-a.top+s),null!=t.left&&(p.left=t.left-a.left+i),"using"in t?t.using.call(e,p):f.css(p)}},x.fn.extend({position:function(){if(this[0]){var e,t,n=this[0],r={top:0,left:0};return"fixed"===x.css(n,"position")?t=n.getBoundingClientRect():(e=this.offsetParent(),t=this.offset(),x.nodeName(e[0],"html")||(r=e.offset()),r.top+=x.css(e[0],"borderTopWidth",!0),r.left+=x.css(e[0],"borderLeftWidth",!0)),{top:t.top-r.top-x.css(n,"marginTop",!0),left:t.left-r.left-x.css(n,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent||s;while(e&&!x.nodeName(e,"html")&&"static"===x.css(e,"position"))e=e.offsetParent;return e||s})}}),x.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,n){var r="pageYOffset"===n;x.fn[t]=function(i){return x.access(this,function(t,i,o){var s=sn(t);return o===undefined?s?s[n]:t[i]:(s?s.scrollTo(r?e.pageXOffset:o,r?o:e.pageYOffset):t[i]=o,undefined)},t,i,arguments.length,null)}});function sn(e){return x.isWindow(e)?e:9===e.nodeType&&e.defaultView}x.each({Height:"height",Width:"width"},function(e,t){x.each({padding:"inner"+e,content:t,"":"outer"+e},function(n,r){x.fn[r]=function(r,i){var o=arguments.length&&(n||"boolean"!=typeof r),s=n||(r===!0||i===!0?"margin":"border");return x.access(this,function(t,n,r){var i;return x.isWindow(t)?t.document.documentElement["client"+e]:9===t.nodeType?(i=t.documentElement,Math.max(t.body["scroll"+e],i["scroll"+e],t.body["offset"+e],i["offset"+e],i["client"+e])):r===undefined?x.css(t,n,s):x.style(t,n,r,s)},t,o?r:undefined,o,null)}})}),"object"==typeof module&&module&&"object"==typeof module.exports?module.exports=x:"function"==typeof define&&define.amd&&define("jquery",[],function(){return x}),"object"==typeof e&&"object"==typeof e.document&&(e.jQuery=e.$=x)})(window);
"""
end
end #defmacro jquery
end #defmodule
| 210.026005 | 32,220 | 0.613973 |
034b87e18338f3dd4f60153171537012e0620b03 | 7,281 | exs | Elixir | lib/elixir/test/elixir/stream_test.exs | MSch/elixir | fc42dc9bb76ec1fdcfcbdbfb11fea6a845a62fca | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/stream_test.exs | MSch/elixir | fc42dc9bb76ec1fdcfcbdbfb11fea6a845a62fca | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/stream_test.exs | MSch/elixir | fc42dc9bb76ec1fdcfcbdbfb11fea6a845a62fca | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
defmodule StreamTest do
use ExUnit.Case, async: true
test "streams as enumerables" do
stream = Stream.map([1,2,3], &(&1 * 2))
# Reduce
assert Enum.map(stream, &(&1 + 1)) == [3,5,7]
# Member
assert Enum.member?(stream, 4)
refute Enum.member?(stream, 1)
# Count
assert Enum.count(stream) == 3
end
test "streams are composable" do
stream = Stream.map([1,2,3], &(&1 * 2))
assert is_lazy(stream)
stream = Stream.map(stream, &(&1 + 1))
assert is_lazy(stream)
assert Enum.to_list(stream) == [3,5,7]
end
test "concat_1" do
stream = Stream.concat([1..3, [], [4, 5, 6], [], 7..9])
assert is_function(stream)
assert Enum.to_list(stream) == [1,2,3,4,5,6,7,8,9]
assert Enum.take(stream, 5) == [1,2,3,4,5]
stream = Stream.concat([1..3, [4, 5, 6], Stream.cycle(7..100)])
assert is_function(stream)
assert Enum.take(stream, 13) == [1,2,3,4,5,6,7,8,9,10,11,12,13]
end
test "concat_2" do
stream = Stream.concat(1..3, 4..6)
assert is_function(stream)
assert Stream.cycle(stream) |> Enum.take(16) == [1,2,3,4,5,6,1,2,3,4,5,6,1,2,3,4]
stream = Stream.concat(1..3, [])
assert is_function(stream)
assert Stream.cycle(stream) |> Enum.take(5) == [1,2,3,1,2]
stream = Stream.concat(1..6, Stream.cycle(7..9))
assert is_function(stream)
assert Stream.drop(stream, 3) |> Enum.take(13) == [4,5,6,7,8,9,7,8,9,7,8,9,7]
stream = Stream.concat(Stream.cycle(1..3), Stream.cycle(4..6))
assert is_function(stream)
assert Enum.take(stream, 13) == [1,2,3,1,2,3,1,2,3,1,2,3,1]
end
test "cycle" do
stream = Stream.cycle([1,2,3])
assert is_function(stream)
assert Stream.cycle([1,2,3]) |> Stream.take(5) |> Enum.to_list == [1,2,3,1,2]
assert Enum.take(stream, 5) == [1,2,3,1,2]
end
test "drop" do
stream = Stream.drop(1..10, 5)
assert is_lazy(stream)
assert Enum.to_list(stream) == [6,7,8,9,10]
assert Enum.to_list(Stream.drop(1..5, 0)) == [1,2,3,4,5]
assert Enum.to_list(Stream.drop(1..3, 5)) == []
nats = Stream.iterate(1, &(&1 + 1))
assert Stream.drop(nats, 2) |> Enum.take(5) == [3,4,5,6,7]
end
test "drop_while" do
stream = Stream.drop_while(1..10, &(&1 <= 5))
assert is_lazy(stream)
assert Enum.to_list(stream) == [6,7,8,9,10]
assert Enum.to_list(Stream.drop_while(1..5, &(&1 <= 0))) == [1,2,3,4,5]
assert Enum.to_list(Stream.drop_while(1..3, &(&1 <= 5))) == []
nats = Stream.iterate(1, &(&1 + 1))
assert Stream.drop_while(nats, &(&1 <= 5)) |> Enum.take(5) == [6,7,8,9,10]
end
test "filter" do
stream = Stream.filter([1,2,3], fn(x) -> rem(x, 2) == 0 end)
assert is_lazy(stream)
assert Enum.to_list(stream) == [2]
nats = Stream.iterate(1, &(&1 + 1))
assert Stream.filter(nats, &(rem(&1, 2) == 0)) |> Enum.take(5) == [2,4,6,8,10]
end
test "iterate" do
stream = Stream.iterate(0, &(&1+2))
assert Enum.take(stream, 5) == [0,2,4,6,8]
stream = Stream.iterate(5, &(&1+2))
assert Enum.take(stream, 5) == [5,7,9,11,13]
# Only calculate values if needed
stream = Stream.iterate("HELLO", &raise/1)
assert Enum.take(stream, 1) == ["HELLO"]
end
test "map" do
stream = Stream.map([1,2,3], &(&1 * 2))
assert is_lazy(stream)
assert Enum.to_list(stream) == [2,4,6]
nats = Stream.iterate(1, &(&1 + 1))
assert Stream.map(nats, &(&1 * 2)) |> Enum.take(5) == [2,4,6,8,10]
end
test "flat_map" do
stream = Stream.flat_map([1, 2, 3], &[&1, &1 * 2])
assert is_lazy(stream)
assert Enum.to_list(stream) == [1, 2, 2, 4, 3, 6]
nats = Stream.iterate(1, &(&1 + 1))
assert Stream.flat_map(nats, &[&1, &1 * 2]) |> Enum.take(6) == [1, 2, 2, 4, 3, 6]
end
test "flat_map does not intercept wrapped lazy enumeration" do
# flat_map returns a lazy enumeration that does not throw
assert [1, 2, 3, -1, -2]
|> Stream.flat_map(fn x -> Stream.map([x, x+1], & &1) end)
|> Stream.take_while(fn x -> x >= 0 end)
|> Enum.to_list == [1, 2, 2, 3, 3, 4]
    # flat_map returns a lazy enumeration that does throw
assert [1, 2, 3, -1, -2]
|> Stream.flat_map(fn x -> Stream.take_while([x, x+1, x+2], &(&1 <= x + 1)) end)
|> Stream.take_while(fn x -> x >= 0 end)
|> Enum.to_list == [1, 2, 2, 3, 3, 4]
    # flat_map returns a lazy enumeration that does throw, wrapped in an enumerable
assert [1, 2, 3, -1, -2]
|> Stream.flat_map(fn x -> Stream.concat([x], Stream.take_while([x+1, x+2], &(&1 <= x + 1))) end)
|> Stream.take_while(fn x -> x >= 0 end)
|> Enum.to_list == [1, 2, 2, 3, 3, 4]
end
test "reject" do
stream = Stream.reject([1,2,3], fn(x) -> rem(x, 2) == 0 end)
assert is_lazy(stream)
assert Enum.to_list(stream) == [1,3]
nats = Stream.iterate(1, &(&1 + 1))
assert Stream.reject(nats, &(rem(&1, 2) == 0)) |> Enum.take(5) == [1,3,5,7,9]
end
test "repeatedly" do
stream = Stream.repeatedly(fn -> 1 end)
assert Enum.take(stream, 5) == [1,1,1,1,1]
stream = Stream.repeatedly(&:random.uniform/0)
[r1,r2] = Enum.take(stream, 2)
assert r1 != r2
end
test "take" do
stream = Stream.take(1..1000, 5)
assert is_lazy(stream)
assert Enum.to_list(stream) == [1,2,3,4,5]
assert Enum.to_list(Stream.take(1..1000, 0)) == []
assert Enum.to_list(Stream.take(1..3, 5)) == [1,2,3]
nats = Stream.iterate(1, &(&1 + 1))
assert Enum.to_list(Stream.take(nats, 5)) == [1,2,3,4,5]
stream = Stream.drop(1..100, 5)
assert Stream.take(stream, 5) |> Enum.to_list == [6,7,8,9,10]
stream = 1..5 |> Stream.take(10) |> Stream.drop(15)
assert { [], [] } = Enum.split(stream, 5)
end
test "take_while" do
stream = Stream.take_while(1..1000, &(&1 <= 5))
assert is_lazy(stream)
assert Enum.to_list(stream) == [1,2,3,4,5]
assert Enum.to_list(Stream.take_while(1..1000, &(&1 <= 0))) == []
assert Enum.to_list(Stream.take_while(1..3, &(&1 <= 5))) == [1,2,3]
nats = Stream.iterate(1, &(&1 + 1))
assert Enum.to_list(Stream.take_while(nats, &(&1 <= 5))) == [1,2,3,4,5]
stream = Stream.drop(1..100, 5)
assert Stream.take_while(stream, &(&1 < 11)) |> Enum.to_list == [6,7,8,9,10]
end
test "unfold" do
stream = Stream.unfold(10, fn x -> if x > 0, do: {x, x-1}, else: nil end)
assert Enum.take(stream, 5) == [10, 9, 8, 7, 6]
stream = Stream.unfold(5, fn x -> if x > 0, do: {x, x-1}, else: nil end)
assert Enum.to_list(stream) == [5, 4, 3, 2, 1]
end
test "unfold only calculate values if needed" do
stream = Stream.unfold(1, fn x -> if x > 0, do: {x, x-1}, else: throw(:boom) end)
assert Enum.take(stream, 1) == [1]
stream = Stream.unfold(5, fn x -> if x > 0, do: {x, x-1}, else: nil end)
assert Enum.to_list(Stream.take(stream, 2)) == [5, 4]
end
test "with_index" do
stream = Stream.with_index([1,2,3])
assert is_lazy(stream)
assert Enum.to_list(stream) == [{1,0},{2,1},{3,2}]
nats = Stream.iterate(1, &(&1 + 1))
assert Stream.with_index(nats) |> Enum.take(3) == [{1,0},{2,1},{3,2}]
end
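  # Lazy streams are represented as Stream.Lazy records in this Elixir version.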
defp is_lazy(stream) do
assert is_record(stream, Stream.Lazy)
end
end
| 32.07489 | 108 | 0.572449 |
034b983fdaf925c05851ad061608c4d06fbb20ec | 1,751 | ex | Elixir | lib/dustbin/notifier.ex | saulecabrera/dustbin | 6f862d35d4584acda1e082fad278a7c23dc9598b | [
"MIT"
] | null | null | null | lib/dustbin/notifier.ex | saulecabrera/dustbin | 6f862d35d4584acda1e082fad278a7c23dc9598b | [
"MIT"
] | 7 | 2017-02-01T00:17:57.000Z | 2017-04-17T13:40:04.000Z | lib/dustbin/notifier.ex | saulecabrera/dustbin | 6f862d35d4584acda1e082fad278a7c23dc9598b | [
"MIT"
] | null | null | null | defmodule Dustbin.Notifier do
alias Dustbin.{Locations, Scheduler, Schedules}
import Crontab.CronExpression
use GenServer
use Timex
def start_link do
GenServer.start_link(__MODULE__, %{}, name: __MODULE__)
end
def notify(name, location_slug, timezone) do
GenServer.cast(__MODULE__, {:notify, name, location_slug, timezone})
end
# Callbacks
def init(state) do
config_jobs()
{:ok, state}
end
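  # Looks up tomorrow's schedule for the location and, when the lookup succeeds,
  # tweets the formatted notification from a supervised one-off task.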
def handle_cast({:notify, name, location_slug, timezone}, state) do
date = tomorrow(timezone)
with {:ok, occurrences} <- Schedules.find(location_slug, Date.to_string(date)) do
Task.Supervisor.start_child(Dustbin.TaskSupervisor, fn ->
format_notification(name, occurrences, date)
|> ExTwitter.update
end, restart: :transient)
end
{:noreply, state}
end
# Helpers
defp config_jobs do
Enum.each(Locations.supported(), fn %{name: name, slug: slug, timezone: timezone} ->
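      # One Quantum job per supported location: ~e[0 16 * * *] fires every day
      # at 16:00 in the location's own timezone and triggers a notification.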
Scheduler.new_job()
|> Quantum.Job.set_schedule(~e[0 16 * * *])
|> Quantum.Job.set_timezone(timezone)
|> Quantum.Job.set_task(fn -> Dustbin.Notifier.notify(name, slug, timezone) end)
|> Scheduler.add_job()
end)
end
defp format_notification(name, occurrences, date) do
occurrences_msg =
Enum.reduce(occurrences, "", fn
%{"name" => name}, "" -> name
%{"name" => name}, acc ->
"""
#{acc}
#{name}
"""
end)
date_msg =
date
|> Timex.format!("{WDshort} {Mshort} {D}, {YYYY}")
"""
#{name}
#{date_msg}:
#{occurrences_msg}
"""
end
defp tomorrow(timezone) do
Timex.now(timezone)
|> Timex.shift(days: 1)
|> Timex.to_date
end
end
| 23.039474 | 88 | 0.615077 |
034bb8d06f351acf77dbe282b59fe6c62ebfcc0a | 172 | exs | Elixir | config/test.exs | dreamingblackcat/phoneix-chat-server-example | 39fe46056c0cf4015c66919c4831b8e2437e39af | [
"MIT"
] | null | null | null | config/test.exs | dreamingblackcat/phoneix-chat-server-example | 39fe46056c0cf4015c66919c4831b8e2437e39af | [
"MIT"
] | null | null | null | config/test.exs | dreamingblackcat/phoneix-chat-server-example | 39fe46056c0cf4015c66919c4831b8e2437e39af | [
"MIT"
] | null | null | null | use Mix.Config
config :chatter, Chatter.Endpoint,
http: [port: System.get_env("PORT") || 4001]
# Print only warnings and errors during test
config :logger, level: :warn | 24.571429 | 46 | 0.732558 |
034beb95ccaac2604ffc60c8f6500cb9dcbc4510 | 368 | ex | Elixir | web_finngen_r8/lib/risteys/atc_drug.ex | vincent-octo/risteys | 5bb1e70b78988770048b91b42fad025faf98d84a | [
"MIT"
] | null | null | null | web_finngen_r8/lib/risteys/atc_drug.ex | vincent-octo/risteys | 5bb1e70b78988770048b91b42fad025faf98d84a | [
"MIT"
] | null | null | null | web_finngen_r8/lib/risteys/atc_drug.ex | vincent-octo/risteys | 5bb1e70b78988770048b91b42fad025faf98d84a | [
"MIT"
] | null | null | null | defmodule Risteys.ATCDrug do
use Ecto.Schema
import Ecto.Changeset
schema "atc_drugs" do
field :atc, :string
field :description, :string
timestamps()
end
@doc false
def changeset(atc_drug, attrs) do
atc_drug
|> cast(attrs, [:atc, :description])
|> validate_required([:atc, :description])
|> unique_constraint(:atc)
end
end
| 18.4 | 46 | 0.668478 |
034c0a1542394d0b57d7da9ea9f23b0943a903a4 | 800 | exs | Elixir | apps/elixir_ls_debugger/mix.exs | bottlenecked/elixir-ls | 99ab6e98ff181aae01ca3d119dee0ea9c49c727a | [
"Apache-2.0"
] | null | null | null | apps/elixir_ls_debugger/mix.exs | bottlenecked/elixir-ls | 99ab6e98ff181aae01ca3d119dee0ea9c49c727a | [
"Apache-2.0"
] | null | null | null | apps/elixir_ls_debugger/mix.exs | bottlenecked/elixir-ls | 99ab6e98ff181aae01ca3d119dee0ea9c49c727a | [
"Apache-2.0"
] | null | null | null | defmodule ElixirLS.Debugger.Mixfile do
use Mix.Project
def project do
[
app: :elixir_ls_debugger,
version: "0.9.0",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: ">= 1.10.0",
build_embedded: false,
start_permanent: true,
build_per_environment: false,
consolidate_protocols: false,
deps: deps(),
xref: [exclude: [:int, :dbg_iserver]]
]
end
def application do
[mod: {ElixirLS.Debugger, []}, extra_applications: [:mix, :logger]]
end
defp deps do
[
{:elixir_sense, github: "elixir-lsp/elixir_sense"},
{:elixir_ls_utils, in_umbrella: true},
{:dialyxir, "~> 1.0", runtime: false}
]
end
end
| 23.529412 | 71 | 0.585 |
034c2231c13dce8aa6c843b865e9168d65a9d110 | 570 | ex | Elixir | lib/sobelow/sql/stream.ex | juancgalvis/sobelow | 9ae3874c26ab7cfa6c8a8517ccd02af98e187585 | [
"Apache-2.0"
] | 1,305 | 2017-05-12T21:09:40.000Z | 2022-03-31T04:31:49.000Z | lib/sobelow/sql/stream.ex | juancgalvis/sobelow | 9ae3874c26ab7cfa6c8a8517ccd02af98e187585 | [
"Apache-2.0"
] | 95 | 2017-05-15T09:45:41.000Z | 2022-03-23T03:35:48.000Z | lib/sobelow/sql/stream.ex | juancgalvis/sobelow | 9ae3874c26ab7cfa6c8a8517ccd02af98e187585 | [
"Apache-2.0"
] | 86 | 2017-05-15T20:18:59.000Z | 2022-02-11T22:10:34.000Z | defmodule Sobelow.SQL.Stream do
@uid 18
@finding_type "SQL.Stream: SQL injection"
use Sobelow.Finding
def run(fun, meta_file) do
confidence = if !meta_file.is_controller?, do: :low
Finding.init(@finding_type, meta_file.filename, confidence)
|> Finding.multi_from_def(fun, parse_sql_def(fun))
|> Enum.each(&Print.add_finding(&1))
end
## stream(repo, sql, params \\ [], opts \\ [])
def parse_sql_def(fun) do
Parse.get_fun_vars_and_meta(fun, 1, :stream, {:required, :SQL})
end
def details() do
Sobelow.SQL.details()
end
end
| 23.75 | 67 | 0.680702 |
034c2a3371e55064bce0f66b44ba1d2dea506684 | 340 | exs | Elixir | priv/repo/migrations/20190217191734_create_credentials.exs | edwinthinks/semaphore-demo-elixir-phoenix | 16c60f1a37f204156a17628947a7dda552a76ee0 | [
"MIT"
] | 1 | 2020-03-19T13:26:01.000Z | 2020-03-19T13:26:01.000Z | priv/repo/migrations/20190217191734_create_credentials.exs | edwinthinks/semaphore-demo-elixir-phoenix | 16c60f1a37f204156a17628947a7dda552a76ee0 | [
"MIT"
] | 6 | 2019-05-29T16:34:19.000Z | 2021-09-01T02:10:43.000Z | priv/repo/migrations/20190217191734_create_credentials.exs | edwinthinks/semaphore-demo-elixir-phoenix | 16c60f1a37f204156a17628947a7dda552a76ee0 | [
"MIT"
] | 68 | 2019-05-16T15:40:51.000Z | 2022-03-20T06:30:53.000Z | defmodule Sema.Repo.Migrations.CreateCredentials do
use Ecto.Migration
def change do
create table(:credentials) do
add :email, :string
add :user_id, references(:users, on_delete: :nothing)
timestamps()
end
create unique_index(:credentials, [:email])
create index(:credentials, [:user_id])
end
end
| 21.25 | 59 | 0.688235 |
034ce4ac07772e0aa6453b38d3cf9ed70f5b6df6 | 1,631 | ex | Elixir | apps/day6/lib/day6.ex | at7heb/aoc2021 | ab31881b40354e28da0feaf5309c9648def85e77 | [
"MIT"
] | null | null | null | apps/day6/lib/day6.ex | at7heb/aoc2021 | ab31881b40354e28da0feaf5309c9648def85e77 | [
"MIT"
] | null | null | null | apps/day6/lib/day6.ex | at7heb/aoc2021 | ab31881b40354e28da0feaf5309c9648def85e77 | [
"MIT"
] | null | null | null | defmodule Day6 do
@moduledoc """
  Advent of Code 2021, day 6: simulates lanternfish population growth from the
  comma-separated timers in `input.txt`.
"""
@doc """
Hello world.
## Examples
iex> Day6.hello()
:world
"""
def run() do
get_input()
|> process()
|> present()
end
def present({a1, a2} = _answer) do
IO.puts("There are now #{a1} langernfish")
IO.puts("Later there are #{a2} lanternfish")
end
def get_input(), do: File.read!("input.txt")
def process(text) do
answer1 = split(text)
|> make_into_model()
|> age_model(80)
answer2 = split(text)
|> make_into_model()
|> age_model(256)
{answer1, answer2}
end
def split(text), do: String.split(text, ",", trim: true)
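  # The population is modelled as a 9-element tuple: element i holds the number
  # of fish whose internal timer currently reads i (0..8).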
def make_into_model(fish_list) do
fish_list = Enum.map(fish_list, fn fish_age -> String.to_integer(fish_age) end)
IO.inspect(fish_list, label: "fish list into model")
Enum.reduce(0..6,
Tuple.duplicate(0,9),
fn age, tuple -> put_elem(tuple, age, length(Enum.filter(fish_list, fn fish_age -> fish_age == age end))) end)
|> IO.inspect(label: "model from fish list")
end
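  # Advances the population one day per recursive call: every timer shifts down
  # one slot, fish that were at 0 rejoin at 6 (p0 + p7) and each spawns a new
  # fish with timer 8 (the trailing p0). When n reaches 0 the counts are summed.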
def age_model(population, n) when n == 0, do: Enum.sum(Tuple.to_list(population))
def age_model({p0,p1,p2,p3,p4,p5,p6,p7,p8} = population, n) do
if n > 60 do
IO.inspect({n, population})
end
next_days_population = {p1, p2, p3, p4, p5, p6, p0+p7, p8, p0}
age_model(next_days_population, n - 1)
end
def generate_new_fish(population) do
number_of_babies = length(Enum.filter(population, fn x -> x == 0 end))
List.duplicate(6, number_of_babies)
end
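  # Convenience run against a small sample input instead of input.txt.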
def example() do
process("3,4,3,1,2")
|> present
end
end
| 23.3 | 116 | 0.62477 |
034cf02ecde0a174d4ab84e7954bdb166dadb6b3 | 1,564 | ex | Elixir | clients/slides/lib/google_api/slides/v1/model/size.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/size.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/size.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Slides.V1.Model.Size do
@moduledoc """
A width and height.
## Attributes
- height (Dimension): The height of the object. Defaults to: `null`.
- width (Dimension): The width of the object. Defaults to: `null`.
"""
defstruct [
:"height",
:"width"
]
end
defimpl Poison.Decoder, for: GoogleApi.Slides.V1.Model.Size do
import GoogleApi.Slides.V1.Deserializer
def decode(value, options) do
value
|> deserialize(:"height", :struct, GoogleApi.Slides.V1.Model.Dimension, options)
|> deserialize(:"width", :struct, GoogleApi.Slides.V1.Model.Dimension, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Slides.V1.Model.Size do
def encode(value, options) do
GoogleApi.Slides.V1.Deserializer.serialize_non_nil(value, options)
end
end
| 30.666667 | 84 | 0.734655 |
034d181f2768c90c215dcf431755ef25fcd690b6 | 870 | exs | Elixir | mix.exs | mtanca/alchemic_avatar | 8016ef8268581ae1c77605e228e3c62e64c0b21b | [
"MIT"
] | 56 | 2016-04-05T09:41:04.000Z | 2021-01-21T21:40:14.000Z | mix.exs | mtanca/alchemic_avatar | 8016ef8268581ae1c77605e228e3c62e64c0b21b | [
"MIT"
] | 3 | 2016-08-28T03:37:18.000Z | 2017-12-01T07:05:34.000Z | mix.exs | mtanca/alchemic_avatar | 8016ef8268581ae1c77605e228e3c62e64c0b21b | [
"MIT"
] | 9 | 2016-04-05T11:20:36.000Z | 2021-04-06T14:16:42.000Z | defmodule AlchemicAvatar.Mixfile do
use Mix.Project
def project do
[app: :alchemic_avatar,
version: "0.1.3",
elixir: "~> 1.2",
description: description(),
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
package: package(),
deps: deps(),
docs: [extras: ["README.md"] ]
]
end
def application do
[applications: []]
end
defp deps do
[{:earmark, "~> 1.1", only: :dev},
{:ex_doc, "~> 0.18", only: :dev},
{:inch_ex, "~> 0.5", only: :docs}]
end
defp description do
"""
Creating letter avatar from user's name (or any other string / character).
"""
end
defp package do
[maintainers: ["zhangsoledad"],
licenses: ["MIT"],
links: %{"Github" => "https://github.com/zhangsoledad/alchemic_avatar"},
files: ~w(mix.exs README.md lib)]
end
end
| 20.714286 | 77 | 0.581609 |
034d21a7f2bf19567314b7279b2a8c0a40cda982 | 1,786 | ex | Elixir | clients/double_click_bid_manager/lib/google_api/double_click_bid_manager/v11/model/path_query_options_filter.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/double_click_bid_manager/lib/google_api/double_click_bid_manager/v11/model/path_query_options_filter.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/double_click_bid_manager/lib/google_api/double_click_bid_manager/v11/model/path_query_options_filter.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DoubleClickBidManager.V11.Model.PathQueryOptionsFilter do
@moduledoc """
Dimension Filter on path events.
## Attributes
* `filter` (*type:* `String.t`, *default:* `nil`) - Dimension the filter is applied to.
* `match` (*type:* `String.t`, *default:* `nil`) - Indicates how the filter should be matched to the value.
* `values` (*type:* `list(String.t)`, *default:* `nil`) - Value to filter on.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:filter => String.t(),
:match => String.t(),
:values => list(String.t())
}
field(:filter)
field(:match)
field(:values, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.DoubleClickBidManager.V11.Model.PathQueryOptionsFilter do
def decode(value, options) do
GoogleApi.DoubleClickBidManager.V11.Model.PathQueryOptionsFilter.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DoubleClickBidManager.V11.Model.PathQueryOptionsFilter do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.698113 | 111 | 0.717245 |
034d63339db9ef9b2c732cda7b32cac0508334b5 | 2,205 | exs | Elixir | exercises/01-elixir/02-data-types/02-tuples/05-destructuring-remarketed/tests.exs | DennisWinnepenninckx/distributed-applications | 06743e4e2a09dc52ff52be831e486bb073916173 | [
"BSD-3-Clause"
] | 1 | 2021-09-22T09:52:11.000Z | 2021-09-22T09:52:11.000Z | exercises/01-elixir/02-data-types/02-tuples/05-destructuring-remarketed/tests.exs | DennisWinnepenninckx/distributed-applications | 06743e4e2a09dc52ff52be831e486bb073916173 | [
"BSD-3-Clause"
] | 22 | 2019-06-19T18:58:13.000Z | 2020-03-16T14:43:06.000Z | exercises/01-elixir/02-data-types/02-tuples/05-destructuring-remarketed/tests.exs | DennisWinnepenninckx/distributed-applications | 06743e4e2a09dc52ff52be831e486bb073916173 | [
"BSD-3-Clause"
] | 32 | 2019-09-19T03:25:11.000Z | 2020-10-06T15:01:47.000Z | defmodule Setup do
@script "shared.exs"
def setup(directory \\ ".") do
path = Path.join(directory, @script)
if File.exists?(path) do
Code.require_file(path)
Shared.setup(__DIR__)
else
setup(Path.join(directory, ".."))
end
end
end
Setup.setup
defmodule Tests do
use ExUnit.Case, async: true
import Shared
# Fallback case: no simplification possible
check that: Math.simplify(0), is_equal_to: 0
check that: Math.simplify({:+, :a, :b}), is_equal_to: {:+, :a, :b}
# x + 0 == 0 + x == x
check that: Math.simplify({:+, :x, 0}), is_equal_to: :x
check that: Math.simplify({:+, 0, :x}), is_equal_to: :x
# Recursive simplification
check that: Math.simplify({:+, 0, {:+, :x, 0}}), is_equal_to: :x
check that: Math.simplify({:+, {:+, 0, :y}, {:+, :x, 0}}), is_equal_to: {:+, :y, :x}
check that: Math.simplify({:+, {:+, 0, {:/, :a, :b}}, 0}), is_equal_to: {:/, :a, :b}
# Literals allow partial evaluation
check that: Math.simplify({:+, 1, 1}), is_equal_to: 1 + 1
check that: Math.simplify({:+, 5, 8}), is_equal_to: 5 + 8
check that: Math.simplify({:+, {:+, 1, 2}, {:+, 3, 4}}), is_equal_to: 1 + 2 + 3 + 4
check that: Math.simplify({:+, {:+, 1, 2}, :x}), is_equal_to: {:+, 3, :x}
# x - 0 == x
check that: Math.simplify({:-, :x, 0}), is_equal_to: :x
check that: Math.simplify({:-, {:+, :x, :y}, 0}), is_equal_to: {:+, :x, :y}
# x - x == 0
check that: Math.simplify({:-, :x, :x}), is_equal_to: 0
check that: Math.simplify({:-, :y, {:-, :x, :x}}), is_equal_to: :y
# Literals allow partial evaluation
check that: Math.simplify({:-, 5, 3}), is_equal_to: 2
check that: Math.simplify({:-, {:-, 4, 1}, {:-, 5, 3}}), is_equal_to: 1
# 0 * x == x * 0 == 0
check that: Math.simplify({:*, :x, 0}), is_equal_to: 0
check that: Math.simplify({:*, 0, :x}), is_equal_to: 0
check that: Math.simplify({:*, 4, {:*, 0, :x}}), is_equal_to: 0
check that: Math.simplify({:+, 4, {:*, 0, :x}}), is_equal_to: 4
# 1 * x == x * 1 == x
check that: Math.simplify({:*, :x, 1}), is_equal_to: :x
check that: Math.simplify({:*, 1, :x}), is_equal_to: :x
check that: Math.simplify({:-, {:*, :x, 1}, {:*, 1, :x}}), is_equal_to: 0
end
| 33.923077 | 86 | 0.560544 |
034dda2f06299cfdb35a8714730c4c355bdd495d | 969 | ex | Elixir | lib/top5_2/accounts/user.ex | rpillar/Top5_Elixir2 | 9f3a9a0315c5dc53cb53aab93deadccdb697a868 | [
"MIT"
] | 1 | 2019-11-11T21:48:20.000Z | 2019-11-11T21:48:20.000Z | lib/top5_2/accounts/user.ex | rpillar/Top5_Elixir2 | 9f3a9a0315c5dc53cb53aab93deadccdb697a868 | [
"MIT"
] | 2 | 2021-03-09T09:26:25.000Z | 2021-05-09T08:58:51.000Z | lib/top5_2/accounts/user.ex | rpillar/Top5_Elixir2 | 9f3a9a0315c5dc53cb53aab93deadccdb697a868 | [
"MIT"
] | null | null | null | defmodule Top52.Accounts.User do
@moduledoc """
The Accounts User schema / changeset.
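## Example
A minimal sketch of building a changeset with this module (values are illustrative):
    Top52.Accounts.User.changeset(%Top52.Accounts.User{}, %{
      username: "alice",
      email: "alice@example.com",
      password: "longer-than-7"
    })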
"""
use Ecto.Schema
import Ecto.Changeset
alias Pbkdf2
schema "users" do
field :email, :string
field :password, :string
field :username, :string
has_many :tasks, Top52.Tasks.Task
timestamps()
end
@doc false
def changeset(user, attrs) do
user
|> cast(attrs, [:username, :email, :password])
|> validate_required([:username, :email, :password])
|> validate_format(:email, ~r/@/, message: "Invalid email address")
|> validate_password(:password)
|> unique_constraint(:username)
|> update_change(:password, &Pbkdf2.hash_pwd_salt/1)
end
defp validate_password(changeset, field, options \\ []) do
validate_change(changeset, field, fn _, password ->
case String.length(password) > 7 do
true -> []
false -> [{field, options[:message] || "Password invalid"}]
end
end)
end
end
| 24.225 | 71 | 0.647059 |
034defc870846f05f72c19f17190e8be9be3f5e8 | 677 | ex | Elixir | lib/money_bin/accounts/account.ex | KazW/money_bin_sql | 32c3513e734b4b1f0a9688e9f60bdd50a54b4a4f | [
"MIT"
] | null | null | null | lib/money_bin/accounts/account.ex | KazW/money_bin_sql | 32c3513e734b4b1f0a9688e9f60bdd50a54b4a4f | [
"MIT"
] | 1 | 2019-01-01T06:50:32.000Z | 2019-01-01T06:50:32.000Z | lib/money_bin/accounts/account.ex | KazW/money_bin_sql | 32c3513e734b4b1f0a9688e9f60bdd50a54b4a4f | [
"MIT"
] | null | null | null | defmodule MoneyBin.Account do
use MoneyBin, :schema
@moduledoc false
schema @tables[:account] do
has_many(:entries, @schemas[:journal_entry])
has_many(:transactions, through: [:entries, :transactions])
has_many(:memberships, @schemas[:ledger_member])
has_many(:ledgers, through: [:memberships, :ledger])
field(:balance, :decimal, virtual: true, default: 0)
field(:debit_total, :decimal, virtual: true, default: 0)
field(:credit_total, :decimal, virtual: true, default: 0)
field(:entry_count, :integer, virtual: true, default: 0)
timestamps()
end
def changeset(record \\ %__MODULE__{}, attrs), do: record |> cast(attrs, [])
end
| 32.238095 | 78 | 0.691285 |
034e117617a2ae9a900b6a44c27eab2a29b59297 | 3,255 | ex | Elixir | clients/language/lib/google_api/language/v1/model/entity.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/language/lib/google_api/language/v1/model/entity.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/language/lib/google_api/language/v1/model/entity.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Language.V1.Model.Entity do
@moduledoc """
Represents a phrase in the text that is a known entity, such as
a person, an organization, or location. The API associates information, such
as salience and mentions, with entities.
## Attributes
* `mentions` (*type:* `list(GoogleApi.Language.V1.Model.EntityMention.t)`, *default:* `nil`) - The mentions of this entity in the input document. The API currently
supports proper noun mentions.
* `metadata` (*type:* `map()`, *default:* `nil`) - Metadata associated with the entity.
For most entity types, the metadata is a Wikipedia URL (`wikipedia_url`)
and Knowledge Graph MID (`mid`), if they are available. For the metadata
associated with other entity types, see the Type table below.
* `name` (*type:* `String.t`, *default:* `nil`) - The representative name for the entity.
* `salience` (*type:* `number()`, *default:* `nil`) - The salience score associated with the entity in the [0, 1.0] range.
The salience score for an entity provides information about the
importance or centrality of that entity to the entire document text.
Scores closer to 0 are less salient, while scores closer to 1.0 are highly
salient.
* `sentiment` (*type:* `GoogleApi.Language.V1.Model.Sentiment.t`, *default:* `nil`) - For calls to AnalyzeEntitySentiment or if
AnnotateTextRequest.Features.extract_entity_sentiment is set to
true, this field will contain the aggregate sentiment expressed for this
entity in the provided document.
* `type` (*type:* `String.t`, *default:* `nil`) - The entity type.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:mentions => list(GoogleApi.Language.V1.Model.EntityMention.t()),
:metadata => map(),
:name => String.t(),
:salience => number(),
:sentiment => GoogleApi.Language.V1.Model.Sentiment.t(),
:type => String.t()
}
field(:mentions, as: GoogleApi.Language.V1.Model.EntityMention, type: :list)
field(:metadata, type: :map)
field(:name)
field(:salience)
field(:sentiment, as: GoogleApi.Language.V1.Model.Sentiment)
field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.Language.V1.Model.Entity do
def decode(value, options) do
GoogleApi.Language.V1.Model.Entity.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Language.V1.Model.Entity do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 42.272727 | 167 | 0.705376 |
034e4517fccba2ec2e1bd50e7f6d5d73e1b2d709 | 144 | ex | Elixir | lib/wishlist_web/controllers/admin_controller.ex | egutter/wishlist | af7b71c96ef9efded708c5ecfe3bab5a00c0761e | [
"MIT"
] | null | null | null | lib/wishlist_web/controllers/admin_controller.ex | egutter/wishlist | af7b71c96ef9efded708c5ecfe3bab5a00c0761e | [
"MIT"
] | null | null | null | lib/wishlist_web/controllers/admin_controller.ex | egutter/wishlist | af7b71c96ef9efded708c5ecfe3bab5a00c0761e | [
"MIT"
] | null | null | null | defmodule WishlistWeb.AdminController do
use WishlistWeb, :controller
def index(conn, _params) do
render(conn, "index.html")
end
end
| 18 | 40 | 0.743056 |
034e5d022e4ba3e9d8cec38a9b790096fcbf6b81 | 2,560 | ex | Elixir | priv/perf/apps/load_test/lib/scenario/many_standard_exits.ex | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 177 | 2018-08-24T03:51:02.000Z | 2020-05-30T13:29:25.000Z | priv/perf/apps/load_test/lib/scenario/many_standard_exits.ex | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 1,042 | 2018-08-25T00:52:39.000Z | 2020-06-01T05:15:17.000Z | priv/perf/apps/load_test/lib/scenario/many_standard_exits.ex | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 47 | 2018-08-24T12:06:33.000Z | 2020-04-28T11:49:25.000Z | # Copyright 2019-2020 OMG Network Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule LoadTest.Scenario.ManyStandardExits do
@moduledoc """
Creates and funds an account, creates many utxos and starts a standard exit on each utxo
## configuration values
- `exits_per_session` the number of utxos to create and then exit
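- `gas_price` the gas price used for the root chain exit transactions
## Example
A minimal sketch of the configuration map this scenario reads via `config/2` (values are illustrative only):
    %{
      exits_per_session: 10,
      gas_price: 20_000_000_000
    }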
"""
use Chaperon.Scenario
alias LoadTest.ChildChain.WatcherSync
alias LoadTest.Ethereum
alias LoadTest.Ethereum.Account
alias LoadTest.Service.Faucet
@gas_start_exit 500_000
@standard_exit_bond 14_000_000_000_000_000
def run(session) do
exits_per_session = config(session, [:exits_per_session])
gas_price = config(session, [:gas_price])
# Create a new exiter account
{:ok, exiter} = Account.new()
amount = (@gas_start_exit * gas_price + @standard_exit_bond) * exits_per_session
# Fund the exiter with some root chain eth
{:ok, _} = Faucet.fund_root_chain_account(exiter.addr, amount)
# Create many utxos on the child chain
session =
run_scenario(session, LoadTest.Scenario.CreateUtxos, %{
sender: exiter,
transactions_per_session: 1,
utxos_to_create_per_session: exits_per_session
})
# Wait for the last utxo to be seen by the watcher
:ok = LoadTest.ChildChain.Utxos.wait_for_utxo(exiter.addr, session.assigned.utxo)
# Start a standard exit on each of the exiter's utxos
session =
exiter.addr
|> LoadTest.ChildChain.Utxos.get_utxos()
|> Enum.map(&exit_utxo(session, &1, exiter))
|> List.last()
last_tx_hash = session.assigned.tx_hash
{:ok, %{"status" => "0x1", "blockNumber" => last_exit_height}} = Ethereum.transact_sync(last_tx_hash)
:ok = WatcherSync.watcher_synchronize(root_chain_height: last_exit_height)
log_info(session, "Many Standard Exits Test done.")
end
def exit_utxo(session, utxo, exiter) do
run_scenario(
session,
LoadTest.Scenario.StartStandardExit,
%{
exiter: exiter,
utxo: utxo
}
)
end
end
| 31.604938 | 105 | 0.714844 |
034e744e978b992568aa1b4b1b41f8d43b383d81 | 1,099 | ex | Elixir | test/support/channel_case.ex | wbotelhos/crud-in-5-minutes-with-phoenix-and-elixir | f3218507d5c2ea7c23170d4316b41979beaa9aa6 | [
"MIT"
] | 2 | 2021-05-28T11:32:22.000Z | 2021-05-28T19:39:03.000Z | test/support/channel_case.ex | wbotelhos/crud-in-5-minutes-with-phoenix-and-elixir | f3218507d5c2ea7c23170d4316b41979beaa9aa6 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | wbotelhos/crud-in-5-minutes-with-phoenix-and-elixir | f3218507d5c2ea7c23170d4316b41979beaa9aa6 | [
"MIT"
] | null | null | null | defmodule BibleWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use BibleWeb.ChannelCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
import Phoenix.ChannelTest
import BibleWeb.ChannelCase
# The default endpoint for testing
@endpoint BibleWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Bible.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Bible.Repo, {:shared, self()})
end
:ok
end
end
| 26.804878 | 67 | 0.724295 |
034e8542d7c65ac6443d03ece28fa3870a70a57d | 944 | ex | Elixir | examples/picam_http/lib/picam_http/streamer.ex | JacquiManzi/picam | d03d7a1b20da4a53578c6d218635b832e1543af9 | [
"BSD-3-Clause"
] | 56 | 2019-03-31T07:03:16.000Z | 2022-03-17T16:46:22.000Z | examples/picam_http/lib/picam_http/streamer.ex | JacquiManzi/picam | d03d7a1b20da4a53578c6d218635b832e1543af9 | [
"BSD-3-Clause"
] | 12 | 2019-04-09T10:11:25.000Z | 2022-03-06T13:30:27.000Z | examples/picam_http/lib/picam_http/streamer.ex | JacquiManzi/picam | d03d7a1b20da4a53578c6d218635b832e1543af9 | [
"BSD-3-Clause"
] | 16 | 2019-04-08T05:36:14.000Z | 2022-03-05T07:58:41.000Z | defmodule PicamHTTP.Streamer do
@moduledoc """
Plug that streams JPEG frames from the camera as a `multipart/x-mixed-replace` (MJPEG) response.
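## Example
A minimal sketch of mounting this plug in a `Plug.Router` (the path is illustrative):
    forward "/video.mjpg", to: PicamHTTP.Streamer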
"""
import Plug.Conn
@behaviour Plug
@boundary "w58EW1cEpjzydSCq"
def init(opts), do: opts
def call(conn, _opts) do
conn
|> put_resp_header("Age", "0")
|> put_resp_header("Cache-Control", "no-cache, private")
|> put_resp_header("Pragma", "no-cache")
|> put_resp_header("Content-Type", "multipart/x-mixed-replace; boundary=#{@boundary}")
|> send_chunked(200)
|> send_pictures
end
defp send_pictures(conn) do
send_picture(conn)
send_pictures(conn)
end
defp send_picture(conn) do
jpg = Picam.next_frame
size = byte_size(jpg)
header = "------#{@boundary}\r\nContent-Type: image/jpeg\r\nContent-length: #{size}\r\n\r\n"
footer = "\r\n"
with {:ok, conn} <- chunk(conn, header),
{:ok, conn} <- chunk(conn, jpg),
{:ok, conn} <- chunk(conn, footer),
do: conn
end
end
| 24.842105 | 96 | 0.626059 |
034e8b5b44a601e3c78e44a2566558e070629e85 | 437 | ex | Elixir | lib/milbase/content/comment.ex | suryakun/milbase-skeleton | 1483142bd9ef70a9cf07504c8f03314f2cb7b7d0 | [
"Apache-2.0"
] | 1 | 2020-07-14T03:27:30.000Z | 2020-07-14T03:27:30.000Z | lib/milbase/content/comment.ex | suryakun/milbase-skeleton | 1483142bd9ef70a9cf07504c8f03314f2cb7b7d0 | [
"Apache-2.0"
] | null | null | null | lib/milbase/content/comment.ex | suryakun/milbase-skeleton | 1483142bd9ef70a9cf07504c8f03314f2cb7b7d0 | [
"Apache-2.0"
] | null | null | null | defmodule Milbase.Content.Comment do
use Ecto.Schema
alias Milbase.Account.User
alias Milbase.Content.Post
import Ecto.Changeset
schema "comments" do
field :text, :string
belongs_to :user, User
belongs_to :post, Post
timestamps()
end
@doc false
def changeset(comment, attrs) do
comment
|> cast(attrs, [:text, :post_id, :user_id])
|> validate_required([:text, :post_id, :user_id])
end
end
| 19.863636 | 53 | 0.684211 |
034e8be8515ff9e03112405b1bd4db215b377a5c | 3,332 | ex | Elixir | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p1beta1__timestamped_object.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p1beta1__timestamped_object.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p1beta1__timestamped_object.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_TimestampedObject do
@moduledoc """
For tracking related features. An object at time_offset with attributes, and located with normalized_bounding_box.
## Attributes
* `attributes` (*type:* `list(GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_DetectedAttribute.t)`, *default:* `nil`) - Optional. The attributes of the object in the bounding box.
* `landmarks` (*type:* `list(GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_DetectedLandmark.t)`, *default:* `nil`) - Optional. The detected landmarks.
* `normalizedBoundingBox` (*type:* `GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox.t`, *default:* `nil`) - Normalized Bounding box in a frame, where the object is located.
* `timeOffset` (*type:* `String.t`, *default:* `nil`) - Time-offset, relative to the beginning of the video, corresponding to the video frame for this object.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:attributes =>
list(
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_DetectedAttribute.t()
),
:landmarks =>
list(
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_DetectedLandmark.t()
),
:normalizedBoundingBox =>
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox.t(),
:timeOffset => String.t()
}
field(:attributes,
as:
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_DetectedAttribute,
type: :list
)
field(:landmarks,
as:
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_DetectedLandmark,
type: :list
)
field(:normalizedBoundingBox,
as:
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox
)
field(:timeOffset)
end
defimpl Poison.Decoder,
for:
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_TimestampedObject do
def decode(value, options) do
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_TimestampedObject.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for:
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_TimestampedObject do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.666667 | 225 | 0.751501 |
034e8e16d1447d671b56053a4010fa6e2a23828f | 6,417 | exs | Elixir | test/ueberauth_test.exs | jfornoff/ueberauth | 705cb797b0f4a9e50685541fa1e38d27d679f332 | [
"MIT"
] | null | null | null | test/ueberauth_test.exs | jfornoff/ueberauth | 705cb797b0f4a9e50685541fa1e38d27d679f332 | [
"MIT"
] | null | null | null | test/ueberauth_test.exs | jfornoff/ueberauth | 705cb797b0f4a9e50685541fa1e38d27d679f332 | [
"MIT"
] | 1 | 2015-11-11T00:57:24.000Z | 2015-11-11T00:57:24.000Z | defmodule UeberauthTest do
use ExUnit.Case, async: true
use Plug.Test
doctest Ueberauth
alias Support.SpecRouter
@opts Support.SpecRouter.init([])
test "simple request phase" do
conn = conn(:get, "/auth/simple")
resp = SpecRouter.call(conn, @opts)
assert resp.resp_body == "simple_request_phase"
end
test "simple callback phase" do
conn =
:get
|> conn("/auth/simple/callback")
|> SpecRouter.call(@opts)
auth = conn.assigns.ueberauth_auth
assert auth.uid == "Elixir.Support.SimpleCallback-uid"
assert auth.provider == :simple
assert auth.strategy == Support.SimpleCallback
assert_standard_info(auth)
assert_standard_credentials(auth)
extra = auth.extra
assert extra.raw_info.request_path == "/auth/simple"
assert extra.raw_info.callback_path == "/auth/simple/callback"
assert extra.raw_info.request_url == "http://www.example.com/auth/simple"
assert extra.raw_info.callback_url == "http://www.example.com/auth/simple/callback"
end
test "simple request and callback phase for same url but different method" do
conn = conn(:get, "/auth/post_callback_and_same_request_path")
resp = SpecRouter.call(conn, @opts)
assert resp.resp_body == "ok"
conn =
:post
|> conn("/auth/post_callback_and_same_request_path")
|> SpecRouter.call(@opts)
auth = conn.assigns.ueberauth_auth
assert auth.provider == :post_callback_and_same_request_path
end
test "redirecting a request phase without trailing slash" do
conn = conn(:get, "/auth/redirector") |> SpecRouter.call(@opts)
assert get_resp_header(conn, "location") == ["https://redirectme.example.com/foo"]
end
test "redirecting a request phase with trailing slash" do
conn = conn(:get, "/auth/redirector/") |> SpecRouter.call(@opts)
assert get_resp_header(conn, "location") == ["https://redirectme.example.com/foo"]
end
test "setting request phase path" do
conn = conn(:get, "/login") |> SpecRouter.call(@opts)
assert get_resp_header(conn, "location") == ["https://redirectme.example.com/foo"]
conn = conn(:get, "/auth/with_request_path/callback") |> SpecRouter.call(@opts)
auth = conn.assigns.ueberauth_auth
assert auth.provider == :with_request_path
assert auth.strategy == Support.Redirector
assert auth.extra.raw_info.request_path == "/login"
assert auth.extra.raw_info.callback_path == "/auth/with_request_path/callback"
end
test "setting callback phase path" do
conn = conn(:get, "/login_callback") |> SpecRouter.call(@opts)
auth = conn.assigns.ueberauth_auth
assert auth.provider == :with_callback_path
assert auth.strategy == Support.SimpleCallback
assert auth.extra.raw_info.request_path == "/auth/with_callback_path"
assert auth.extra.raw_info.callback_path == "/login_callback"
end
test "using default options" do
conn = conn(:get, "/auth/using_default_options/callback") |> SpecRouter.call(@opts)
auth = conn.assigns.ueberauth_auth
assert auth.uid == "default uid"
end
test "using custom options" do
conn = conn(:get, "/auth/using_custom_options/callback") |> SpecRouter.call(@opts)
auth = conn.assigns.ueberauth_auth
assert auth.uid == "custom uid"
end
test "returning errors" do
conn = conn(:get, "/auth/with_errors/callback") |> SpecRouter.call(@opts)
assert conn.assigns[:ueberauth_auth] == nil
assert conn.assigns[:ueberauth_failure] != nil
failure = conn.assigns.ueberauth_failure
assert failure.provider == :with_errors
assert failure.strategy == Support.WithErrors
assert length(failure.errors) == 2
[first | second] = failure.errors
second = hd(second)
assert first.message_key == "one"
assert first.message == "error one"
assert second.message_key == "two"
assert second.message == "error two"
end
test "setting the callback http method" do
conn = conn(:get, "/auth/post_callback/callback") |> SpecRouter.call(@opts)
assert conn.status == 404
assert conn.assigns[:ueberauth_auth] == nil
assert conn.assigns[:ueberauth_failure] == nil
conn = conn(:post, "/auth/post_callback/callback") |> SpecRouter.call(@opts)
assert conn.status == 200
assert conn.assigns[:ueberauth_failure] == nil
assert conn.assigns[:ueberauth_auth] != nil
auth = conn.assigns[:ueberauth_auth]
assert auth.provider == :post_callback
assert auth.strategy == Support.SimpleCallback
end
test "callback_url port" do
conn = %{conn(:get, "/") | scheme: :https, port: 80}
conn = put_private(conn, :ueberauth_request_options, callback_path: "/auth/provider/callback")
conn = %{conn | params: %{}}
assert Ueberauth.Strategy.Helpers.callback_url(conn) ==
"https://www.example.com/auth/provider/callback"
end
test "callback_url forwarded protocol" do
conn = %{
(conn(:get, "/")
|> put_req_header("x-forwarded-proto", "https"))
| scheme: :http,
port: 80
}
conn = put_private(conn, :ueberauth_request_options, callback_path: "/auth/provider/callback")
assert Ueberauth.Strategy.Helpers.callback_url(conn) ==
"https://www.example.com/auth/provider/callback"
end
test "callback_url has extra params" do
conn = conn(:get, "/")
conn = put_private(conn, :ueberauth_request_options, callback_params: ["type"])
conn = %{conn | params: %{"type" => "user", "param_2" => "param_2"}}
assert Ueberauth.Strategy.Helpers.callback_url(conn) == "http://www.example.com?type=user"
end
defp assert_standard_info(auth) do
info = auth.info
assert info.name == "Some name"
assert info.first_name == "First name"
assert info.last_name == "Last name"
assert info.nickname == "Nickname"
assert info.email == "[email protected]"
assert info.location == "Some location"
assert info.description == "Some description"
assert info.phone == "555-555-5555"
assert info.urls == %{"Blog" => "http://foo.com", "Thing" => "http://thing.com"}
end
defp assert_standard_credentials(auth) do
creds = auth.credentials
assert creds.token == "Some token"
assert creds.refresh_token == "Some refresh token"
assert creds.secret == "Some secret"
assert creds.expires == true
assert creds.expires_at == 1111
assert creds.other == %{password: "sekrit"}
end
end
| 33.596859 | 98 | 0.685679 |
034ef2aea4d2d1fb8a88e1de3896a9768640772d | 3,252 | ex | Elixir | clients/apigee/lib/google_api/apigee/v1/model/google_type_expr.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/apigee/lib/google_api/apigee/v1/model/google_type_expr.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/apigee/lib/google_api/apigee/v1/model/google_type_expr.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Apigee.V1.Model.GoogleTypeExpr do
@moduledoc """
Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec. Example (Comparison): title: "Summary size limit" description: "Determines if a summary is less than 100 chars" expression: "document.summary.size() < 100" Example (Equality): title: "Requestor is owner" description: "Determines if requestor is the document owner" expression: "document.owner == request.auth.claims.email" Example (Logic): title: "Public documents" description: "Determine whether the document should be publicly visible" expression: "document.type != 'private' && document.type != 'internal'" Example (Data Manipulation): title: "Notification string" description: "Create a notification string with a timestamp." expression: "'New message received at ' + string(document.create_time)" The exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information.
## Attributes
* `description` (*type:* `String.t`, *default:* `nil`) - Optional. Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
* `expression` (*type:* `String.t`, *default:* `nil`) - Textual representation of an expression in Common Expression Language syntax.
* `location` (*type:* `String.t`, *default:* `nil`) - Optional. String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
* `title` (*type:* `String.t`, *default:* `nil`) - Optional. Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:description => String.t() | nil,
:expression => String.t() | nil,
:location => String.t() | nil,
:title => String.t() | nil
}
field(:description)
field(:expression)
field(:location)
field(:title)
end
defimpl Poison.Decoder, for: GoogleApi.Apigee.V1.Model.GoogleTypeExpr do
def decode(value, options) do
GoogleApi.Apigee.V1.Model.GoogleTypeExpr.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Apigee.V1.Model.GoogleTypeExpr do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 58.071429 | 1,092 | 0.73524 |
034f063d0da4678c77b5b9f72aeb6417d2f15d9d | 346 | ex | Elixir | lib/gscraper/guardian/error_handler.ex | longnd/elixir-gscraper | 894570afd89e54b80ca591a56a182da55ac6ee61 | [
"MIT"
] | null | null | null | lib/gscraper/guardian/error_handler.ex | longnd/elixir-gscraper | 894570afd89e54b80ca591a56a182da55ac6ee61 | [
"MIT"
] | 25 | 2021-03-23T07:27:21.000Z | 2021-10-31T15:09:52.000Z | lib/gscraper/guardian/error_handler.ex | longnd/elixir-gscraper | 894570afd89e54b80ca591a56a182da55ac6ee61 | [
"MIT"
] | null | null | null | defmodule Gscraper.Guardian.ErrorHandler do
@behaviour Guardian.Plug.ErrorHandler
use GscraperWeb, :controller
@impl Guardian.Plug.ErrorHandler
def auth_error(conn, {_type, _reason}, _opts) do
conn
|> put_flash(:error, dgettext("auth", "Authentication required."))
|> redirect(to: Routes.session_path(conn, :new))
end
end
| 26.615385 | 70 | 0.731214 |
034f0f19c3d66afb49d5253f604446d6456d9c8e | 480 | ex | Elixir | lib/flix/protocol/events/bluetooth_controller_state_change.ex | efcasado/flix | 945fe84e6dba31b7f47d07279a97559e1094da46 | [
"Unlicense",
"MIT"
] | 1 | 2021-07-24T09:44:54.000Z | 2021-07-24T09:44:54.000Z | lib/flix/protocol/events/bluetooth_controller_state_change.ex | efcasado/flix | 945fe84e6dba31b7f47d07279a97559e1094da46 | [
"Unlicense",
"MIT"
] | 1 | 2021-07-24T07:13:40.000Z | 2021-08-02T13:44:44.000Z | lib/flix/protocol/events/bluetooth_controller_state_change.ex | efcasado/flix | 945fe84e6dba31b7f47d07279a97559e1094da46 | [
"Unlicense",
"MIT"
] | null | null | null | defmodule Flix.Protocol.Events.BluetoothControllerStateChange do
defstruct state: Flix.Protocol.Enums.BluetoothControllerState.default()
# @type t :: %__MODULE__{
# x: String.t(),
# y: boolean,
# z: integer
# }
def decode(
<<
state::unsigned-little-integer-8
>> = _binary
) do
%__MODULE__{
state: Flix.Protocol.Enums.BluetoothControllerState.from(state)
}
end
def encode(_data) do
# TODO
nil
end
end
| 19.2 | 73 | 0.629167 |
034f241075a747e44c135ea2d64bc7f79f02fa61 | 989 | exs | Elixir | mix.exs | akaKuruma/power_dnsex | a7c9a6bb86dfac8cb196e6b623ed33abfee42762 | [
"MIT"
] | 6 | 2019-05-28T16:42:19.000Z | 2021-11-06T17:44:38.000Z | mix.exs | akaKuruma/power_dnsex | a7c9a6bb86dfac8cb196e6b623ed33abfee42762 | [
"MIT"
] | 5 | 2020-04-24T18:42:13.000Z | 2021-10-01T13:34:16.000Z | mix.exs | akaKuruma/power_dnsex | a7c9a6bb86dfac8cb196e6b623ed33abfee42762 | [
"MIT"
] | 6 | 2019-11-17T21:30:05.000Z | 2021-10-01T13:11:45.000Z | defmodule PowerDNSex.Mixfile do
use Mix.Project
def project do
[
app: :powerdnsex,
version: "0.5.0",
elixir: "~> 1.6",
description: description(),
package: package(),
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
def application do
[applications: [:logger, :poison, :httpoison, :poolboy], mod: {PowerDNSex, []}]
end
defp deps do
[
{:credo, "~> 1.5", only: [:dev, :test], runtime: false},
{:poolboy, "~> 1.5"},
{:httpoison, "~> 1.5.0"},
{:poison, "~> 3.0 or ~> 4.0.1"},
{:exvcr, "~> 0.10.3", only: :test},
{:ex_doc, ">= 0.0.0", only: :dev}
]
end
defp description do
"""
A Client to integrate with PowerDNS API version 4
"""
end
defp package do
[
maintainers: ["Lindolfo Rodrigues"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/locaweb/power_dnsex"}
]
end
end
| 21.5 | 83 | 0.529828 |
034f3a53fa61a568e0473d2bfa6f4fef9a172c24 | 403 | exs | Elixir | mix.exs | marcelog/jsonex | 82e6c416eed5e791073427bf3079d7ab7b85a1e1 | [
"Apache-2.0"
] | 2 | 2015-11-05T08:03:10.000Z | 2015-12-03T04:45:25.000Z | mix.exs | marcelog/jsonex | 82e6c416eed5e791073427bf3079d7ab7b85a1e1 | [
"Apache-2.0"
] | null | null | null | mix.exs | marcelog/jsonex | 82e6c416eed5e791073427bf3079d7ab7b85a1e1 | [
"Apache-2.0"
] | null | null | null | defmodule Jsonex.Mixfile do
use Mix.Project
def project do
[ app: :jsonex,
version: "2.0.1",
deps: deps() ]
end
# Configuration for the OTP application
def application do
[]
end
# Returns the list of dependencies in the format:
# { :foobar, "0.1", git: "https://github.com/elixir-lang/foobar.git" }
defp deps do
[{:jsx, github: "talentdeficit/jsx"}]
end
end
| 19.190476 | 72 | 0.632754 |
034f569c63c1ebe09bce65273de3fd6d88ff4c29 | 1,433 | exs | Elixir | test/lib/absinthe/type/deprecation_test.exs | bruce/absinthe | 19b63d3aaa9fb75aad01ffd5e91d89e0b30d7f91 | [
"MIT"
] | 3 | 2017-06-22T16:33:58.000Z | 2021-07-07T15:21:09.000Z | test/lib/absinthe/type/deprecation_test.exs | bruce/absinthe | 19b63d3aaa9fb75aad01ffd5e91d89e0b30d7f91 | [
"MIT"
] | null | null | null | test/lib/absinthe/type/deprecation_test.exs | bruce/absinthe | 19b63d3aaa9fb75aad01ffd5e91d89e0b30d7f91 | [
"MIT"
] | null | null | null | defmodule Absinthe.Type.DeprecationTest do
use Absinthe.Case, async: true
alias Absinthe.Type
defmodule TestSchema do
use Absinthe.Schema
query do
#Query type must exist
end
input_object :profile do
description "A profile"
field :name, :string
field :profile_picture,
type: :string,
args: [
width: [type: :integer],
height: [type: :integer],
size: [type: :string, deprecate: "Not explicit enough"],
source: [type: :string, deprecate: true]
]
field :email_address, :string do
deprecate "privacy"
end
field :address, :string, deprecate: true
end
end
context "fields" do
it "can be deprecated" do
obj = TestSchema.__absinthe_type__(:profile)
assert Type.deprecated?(obj.fields.email_address)
assert "privacy" == obj.fields.email_address.deprecation.reason
assert Type.deprecated?(obj.fields.address)
assert nil == obj.fields.address.deprecation.reason
end
end
context "arguments" do
it "can be deprecated" do
field = TestSchema.__absinthe_type__(:profile).fields.profile_picture
assert Type.deprecated?(field.args.size)
assert "Not explicit enough" == field.args.size.deprecation.reason
assert Type.deprecated?(field.args.source)
assert nil == field.args.source.deprecation.reason
end
end
end
| 23.112903 | 75 | 0.652477 |
034f6dd1297ada0028bbb286d58f1835fcafc07a | 1,958 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/reference.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/reference.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/reference.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.Reference do
@moduledoc """
Represents a reference to a resource.
## Attributes
- kind (String.t): [Output Only] Type of the resource. Always compute#reference for references. Defaults to: `null`.
- referenceType (String.t): A description of the reference type with no implied semantics. Possible values include: - MEMBER_OF Defaults to: `null`.
- referrer (String.t): URL of the resource which refers to the target. Defaults to: `null`.
- target (String.t): URL of the resource to which this reference points. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:kind => any(),
:referenceType => any(),
:referrer => any(),
:target => any()
}
field(:kind)
field(:referenceType)
field(:referrer)
field(:target)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.Reference do
def decode(value, options) do
GoogleApi.Compute.V1.Model.Reference.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.Reference do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.350877 | 152 | 0.719612 |
034f6fad2c0dee21313dd361e8b928da97e6f804 | 600 | ex | Elixir | web/router.ex | amohamedali/bbc_schedulor_phoenix | 0eb828ca7da4cfecd57e8ad8085acbd625a3c2fa | [
"MIT"
] | null | null | null | web/router.ex | amohamedali/bbc_schedulor_phoenix | 0eb828ca7da4cfecd57e8ad8085acbd625a3c2fa | [
"MIT"
] | null | null | null | web/router.ex | amohamedali/bbc_schedulor_phoenix | 0eb828ca7da4cfecd57e8ad8085acbd625a3c2fa | [
"MIT"
] | null | null | null | defmodule BbcSchedulorPhoenix.Router do
use BbcSchedulorPhoenix.Web, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", BbcSchedulorPhoenix do
pipe_through :browser # Use the default browser stack
get "/", PageController, :index
resources "/users", UserController
end
# Other scopes may use custom stacks.
# scope "/api", BbcSchedulorPhoenix do
# pipe_through :api
# end
end
| 21.428571 | 57 | 0.701667 |
034f78dc1ea3c24553771bf1d316fe36df678ec2 | 2,461 | ex | Elixir | lib/mdns_lite/inet_monitor.ex | jjcarstens/mdns_lite | 4b4046b913429a8385dc157876da083141540ede | [
"Apache-2.0"
] | null | null | null | lib/mdns_lite/inet_monitor.ex | jjcarstens/mdns_lite | 4b4046b913429a8385dc157876da083141540ede | [
"Apache-2.0"
] | null | null | null | lib/mdns_lite/inet_monitor.ex | jjcarstens/mdns_lite | 4b4046b913429a8385dc157876da083141540ede | [
"Apache-2.0"
] | null | null | null | defmodule MdnsLite.InetMonitor do
use GenServer
require Logger
alias MdnsLite.{Responder, ResponderSupervisor}
@scan_interval 10000
@moduledoc false
# Watch :inet.getifaddrs/0 for IP address changes and update the active responders.
defmodule State do
@moduledoc false
defstruct [:excluded_ifnames, :ipv4_only, :ip_list]
end
@doc """
Start watching for changes on the specified network interfaces.
Parameters
* `:excluded_ifnames` - the list of interface names not to watch
* `:ipv4_only` - limit notifications to IPv4 addresses
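## Example
A hypothetical call (interface names depend on the host system):
    MdnsLite.InetMonitor.start_link(excluded_ifnames: ["lo0", "lo"], ipv4_only: true)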
"""
@spec start_link(excluded_ifnames: [String.t()], ipv4_only: boolean()) :: GenServer.on_start()
def start_link(init_args) do
GenServer.start_link(__MODULE__, init_args, name: __MODULE__)
end
@impl true
def init(args) do
excluded_ifnames = Keyword.get(args, :excluded_ifnames, [])
excluded_ifnames_cl = Enum.map(excluded_ifnames, &to_charlist/1)
ipv4_only = Keyword.get(args, :ipv4_only, true)
state = %State{excluded_ifnames: excluded_ifnames_cl, ip_list: [], ipv4_only: ipv4_only}
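# Returning a 1 ms timeout triggers handle_info(:timeout, ...) so the first scan runs almost immediately.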
{:ok, state, 1}
end
@impl true
def handle_info(:timeout, state) do
new_state = update(state)
{:noreply, new_state, @scan_interval}
end
defp update(state) do
new_ip_list =
get_all_ip_addrs()
|> filter_excluded_ifnames(state.excluded_ifnames)
|> filter_by_ipv4(state.ipv4_only)
removed_ips = state.ip_list -- new_ip_list
added_ips = new_ip_list -- state.ip_list
Enum.each(removed_ips, fn {_ifname, addr} -> Responder.stop_server(addr) end)
Enum.each(added_ips, fn {_ifname, addr} -> ResponderSupervisor.start_child(addr) end)
%State{state | ip_list: new_ip_list}
end
defp filter_excluded_ifnames(ip_list, ifnames) do
Enum.filter(ip_list, fn {ifname, _addr} -> ifname not in ifnames end)
end
defp filter_by_ipv4(ip_list, false) do
ip_list
end
defp filter_by_ipv4(ip_list, true) do
Enum.filter(ip_list, fn {_ifname, addr} -> MdnsLite.Utilities.ip_family(addr) == :inet end)
end
defp get_all_ip_addrs() do
case :inet.getifaddrs() do
{:ok, ifaddrs} ->
ifaddrs_to_ip_list(ifaddrs)
_error ->
[]
end
end
defp ifaddrs_to_ip_list(ifaddrs) do
Enum.flat_map(ifaddrs, &ifaddr_to_ip_list/1)
end
defp ifaddr_to_ip_list({ifname, info}) do
for addr <- Keyword.get_values(info, :addr) do
{ifname, addr}
end
end
end
| 25.371134 | 96 | 0.701341 |
034f7b7bb58269e6bb9a2cae4f6270bd3f2a7ea8 | 2,153 | exs | Elixir | test/scenic/primitive/line_test.exs | mikeover/scenic | 4b61c4996ed2d06b8cdf94f88c8a0522160e10b5 | [
"Apache-2.0"
] | null | null | null | test/scenic/primitive/line_test.exs | mikeover/scenic | 4b61c4996ed2d06b8cdf94f88c8a0522160e10b5 | [
"Apache-2.0"
] | null | null | null | test/scenic/primitive/line_test.exs | mikeover/scenic | 4b61c4996ed2d06b8cdf94f88c8a0522160e10b5 | [
"Apache-2.0"
] | null | null | null | #
# Created by Boyd Multerer on 5/8/17. Re-written on 11/01/17
# Copyright © 2017 Kry10 Industries. All rights reserved.
#
defmodule Scenic.Primitive.LineTest do
use ExUnit.Case, async: true
doctest Scenic
alias Scenic.Primitive
alias Scenic.Primitive.Line
@data {{10, 12}, {40, 80}}
# ============================================================================
# build / add
test "build works" do
p = Line.build(@data)
assert p.module == Line
assert Primitive.get(p) == @data
end
# ============================================================================
# verify
test "verify passes valid data" do
assert Line.verify(@data) == {:ok, @data}
end
test "verify fails invalid data" do
assert Line.verify({{10, 12}, 40, 80}) == :invalid_data
assert Line.verify({10, 12, 40, 80}) == :invalid_data
assert Line.verify({10, 40, 80}) == :invalid_data
assert Line.verify({{10, 12}, {40, :banana}}) == :invalid_data
assert Line.verify({{10, :banana}, {40, 80}}) == :invalid_data
assert Line.verify(:banana) == :invalid_data
end
# ============================================================================
# styles
test "valid_styles works" do
assert Line.valid_styles() == [:hidden, :stroke, :cap]
end
# ============================================================================
# transform helpers
test "default_pin returns the center of the line" do
assert Line.default_pin(@data) == {25, 46}
end
test "centroid returns the center of the line" do
assert Line.centroid(@data) == {25, 46}
end
# test "expand makes the line longer" do
# assert Line.expand({{100,100},{200,100}}, 10) == {{90,100},{220,100}}
# assert Line.expand({{100,100},{100,200}}, 10) == {{100,90},{100,210}}
# end
# ============================================================================
# point containment
test "contains_point? always returns false" do
assert Line.contains_point?(@data, {30, 52}) == false
assert Line.contains_point?(@data, {10, 12}) == false
assert Line.contains_point?(@data, {40, 80}) == false
end
end
| 30.323944 | 80 | 0.520669 |
034f92e731f48b462a00f13665b00f974a309f38 | 1,523 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/file_content_buffer.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/compute/lib/google_api/compute/v1/model/file_content_buffer.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/compute/lib/google_api/compute/v1/model/file_content_buffer.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.FileContentBuffer do
@moduledoc """
## Attributes
* `content` (*type:* `String.t`, *default:* `nil`) - The raw content in the secure keys file.
* `fileType` (*type:* `String.t`, *default:* `nil`) - The file type of source file.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:content => String.t() | nil,
:fileType => String.t() | nil
}
field(:content)
field(:fileType)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.FileContentBuffer do
def decode(value, options) do
GoogleApi.Compute.V1.Model.FileContentBuffer.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.FileContentBuffer do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 30.46 | 97 | 0.715693 |
034fa96b5c15c1950dc010e7309e2d2fa13d10a8 | 1,912 | exs | Elixir | mix.exs | theblitzapp/prometheus_telemetry_elixir | a2c81e84f832c622ac5eb98bca89526a52cca3f0 | [
"MIT"
] | 3 | 2022-03-20T00:21:06.000Z | 2022-03-31T13:40:20.000Z | mix.exs | theblitzapp/prometheus_telemetry_elixir | a2c81e84f832c622ac5eb98bca89526a52cca3f0 | [
"MIT"
] | null | null | null | mix.exs | theblitzapp/prometheus_telemetry_elixir | a2c81e84f832c622ac5eb98bca89526a52cca3f0 | [
"MIT"
] | null | null | null | defmodule PrometheusTelemetry.MixProject do
use Mix.Project
def project do
[
app: :prometheus_telemetry,
version: "0.2.0",
elixir: "~> 1.12",
description: "Prometheus metrics exporter using Telemetry.Metrics as a foundation",
start_permanent: Mix.env() == :prod,
elixirc_paths: elixirc_paths(Mix.env()),
deps: deps(),
docs: docs(),
package: package()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:telemetry_metrics_prometheus_core, "~> 1.0"},
{:telemetry_metrics, "~> 0.6"},
{:telemetry_poller, "~> 1.0"},
{:nimble_options, "~> 0.4"},
{:absinthe, "~> 1.7", optional: true},
{:plug, "~> 1.8"},
{:plug_cowboy, "~> 2.5"},
{:ex_doc, ">= 0.0.0", only: :dev},
{:faker, "~> 0.17", only: [:test, :dev]},
{:finch, "~> 0.11", only: :test}
]
end
defp package do
[
maintainers: ["Mika Kalathil"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/theblitzapp/prometheus_telemetry_elixir"},
files: ~w(mix.exs README.md CHANGELOG.md LICENSE lib config)
]
end
defp docs do
[
main: "PrometheusTelemetry",
source_url: "https://github.com/theblitzapp/prometheus_telemetry_elixir",
groups_for_modules: [
"General": [
PrometheusTelemetry,
PrometheusTelemetry.MetricsExporterPlug
],
"Metrics": [
PrometheusTelemetry.Metrics.Ecto,
PrometheusTelemetry.Metrics.GraphQL,
PrometheusTelemetry.Metrics.Phoenix,
PrometheusTelemetry.Metrics.VM
]
]
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
end
| 24.831169 | 89 | 0.585251 |
034ffc6e8c1646415478633b5f32e77b6b1d01b9 | 377 | exs | Elixir | examples/elixir/bloom_example/mix.exs | evanmcc/erbloom | a1768fa6a63e979017fe94ee86a86164e3c21d88 | [
"MIT"
] | 45 | 2018-02-18T21:54:08.000Z | 2022-02-23T03:18:49.000Z | examples/elixir/bloom_example/mix.exs | evanmcc/erbloom | a1768fa6a63e979017fe94ee86a86164e3c21d88 | [
"MIT"
] | 4 | 2020-02-14T16:11:52.000Z | 2020-09-27T07:58:29.000Z | examples/elixir/bloom_example/mix.exs | evanmcc/erbloom | a1768fa6a63e979017fe94ee86a86164e3c21d88 | [
"MIT"
] | 11 | 2018-02-19T15:17:05.000Z | 2021-07-19T08:10:16.000Z | defmodule BloomExample.MixProject do
use Mix.Project
def project do
[
app: :bloom_example,
version: "0.1.0",
elixir: "~> 1.10",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
def application do
[
extra_applications: [:logger]
]
end
defp deps do
[
{:erbloom, "~> 2.1.0-rc.2"}
]
end
end
| 14.5 | 42 | 0.533156 |
035024a1b513102f5581a9284ffd96ffbc108ae6 | 1,767 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_sentiment_analysis_request_config.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_sentiment_analysis_request_config.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_sentiment_analysis_request_config.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2SentimentAnalysisRequestConfig do
@moduledoc """
Configures the types of sentiment analysis to perform.
## Attributes
* `analyzeQueryTextSentiment` (*type:* `boolean()`, *default:* `nil`) - Instructs the service to perform sentiment analysis on
`query_text`. If not provided, sentiment analysis is not performed on
`query_text`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:analyzeQueryTextSentiment => boolean()
}
field(:analyzeQueryTextSentiment)
end
defimpl Poison.Decoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2SentimentAnalysisRequestConfig do
def decode(value, options) do
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2SentimentAnalysisRequestConfig.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2SentimentAnalysisRequestConfig do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.722222 | 130 | 0.758913 |
0350794136373da1706713d44a531dcbbf40d651 | 437 | ex | Elixir | web/models/person.ex | geowa4/graphql-elixir-phoenix-preso | 9d0de23a09506e46a4a0bf71db26e4691f254849 | [
"MIT"
] | null | null | null | web/models/person.ex | geowa4/graphql-elixir-phoenix-preso | 9d0de23a09506e46a4a0bf71db26e4691f254849 | [
"MIT"
] | null | null | null | web/models/person.ex | geowa4/graphql-elixir-phoenix-preso | 9d0de23a09506e46a4a0bf71db26e4691f254849 | [
"MIT"
] | null | null | null | defmodule HelloPhoenix.Person do
@moduledoc """
"""
use HelloPhoenix.Web, :model
schema "people" do
field :given_name, :string
field :family_name, :string
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:given_name, :family_name])
|> validate_required([:given_name, :family_name])
end
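  # Usage sketch (the params below are illustrative, not taken from the app):
  #
  #     HelloPhoenix.Person.changeset(
  #       %HelloPhoenix.Person{},
  #       %{given_name: "Ada", family_name: "Lovelace"}
  #     )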
end
| 19.863636 | 56 | 0.645309 |
0350a25fc722e5200271ae385ac1748b7ddfc6b7 | 2,493 | ex | Elixir | lib/ecto_ltree/postgrex/ltree.ex | RomanKotov/ecto_ltree | b2863fd31a3304ec07b9c43e09f5c985d8652deb | [
"MIT"
] | 24 | 2018-04-03T11:36:54.000Z | 2021-09-21T09:37:15.000Z | lib/ecto_ltree/postgrex/ltree.ex | RomanKotov/ecto_ltree | b2863fd31a3304ec07b9c43e09f5c985d8652deb | [
"MIT"
] | 8 | 2019-06-01T13:49:37.000Z | 2022-03-29T04:18:39.000Z | lib/ecto_ltree/postgrex/ltree.ex | RomanKotov/ecto_ltree | b2863fd31a3304ec07b9c43e09f5c985d8652deb | [
"MIT"
] | 10 | 2019-02-01T16:52:48.000Z | 2022-02-23T03:46:09.000Z | # Copyright 2013 Eric Meadows-Jönsson
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
defmodule EctoLtree.Postgrex.Ltree do
@moduledoc """
This module provides the necessary functions to encode and decode PostgreSQL’s `ltree` data type to and from Elixir values.
Implements the Postgrex.Extension behaviour.
"""
@behaviour Postgrex.Extension
# It can be memory efficient to copy the decoded binary because a
# reference counted binary that points to a larger binary will be passed
# to the decode/4 callback. Copying the binary can allow the larger
# binary to be garbage collected sooner if the copy is going to be kept
# for a longer period of time. See [`:binary.copy/1`](http://www.erlang.org/doc/man/binary.html#copy-1) for more
# information.
def init(opts) do
Keyword.get(opts, :decode_copy, :copy)
end
# Use this extension when `type` from %Postgrex.TypeInfo{} is "ltree"
def matching(_state), do: [type: "ltree"]
# Use the text format, "ltree" does not have a binary format.
def format(_state), do: :text
# Use quoted expression to encode a string that is the same as
# postgresql's ltree text format. The quoted expression should contain
# clauses that match those of a `case` or `fn`. Encoding matches on the
# value and returns encoded `iodata()`. The first 4 bytes in the
# `iodata()` must be the byte size of the rest of the encoded data, as a
# signed 32bit big endian integer.
def encode(_state) do
quote do
bin when is_binary(bin) ->
[<<byte_size(bin)::signed-size(32)>> | bin]
end
end
# Use quoted expression to decode the data to a string. Decoding matches
# on an encoded binary with the same signed 32bit big endian integer
# length header.
def decode(:reference) do
quote do
<<len::signed-size(32), bin::binary-size(len)>> ->
bin
end
end
def decode(:copy) do
quote do
<<len::signed-size(32), bin::binary-size(len)>> ->
:binary.copy(bin)
end
end
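  # Usage sketch (the types module name is illustrative): register this extension
  # with Postgrex so "ltree" columns can be encoded/decoded, for example alongside
  # the default Ecto extensions.
  #
  #     Postgrex.Types.define(
  #       MyApp.PostgrexTypes,
  #       [EctoLtree.Postgrex.Ltree] ++ Ecto.Adapters.Postgres.extensions(),
  #       []
  #     )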
end
| 40.209677 | 307 | 0.715203 |
0350c37cef152f6f7383e20d011229c2cdcde486 | 2,906 | ex | Elixir | lib/ueberauth/strategy/spotify/oauth.ex | davejlong/ueberauth_spotify | 2e44e4136a8236ce85f4cde4599f2d6f414a5314 | [
"MIT"
] | 2 | 2017-12-02T18:55:13.000Z | 2019-05-16T09:27:51.000Z | lib/ueberauth/strategy/spotify/oauth.ex | davejlong/ueberauth_spotify | 2e44e4136a8236ce85f4cde4599f2d6f414a5314 | [
"MIT"
] | 2 | 2017-12-02T21:09:33.000Z | 2019-01-22T02:04:50.000Z | lib/ueberauth/strategy/spotify/oauth.ex | davejlong/ueberauth_spotify | 2e44e4136a8236ce85f4cde4599f2d6f414a5314 | [
"MIT"
] | 3 | 2017-12-06T15:13:10.000Z | 2019-01-21T02:14:40.000Z | defmodule Ueberauth.Strategy.Spotify.OAuth do
@moduledoc """
OAuth2 for Spotify.
Add `client_id` and `client_secret` to your configuration:
config :ueberauth, Ueberauth.Strategy.Spotify.OAuth,
client_id: System.get_env("SPOTIFY_CLIENT_ID"),
client_secret: System.get_env("SPOTIFY_CLIENT_SECRET")
"""
use OAuth2.Strategy
@account_url "https://accounts.spotify.com"
@defaults [
strategy: __MODULE__,
site: "https://api.spotify.com/",
authorize_url: "#{@account_url}/authorize",
token_url: "#{@account_url}/api/token"
]
@doc """
  Construct a client for requests to Spotify.
  This will be set up automatically for you in `Ueberauth.Strategy.Spotify`.
  These options are only useful outside the normal callback phase of Ueberauth.
Examples:
iex> Ueberauth.Strategy.Spotify.OAuth.client().__struct__
OAuth2.Client
"""
def client(opts \\ []) do
@defaults
|> Keyword.merge(config())
|> Keyword.merge(opts)
|> OAuth2.Client.new
end
@doc """
  Provides the authorize URL for the request phase of Ueberauth. You rarely need to call this directly.
Examples:
iex> Ueberauth.Strategy.Spotify.OAuth.authorize_url!() =~ ~r/^https:\\/\\/accounts.spotify.com\\/authorize/
true
"""
def authorize_url!(params \\ [], opts \\ []) do
opts
|> client
|> OAuth2.Client.authorize_url!(params)
end
@doc """
Gets the Access Token from Spotify
"""
def get_token!(params \\ [], opts \\ []) do
    client =
      opts
      |> client()
      |> OAuth2.Client.get_token!(params)

    client.token
end
@doc """
Helper method to query Spotify API endpoints
"""
def get(token, url, headers \\ [], opts \\ []) do
[token: token]
|> client
|> OAuth2.Client.get(url, headers, opts)
end
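  # Example sketch (endpoint and variable names are illustrative): fetch the
  # authenticated user's profile with a previously obtained access token.
  #
  #     {:ok, %OAuth2.Response{body: profile}} =
  #       Ueberauth.Strategy.Spotify.OAuth.get(token, "/v1/me")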
@doc false
def authorize_url(client, params) do
OAuth2.Strategy.AuthCode.authorize_url(client, params)
end
def get_token(client, params, headers) do
client
|> put_param("client_secret", client.client_secret)
|> put_header("Accept", "application/json")
|> OAuth2.Strategy.AuthCode.get_token(params, headers)
end
def get_token_with_refresh(refresh_token, redirect_uri) do
client = client()
opts = [
redirect_uri: redirect_uri,
strategy: OAuth2.Strategy.Refresh
]
    opts
    |> client()
    |> put_param("grant_type", "refresh_token")
    |> put_param("refresh_token", refresh_token)
    |> put_header("Accept", "application/json")
    |> put_header(
      "Authorization",
      "Basic #{Base.encode64(client.client_id <> ":" <> client.client_secret)}"
    )
    |> OAuth2.Client.get_token!([])
end
defp config, do: Application.get_env(:ueberauth, Ueberauth.Strategy.Spotify.OAuth, [])
end
| 27.158879 | 114 | 0.642464 |
0350db4f66caaccc1da7bcc2749a052828cbe1e4 | 19,832 | ex | Elixir | lib/ex_hl7/codec.ex | workpathco/ex_hl7 | 20f2fadb158e903cf1752f69cd0ecdeae377c2c3 | [
"Apache-2.0"
] | 38 | 2015-06-21T17:44:44.000Z | 2021-10-03T08:46:08.000Z | lib/ex_hl7/codec.ex | workpathco/ex_hl7 | 20f2fadb158e903cf1752f69cd0ecdeae377c2c3 | [
"Apache-2.0"
] | 2 | 2019-08-27T17:27:37.000Z | 2021-02-05T14:27:28.000Z | lib/ex_hl7/codec.ex | workpathco/ex_hl7 | 20f2fadb158e903cf1752f69cd0ecdeae377c2c3 | [
"Apache-2.0"
] | 14 | 2016-02-04T15:11:55.000Z | 2021-11-13T20:28:19.000Z | defmodule HL7.Codec do
@moduledoc """
Functions that decode and encode HL7 fields, repetitions, components and subcomponents.
  Each type of item has an intermediate representation that will vary depending on whether the `trim`
option was used when decoding or encoding. If we set `trim` to `true`, some trailing optional
items and separators will be omitted from the decoded or encoded result, as we can see in the
following example:
iex> text = "504599^223344&&IIN&^~"
...> decode_field!(text, separators(), trim: true)
{"504599", {"223344", "", "IIN"}}
...> decode_field!(text, separators(), trim: false)
[{"504599", {"223344", "", "IIN", ""}, ""}, ""]
Both representations are correct, given that HL7 allows trailing items that are empty to be
omitted. This causes an ambiguity because the same item can be interpreted in several ways when
it is the first and only item present.
For example, in the following HL7 segment the item in the third field (`504599`) might be the
same in both cases (i.e. the first component of the second field):
1. `AUT||504599^^||||0000190447|^||`
2. `AUT||504599||||0000190447|^||`
But for this module it has two different representations:
1. First component of the second field
2. Second field
To resolve the ambiguity in the HL7 syntax, the code decoding and encoding HL7 segments using the
functions in this module must be aware of this issue and deal with it accordingly when performing
lookups or comparisons.
"""
  alias HL7.Type
@separators {?|, ?^, ?&, ?~}
@null_value "\"\""
@doc """
Return the default separators used to encode HL7 messages in their compiled
format. These are:
* `|`: field separator
* `^`: component separator
* `&`: subcomponent separator
* `~`: repetition separator
To use custom separators in a message use `HL7.Codec.set_separators/1` and pass the returned
value as argument to the encoding functions.
"""
@spec separators() :: Type.separators()
def separators(), do: @separators
@spec set_separators(Keyword.t()) :: Type.separators()
def set_separators(args) do
field = Keyword.get(args, :field, ?|)
component = Keyword.get(args, :component, ?^)
subcomponent = Keyword.get(args, :subcomponent, ?&)
repetition = Keyword.get(args, :repetition, ?~)
{field, component, subcomponent, repetition}
end
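  # For example, the standard HL7 separators can be reproduced explicitly:
  #
  #     set_separators(field: ?|, component: ?^, subcomponent: ?&, repetition: ?~)
  #     #=> {?|, ?^, ?&, ?~}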
@compile {:inline, separator: 2}
@doc "Return the separator corresponding to an item type."
@spec separator(Type.item_type(), Type.separators()) :: byte
def separator(item_type, separators \\ @separators)
def separator(:field, {char, _, _, _}), do: char
def separator(:component, {_, char, _, _}), do: char
def separator(:subcomponent, {_, _, char, _}), do: char
def separator(:repetition, {_, _, _, char}), do: char
@compile {:inline, match_separator: 2}
@spec match_separator(byte, Type.separators()) :: {:match, Type.item_type()} | :nomatch
def match_separator(char, separators \\ @separators)
def match_separator(char, {char, _, _, _}), do: {:match, :field}
def match_separator(char, {_, char, _, _}), do: {:match, :component}
def match_separator(char, {_, _, char, _}), do: {:match, :subcomponent}
def match_separator(char, {_, _, _, char}), do: {:match, :repetition}
def match_separator(_char, _separators), do: :nomatch
@doc """
Checks if a value is empty. A value is considered empty when it is `nil` or an empty string.
"""
defmacro empty?(value) do
quote do
unquote(value) === "" or unquote(value) === nil
end
end
@doc """
Decode a binary holding an HL7 field into its intermediate representation (IR).
## Examples
iex> decode_field!("PREPAGA^112233^IIN")
{"PREPAGA", "112233", "IIN"}
...> decode_field!("112233~IIN")
["112233", "IIN"]
...> decode_field!("\"\"")
nil
...> decode_field!("")
""
"""
@spec decode_field!(binary, Type.separators(), trim :: boolean) :: Type.field() | no_return
def decode_field!(field, separators \\ @separators, trim \\ true)
def decode_field!("", _separators, _trim), do: ""
def decode_field!(@null_value, _separators, _trim), do: nil
def decode_field!(value, separators, trim) when is_binary(value) do
rep_sep = separator(:repetition, separators)
case :binary.split(value, <<rep_sep>>, split_options(trim)) do
[field] ->
decode_components!(field, separators, trim)
repetitions ->
for repetition <- repetitions do
decode_components!(repetition, separators, trim)
end
end
end
@doc """
Decode a binary holding one or more HL7 components into its intermediate representation.
"""
@spec decode_components!(binary, Type.separators(), trim :: boolean) ::
Type.component() | no_return
def decode_components!(components, separators \\ @separators, trim \\ true)
def decode_components!("", _separators, _trim), do: ""
def decode_components!(@null_value, _separators, _trim), do: nil
def decode_components!(field, separators, trim) do
comp_sep = separator(:component, separators)
case :binary.split(field, <<comp_sep>>, split_options(trim)) do
[component] ->
case decode_subcomponents!(component, separators, trim) do
components when is_tuple(components) ->
{components}
components ->
components
end
components ->
for component <- components do
decode_subcomponents!(component, separators, trim)
end
|> case do
[] -> ""
components -> List.to_tuple(components)
end
end
end
@doc """
Decode a binary holding one or more HL7 subcomponents into its intermediate representation.
"""
@spec decode_subcomponents!(binary, Type.separators(), trim :: boolean) ::
Type.subcomponent() | no_return
def decode_subcomponents!(component, separators \\ @separators, trim \\ true)
def decode_subcomponents!("", _separators, _trim), do: ""
def decode_subcomponents!(@null_value, _separators, _trim), do: nil
def decode_subcomponents!(component, separators, trim) do
subcomp_sep = separator(:subcomponent, separators)
case :binary.split(component, <<subcomp_sep>>, split_options(trim)) do
[subcomponent] ->
subcomponent
subcomponents ->
subcomponents
|> Enum.map(&decode_value!(&1))
|> case do
[] -> ""
subcomponents -> List.to_tuple(subcomponents)
end
end
end
@spec decode_value!(Type.field(), Type.value_type()) :: Type.value() | nil | no_return
def decode_value!(value, type \\ :string)
def decode_value!(@null_value, _type), do: nil
def decode_value!(value, type)
when type === :string or
(value === "" and
(type === :integer or type === :float or type === :date or type === :datetime)) do
# Empty fields have to be passed to the composite field module to insert the corresponding
# struct in the corresponding field.
value
end
def decode_value!(value, :integer), do: :erlang.binary_to_integer(value)
def decode_value!(value, :float), do: binary_to_float!(value)
def decode_value!(value, :date), do: binary_to_date!(value)
def decode_value!(value, :datetime), do: binary_to_datetime!(value)
def decode_value!(value, type) do
raise ArgumentError, "cannot decode value #{inspect(value)} with type #{inspect(type)}"
end
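  # Typed decoding sketches (values are illustrative):
  #
  #     decode_value!("1750", :integer)  #=> 1750
  #     decode_value!("20120823", :date) #=> ~D[2012-08-23]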
defp binary_to_float!(value) do
value |> Float.parse() |> elem(0)
end
defp binary_to_date!(
<<y::binary-size(4), m::binary-size(2), d::binary-size(2), _rest::binary>> = value
) do
year = :erlang.binary_to_integer(y)
month = :erlang.binary_to_integer(m)
day = :erlang.binary_to_integer(d)
case Date.new(year, month, day) do
{:ok, date} -> date
{:error, _reason} -> raise ArgumentError, "invalid date: #{value}"
end
end
defp binary_to_date!(value) do
raise ArgumentError, "invalid date: #{value}"
end
defp binary_to_datetime!(value) do
case value do
<<y::binary-size(4), m::binary-size(2), d::binary-size(2), time::binary>> ->
year = :erlang.binary_to_integer(y)
month = :erlang.binary_to_integer(m)
day = :erlang.binary_to_integer(d)
{hour, min, sec} =
case time do
<<h::binary-size(2), mm::binary-size(2), s::binary-size(2)>> ->
{:erlang.binary_to_integer(h), :erlang.binary_to_integer(mm),
:erlang.binary_to_integer(s)}
<<h::binary-size(2), mm::binary-size(2)>> ->
{:erlang.binary_to_integer(h), :erlang.binary_to_integer(mm), 0}
_ ->
{0, 0, 0}
end
case NaiveDateTime.new(year, month, day, hour, min, sec) do
{:ok, datetime} -> datetime
{:error, _reason} -> raise ArgumentError, "invalid datetime: #{value}"
end
_ ->
raise ArgumentError, "invalid datetime: #{value}"
end
end
@spec encode_field!(Type.field(), Type.separators(), trim :: boolean) :: iodata | no_return
def encode_field!(field, separators \\ @separators, trim \\ true)
def encode_field!(field, _separators, _trim) when is_binary(field), do: field
def encode_field!(nil, _separators, _trim), do: @null_value
def encode_field!(repetitions, separators, trim) when is_list(repetitions),
do: encode_repetitions!(repetitions, separators, trim, [])
def encode_field!(components, separators, trim) when is_tuple(components),
do: encode_components!(components, separators, trim)
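  # Encoding mirrors decoding and produces iodata, e.g.:
  #
  #     encode_field!({"PREPAGA", "112233", "IIN"}) |> IO.iodata_to_binary()
  #     #=> "PREPAGA^112233^IIN"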
defp encode_repetitions!([repetition | tail], separators, trim, acc)
when not is_list(repetition) do
value = encode_field!(repetition, separators, trim)
acc =
case acc do
[] -> [value]
[_ | _] -> [value, separator(:repetition, separators) | acc]
end
encode_repetitions!(tail, separators, trim, acc)
end
defp encode_repetitions!([], separators, trim, acc) do
acc
|> maybe_trim_item(separator(:repetition, separators), trim)
|> Enum.reverse()
end
@spec encode_components!(Type.component(), Type.separators(), trim :: boolean) ::
iodata | no_return
def encode_components!(components, separators \\ @separators, trim \\ true) do
subencoder = &encode_subcomponents!(&1, separators, trim)
encode_subitems(components, subencoder, separator(:component, separators), trim)
end
@spec encode_subcomponents!(Type.subcomponent(), Type.separators(), trim :: boolean) ::
iodata | no_return
def encode_subcomponents!(subcomponents, separators \\ @separators, trim \\ true) do
encode_subitems(subcomponents, &encode_value!/1, separator(:subcomponent, separators), trim)
end
defp encode_subitems(item, _subencoder, _separator, _trim) when is_binary(item), do: item
defp encode_subitems(nil, _subencoder, _separator, _trim), do: @null_value
defp encode_subitems(items, subencoder, separator, trim) when is_tuple(items),
do:
_encode_subitems(
items,
subencoder,
separator,
trim,
non_empty_tuple_size(items, trim),
0,
[]
)
defp _encode_subitems(items, subencoder, separator, trim, size, index, acc) when index < size do
value = subencoder.(elem(items, index))
acc =
case acc do
[] -> [value]
[_ | _] -> [value, separator | acc]
end
_encode_subitems(items, subencoder, separator, trim, size, index + 1, acc)
end
defp _encode_subitems(_items, _subencoder, separator, trim, _size, _index, acc) do
acc
|> maybe_trim_item(separator, trim)
|> Enum.reverse()
end
@spec encode_value!(Type.value() | nil, Type.value_type() | nil) :: binary | no_return
def encode_value!(value, type \\ :string)
def encode_value!(nil, _type), do: @null_value
def encode_value!(value, type) when type === :string or value === "", do: value
def encode_value!(value, :integer) when is_integer(value), do: :erlang.integer_to_binary(value)
def encode_value!(value, :float) when is_float(value), do: Float.to_string(value)
def encode_value!(value, :date) when is_map(value), do: format_date!(value)
def encode_value!(value, :datetime) when is_map(value), do: format_datetime(value)
def encode_value!(value, type) do
raise ArgumentError, "cannot encode value #{inspect(value)} with type #{inspect(type)}"
end
def format_date!(%Date{year: year, month: month, day: day}) do
format_date(year, month, day)
end
def format_date!(%NaiveDateTime{year: year, month: month, day: day}) do
format_date(year, month, day)
end
def format_date!(date) do
raise ArgumentError, "invalid date: #{inspect(date)}"
end
defp format_date(year, month, day) do
yyyy = zpad(year, 4)
mm = zpad(month, 2)
dd = zpad(day, 2)
<<yyyy::binary, mm::binary, dd::binary>>
end
def format_datetime(%NaiveDateTime{
year: year,
month: month,
day: day,
hour: hour,
minute: min,
second: sec
}) do
format_datetime(year, month, day, hour, min, sec)
end
def format_datetime(%Date{year: year, month: month, day: day}) do
format_datetime(year, month, day, 0, 0, 0)
end
def format_datetime(datetime) do
raise ArgumentError, "invalid datetime #{inspect(datetime)}"
end
defp format_datetime(year, month, day, hour, min, sec) do
yyyy = zpad(year, 4)
m = zpad(month, 2)
dd = zpad(day, 2)
hh = zpad(hour, 2)
mm = zpad(min, 2)
if sec === 0 do
<<yyyy::binary, m::binary, dd::binary, hh::binary, mm::binary>>
else
ss = zpad(sec, 2)
<<yyyy::binary, m::binary, dd::binary, hh::binary, mm::binary, ss::binary>>
end
end
defp zpad(value, length) do
value
|> Integer.to_string()
|> String.pad_leading(length, "0")
end
@doc """
Escape a string that may contain separators using the HL7 escaping rules.
## Arguments
* `value`: a string to escape; it may or may not contain separator
characters.
* `separators`: a tuple containing the item separators to be used when
generating the message as returned by `HL7.Codec.set_separators/1`.
Defaults to `HL7.Codec.separators`.
* `escape_char`: character to be used as escape delimiter. Defaults to `?\\\\ `.
## Examples
iex> escape("ABCDEF")
"ABCDEF"
      ...> escape("ABC|DEF^GHI", separators(), ?\\\\)
"ABC\\\\F\\\\DEF\\\\S\\\\GHI"
"""
@spec escape(binary, Type.separators(), escape_char :: byte) :: binary
def escape(value, separators \\ @separators, escape_char \\ ?\\)
when is_binary(value) and is_tuple(separators) and is_integer(escape_char) do
escape_no_copy(value, separators, escape_char, byte_size(value), 0)
end
defp escape_no_copy(value, separators, escape_char, size, index) when index < size do
# As strings that need to be escaped are fairly rare, we try to avoid generating unnecessary
# garbage by not copying the characters in the string unless the string has to be escaped.
<<head::binary-size(index), char, rest::binary>> = value
case match_separator(char, separators) do
{:match, item_type} ->
acc = escape_acc(item_type, escape_char, head)
escape_copy(rest, separators, escape_char, acc)
:nomatch when char === escape_char ->
acc = escape_acc(:escape, escape_char, head)
escape_copy(rest, separators, escape_char, acc)
:nomatch ->
escape_no_copy(value, separators, escape_char, size, index + 1)
end
end
defp escape_no_copy(value, _separators, _escape_char, _size, _index) do
value
end
defp escape_copy(<<char, rest::binary>>, separators, escape_char, acc) do
acc =
case match_separator(char, separators) do
{:match, item_type} -> escape_acc(item_type, escape_char, acc)
:nomatch when char === escape_char -> escape_acc(:escape, escape_char, acc)
:nomatch -> <<acc::binary, char>>
end
escape_copy(rest, separators, escape_char, acc)
end
defp escape_copy(<<>>, _separators, _escape_char, acc) do
acc
end
defp escape_acc(item_type, escape_char, acc) do
char = escape_delimiter_type(item_type)
<<acc::binary, escape_char, char, escape_char>>
end
@compile {:inline, escape_delimiter_type: 1}
defp escape_delimiter_type(:field), do: ?F
defp escape_delimiter_type(:component), do: ?S
defp escape_delimiter_type(:subcomponent), do: ?T
defp escape_delimiter_type(:repetition), do: ?R
defp escape_delimiter_type(:escape), do: ?E
@doc """
Convert an escaped string into its original value.
## Arguments
* `value`: a string to unescape; it may or may not contain escaped characters.
* `escape_char`: character that was used as escape delimiter. Defaults to `?\\\\ `.
## Examples
      iex> "ABCDEF" = HL7.Codec.unescape("ABCDEF")
      iex> "ABC|DEF|GHI" = HL7.Codec.unescape("ABC\\\\F\\\\DEF\\\\F\\\\GHI", separators(), ?\\\\)
"""
@spec unescape(binary, Type.separators(), escape_char :: byte) :: binary
def unescape(value, separators \\ @separators, escape_char \\ ?\\)
when is_binary(value) and is_tuple(separators) and is_integer(escape_char) do
unescape_no_copy(value, separators, escape_char, byte_size(value), 0)
end
defp unescape_no_copy(value, separators, escape_char, size, index) when index < size do
# As strings that need to be unescaped are fairly rare, we try to avoid generating unnecessary
# garbage by not copying the characters in the string unless the string has to be unescaped.
case value do
<<head::binary-size(index), ^escape_char, char, ^escape_char, rest::binary>> ->
char = unescape_delimiter(char, separators, escape_char)
unescape_copy(rest, separators, escape_char, <<head::binary, char>>)
_ ->
unescape_no_copy(value, separators, escape_char, size, index + 1)
end
end
defp unescape_no_copy(value, _separators, _escape_char, _size, _index) do
value
end
defp unescape_copy(value, separators, escape_char, acc) do
case value do
<<^escape_char, char, ^escape_char, rest::binary>> ->
char = unescape_delimiter(char, separators, escape_char)
unescape_copy(rest, separators, escape_char, <<acc::binary, char>>)
<<char, rest::binary>> ->
unescape_copy(rest, separators, escape_char, <<acc::binary, char>>)
<<>> ->
acc
end
end
defp unescape_delimiter(?F, separators, _escape_char), do: separator(:field, separators)
defp unescape_delimiter(?S, separators, _escape_char), do: separator(:component, separators)
defp unescape_delimiter(?T, separators, _escape_char), do: separator(:subcomponent, separators)
defp unescape_delimiter(?R, separators, _escape_char), do: separator(:repetition, separators)
defp unescape_delimiter(?E, _separators, escape_char), do: escape_char
defp split_options(true), do: [:global, :trim]
defp split_options(false), do: [:global]
defp non_empty_tuple_size(tuple, false), do: tuple_size(tuple)
defp non_empty_tuple_size(tuple, _trim), do: _non_empty_tuple_size(tuple, tuple_size(tuple))
defp _non_empty_tuple_size(tuple, size) when size > 1 do
case :erlang.element(size, tuple) do
"" -> _non_empty_tuple_size(tuple, size - 1)
_ -> size
end
end
defp _non_empty_tuple_size(_tuple, size) do
size
end
defp maybe_trim_item(data, char, true), do: trim_item(data, char)
defp maybe_trim_item(data, _char, false), do: data
defp trim_item([value | tail], separator)
when value === separator or value === "" or value === [] do
trim_item(tail, separator)
end
defp trim_item(data, _separator), do: data
end
| 34.252159 | 101 | 0.662112 |
0350e6e8f67d4d796813a4c12208a603f04a14aa | 1,543 | ex | Elixir | clients/datastream/lib/google_api/datastream/v1/model/mysql_rdbms.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/datastream/lib/google_api/datastream/v1/model/mysql_rdbms.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/datastream/lib/google_api/datastream/v1/model/mysql_rdbms.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Datastream.V1.Model.MysqlRdbms do
@moduledoc """
MySQL database structure
## Attributes
* `mysqlDatabases` (*type:* `list(GoogleApi.Datastream.V1.Model.MysqlDatabase.t)`, *default:* `nil`) - Mysql databases on the server
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:mysqlDatabases => list(GoogleApi.Datastream.V1.Model.MysqlDatabase.t()) | nil
}
field(:mysqlDatabases, as: GoogleApi.Datastream.V1.Model.MysqlDatabase, type: :list)
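  # Construction sketch (contents are illustrative); databases are given as nested
  # MysqlDatabase structs:
  #
  #     %GoogleApi.Datastream.V1.Model.MysqlRdbms{
  #       mysqlDatabases: [%GoogleApi.Datastream.V1.Model.MysqlDatabase{}]
  #     }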
end
defimpl Poison.Decoder, for: GoogleApi.Datastream.V1.Model.MysqlRdbms do
def decode(value, options) do
GoogleApi.Datastream.V1.Model.MysqlRdbms.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Datastream.V1.Model.MysqlRdbms do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.829787 | 136 | 0.745301 |
0350f31ca165254d41b68d49d7a63803e88a0dd8 | 1,002 | exs | Elixir | test/scale/ggity_scale_alpha_continuous_test.exs | kianmeng/ggity | 75f0097464eae4086f8c70e4bea995d60571eba9 | [
"MIT"
] | 47 | 2020-06-21T15:23:54.000Z | 2022-03-13T01:24:19.000Z | test/scale/ggity_scale_alpha_continuous_test.exs | kianmeng/ggity | 75f0097464eae4086f8c70e4bea995d60571eba9 | [
"MIT"
] | 3 | 2020-11-28T11:00:59.000Z | 2020-11-30T18:20:37.000Z | test/scale/ggity_scale_alpha_continuous_test.exs | kianmeng/ggity | 75f0097464eae4086f8c70e4bea995d60571eba9 | [
"MIT"
] | 2 | 2020-11-28T10:40:10.000Z | 2021-05-28T06:44:47.000Z | defmodule GGityScaleAlphaContinuousTest do
use ExUnit.Case
alias GGity.Scale.Alpha
setup do
%{min_max: {0, 3}}
end
describe "draw/2" do
test "returns a correct scale given default options", %{min_max: min_max} do
scale =
Alpha.Continuous.new()
|> Alpha.Continuous.train(min_max)
assert_in_delta scale.transform.(0), 0.1, 0.0000001
assert_in_delta scale.transform.(1), 0.4, 0.0000001
assert_in_delta scale.transform.(2), 0.7, 0.0000001
assert_in_delta scale.transform.(3), 1, 0.0000001
end
test "returns a correct scale given custom min and max", %{min_max: min_max} do
scale =
Alpha.Continuous.new(range: {0.2, 0.8})
|> Alpha.Continuous.train(min_max)
assert_in_delta scale.transform.(0), 0.2, 0.0000001
assert_in_delta scale.transform.(1), 0.4, 0.0000001
assert_in_delta scale.transform.(2), 0.6, 0.0000001
assert_in_delta scale.transform.(3), 0.8, 0.0000001
end
end
end
| 29.470588 | 83 | 0.664671 |