Dataset schema (⌀ marks columns that may be null):

| column | type | values |
|---|---|---|
| hexsha | string | lengths 40..40 |
| size | int64 | 2..991k |
| ext | string | 2 distinct values |
| lang | string | 1 distinct value |
| max_stars_repo_path | string | lengths 4..208 |
| max_stars_repo_name | string | lengths 6..106 |
| max_stars_repo_head_hexsha | string | lengths 40..40 |
| max_stars_repo_licenses | sequence | |
| max_stars_count | int64 | 1..33.5k ⌀ |
| max_stars_repo_stars_event_min_datetime | string | lengths 24..24 ⌀ |
| max_stars_repo_stars_event_max_datetime | string | lengths 24..24 ⌀ |
| max_issues_repo_path | string | lengths 4..208 |
| max_issues_repo_name | string | lengths 6..106 |
| max_issues_repo_head_hexsha | string | lengths 40..40 |
| max_issues_repo_licenses | sequence | |
| max_issues_count | int64 | 1..16.3k ⌀ |
| max_issues_repo_issues_event_min_datetime | string | lengths 24..24 ⌀ |
| max_issues_repo_issues_event_max_datetime | string | lengths 24..24 ⌀ |
| max_forks_repo_path | string | lengths 4..208 |
| max_forks_repo_name | string | lengths 6..106 |
| max_forks_repo_head_hexsha | string | lengths 40..40 |
| max_forks_repo_licenses | sequence | |
| max_forks_count | int64 | 1..6.91k ⌀ |
| max_forks_repo_forks_event_min_datetime | string | lengths 24..24 ⌀ |
| max_forks_repo_forks_event_max_datetime | string | lengths 24..24 ⌀ |
| content | string | lengths 2..991k |
| avg_line_length | float64 | 1..36k |
| max_line_length | int64 | 1..977k |
| alphanum_fraction | float64 | 0..1 |
730769004054ba6451c1ea902adb667392f58e40 | 1,623 | exs | Elixir | test/power_assert_case_template_test.exs | ma2gedev/power_assert_ex | e0131555e272a37dc41bda7df6ea544918034fcc | ["Apache-2.0"] | 228 | 2015-10-01T03:21:12.000Z | 2021-11-12T03:32:19.000Z | test/power_assert_case_template_test.exs | ma2gedev/power_assert_ex | e0131555e272a37dc41bda7df6ea544918034fcc | ["Apache-2.0"] | 28 | 2015-10-13T13:03:38.000Z | 2020-12-26T08:25:11.000Z | test/power_assert_case_template_test.exs | ma2gedev/power_assert_ex | e0131555e272a37dc41bda7df6ea544918034fcc | ["Apache-2.0"] | 9 | 2015-11-14T03:16:56.000Z | 2020-12-04T05:23:29.000Z |
defmodule MyCase do
use ExUnit.CaseTemplate
using do
quote do
use PowerAssert
end
end
setup do
{:ok, executed: "setup func"}
end
end
defmodule MyTest do
use MyCase, async: true
test "executed setup function", context do
assert context[:executed] == "setup func"
end
test "raise", context do
assert context[:executed] == "setup func"
try do
assert [1,2,3] |> Enum.take(1) |> Enum.empty?()
rescue
error ->
msg = """
[1, 2, 3] |> Enum.take(1) |> Enum.empty?()
| |
[1] false
"""
ExUnit.Assertions.assert error.message <> "\n" == msg
end
end
end
defmodule MyCaseUsing do
use ExUnit.CaseTemplate
using do
quote do
use PowerAssert
import MyCaseUsing
end
end
def my_function(msg) do
msg
end
setup do
{:ok, executed: "setup func"}
end
end
defmodule MyTestUsing do
use MyCaseUsing, async: true
test "using function is available" do
assert my_function("using") == "using"
end
test "executed setup function", context do
assert context[:executed] == "setup func"
end
test "raise", context do
assert context[:executed] == "setup func"
try do
assert [1,2,3] |> Enum.take(1) |> Enum.empty?()
rescue
error ->
msg = """
[1, 2, 3] |> Enum.take(1) |> Enum.empty?()
| |
[1] false
"""
ExUnit.Assertions.assert error.message <> "\n" == msg
end
end
end
| 19.792683 | 61 | 0.532964 |
7307884e179bf8271899aadb6af1059c62b2d8de | 390 | ex | Elixir | lib/phoenix_website_web/views/error_view.ex | epic-creative/phoenix_website | 60cf15d86e3a55724bef41986db07b3d0ee108aa | ["MIT"] | null | null | null | lib/phoenix_website_web/views/error_view.ex | epic-creative/phoenix_website | 60cf15d86e3a55724bef41986db07b3d0ee108aa | ["MIT"] | null | null | null | lib/phoenix_website_web/views/error_view.ex | epic-creative/phoenix_website | 60cf15d86e3a55724bef41986db07b3d0ee108aa | ["MIT"] | null | null | null |
defmodule PhoenixWebsiteWeb.ErrorView do
use PhoenixWebsiteWeb, :view
def render("404.html", _assigns) do
"Page not found"
end
def render("500.html", _assigns) do
"Internal server error"
end
# In case no render clause matches or no
# template is found, let's render it as 500
def template_not_found(_template, assigns) do
render "500.html", assigns
end
end
| 21.666667 | 47 | 0.712821 |
7307e258a37c6bd5083f2832d7361d4b35ff9536 | 4,980 | ex | Elixir | clients/home_graph/lib/google_api/home_graph/v1/api/agent_users.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | clients/home_graph/lib/google_api/home_graph/v1/api/agent_users.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | clients/home_graph/lib/google_api/home_graph/v1/api/agent_users.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HomeGraph.V1.Api.AgentUsers do
@moduledoc """
API calls for all endpoints tagged `AgentUsers`.
"""
alias GoogleApi.HomeGraph.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Unlinks an agent user from Google. As a result, all data related to this
user will be deleted.
Here is how the agent user is created in Google:
1. When a user opens their Google Home App, they can begin linking a 3p
partner.
2. User is guided through the OAuth process.
3. After entering the 3p credentials, Google gets the 3p OAuth token and
uses it to make a Sync call to the 3p partner and gets back all of the
user's data, including `agent_user_id` and devices.
4. Google creates the agent user and stores a mapping from the
`agent_user_id` -> Google ID mapping. Google also
stores all of the user's devices under that Google ID.
The mapping from `agent_user_id` to Google ID is many to many, since one
Google user can have multiple 3p accounts, and multiple Google users can
map to one `agent_user_id` (e.g., a husband and wife share one Nest account
username/password).
The third-party user's identity is passed in as `agent_user_id`.
The agent is identified by the JWT signed by the partner's service account.
Note: Special characters (except "/") in `agent_user_id` must be
URL-encoded.
## Parameters
* `connection` (*type:* `GoogleApi.HomeGraph.V1.Connection.t`) - Connection to server
* `agent_users_id` (*type:* `String.t`) - Part of `agentUserId`. Required. Third-party user ID.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:requestId` (*type:* `String.t`) - Request ID used for debugging.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.HomeGraph.V1.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec homegraph_agent_users_delete(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.HomeGraph.V1.Model.Empty.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def homegraph_agent_users_delete(connection, agent_users_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:requestId => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/v1/agentUsers/{agentUsersId}", %{
"agentUsersId" => URI.encode(agent_users_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.HomeGraph.V1.Model.Empty{}])
end
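# Illustrative usage only (not part of the generated client); the connection
# constructor arguments and identifiers below are assumptions:
#
#   conn = GoogleApi.HomeGraph.V1.Connection.new("oauth-token-or-token-fetcher")
#   {:ok, %GoogleApi.HomeGraph.V1.Model.Empty{}} =
#     GoogleApi.HomeGraph.V1.Api.AgentUsers.homegraph_agent_users_delete(conn, "agent-user-id")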
end
| 44.070796 | 196 | 0.667671 |
7307f3847d3fa52a4aaee91033713999f6f8ae3c | 2,518 | ex | Elixir | clients/logging/lib/google_api/logging/v2/model/monitored_resource.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/logging/lib/google_api/logging/v2/model/monitored_resource.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/logging/lib/google_api/logging/v2/model/monitored_resource.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Logging.V2.Model.MonitoredResource do
@moduledoc """
An object representing a resource that can be used for monitoring, logging, billing, or other purposes. Examples include virtual machine instances, databases, and storage devices such as disks. The type field identifies a MonitoredResourceDescriptor object that describes the resource's schema. Information in the labels field identifies the actual resource and its attributes according to the schema. For example, a particular Compute Engine VM instance could be represented by the following object, because the MonitoredResourceDescriptor for "gce_instance" has labels "instance_id" and "zone": { "type": "gce_instance", "labels": { "instance_id": "12345678901234", "zone": "us-central1-a" }}
## Attributes
* `labels` (*type:* `map()`, *default:* `nil`) - Required. Values for all of the labels listed in the associated monitored resource descriptor. For example, Compute Engine VM instances use the labels "project_id", "instance_id", and "zone".
* `type` (*type:* `String.t`, *default:* `nil`) - Required. The monitored resource type. This field must match the type field of a MonitoredResourceDescriptor object. For example, the type of a Compute Engine VM instance is gce_instance.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:labels => map() | nil,
:type => String.t() | nil
}
field(:labels, type: :map)
field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.Logging.V2.Model.MonitoredResource do
def decode(value, options) do
GoogleApi.Logging.V2.Model.MonitoredResource.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Logging.V2.Model.MonitoredResource do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 50.36 | 697 | 0.749007 |
7308006844168c21d13913707bce4abdf0e58003 | 2,681 | ex | Elixir | clients/genomics/lib/google_api/genomics/v1/model/reference.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | ["Apache-2.0"] | null | null | null | clients/genomics/lib/google_api/genomics/v1/model/reference.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | ["Apache-2.0"] | null | null | null | clients/genomics/lib/google_api/genomics/v1/model/reference.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | ["Apache-2.0"] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Genomics.V1.Model.Reference do
@moduledoc """
A reference is a canonical assembled DNA sequence, intended to act as a reference coordinate space for other genomic annotations. A single reference might represent the human chromosome 1 or mitochondrial DNA, for instance. A reference belongs to one or more reference sets. For more genomics resource definitions, see [Fundamentals of Google Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
## Attributes
- id (String): The server-generated reference ID, unique across all references. Defaults to: `null`.
- length (String): The length of this reference's sequence. Defaults to: `null`.
- md5checksum (String): MD5 of the upper-case sequence excluding all whitespace characters (this is equivalent to SQ:M5 in SAM). This value is represented in lower case hexadecimal format. Defaults to: `null`.
- name (String): The name of this reference, for example `22`. Defaults to: `null`.
- ncbiTaxonId (Integer): ID from http://www.ncbi.nlm.nih.gov/taxonomy. For example, 9606 for human. Defaults to: `null`.
- sourceAccessions (List[String]): All known corresponding accession IDs in INSDC (GenBank/ENA/DDBJ) ideally with a version number, for example `GCF_000001405.26`. Defaults to: `null`.
- sourceUri (String): The URI from which the sequence was obtained. Typically specifies a FASTA format file. Defaults to: `null`.
"""
defstruct [
:"id",
:"length",
:"md5checksum",
:"name",
:"ncbiTaxonId",
:"sourceAccessions",
:"sourceUri"
]
end
defimpl Poison.Decoder, for: GoogleApi.Genomics.V1.Model.Reference do
def decode(value, _options) do
value
end
end
defimpl Poison.Encoder, for: GoogleApi.Genomics.V1.Model.Reference do
def encode(value, options) do
GoogleApi.Genomics.V1.Deserializer.serialize_non_nil(value, options)
end
end
| 46.224138 | 422 | 0.747855 |
730803f2958408d28ee72d6eaf3d71617e42f2c1 | 8,982 | ex | Elixir | lib/phoenix/live_dashboard/live/os_mon_live.ex | Blond11516/phoenix_live_dashboard | 9b70670d4ad1dc49ebe2eef45f2f082ff6b03a1b | ["MIT"] | null | null | null | lib/phoenix/live_dashboard/live/os_mon_live.ex | Blond11516/phoenix_live_dashboard | 9b70670d4ad1dc49ebe2eef45f2f082ff6b03a1b | ["MIT"] | null | null | null | lib/phoenix/live_dashboard/live/os_mon_live.ex | Blond11516/phoenix_live_dashboard | 9b70670d4ad1dc49ebe2eef45f2f082ff6b03a1b | ["MIT"] | null | null | null |
defmodule Phoenix.LiveDashboard.OSMonLive do
use Phoenix.LiveDashboard.Web, :live_view
alias Phoenix.LiveDashboard.{
SystemInfo,
BarComponent,
ColorBarComponent,
ColorBarLegendComponent
}
@temporary_assigns [os_mon: nil, memory_usage: nil, cpu_total: nil, cpu_count: 0]
@cpu_usage_sections [
{:kernel, "Kernel", "purple", "Executing code in kernel mode"},
{:user, "User", "blue", "Executing code in user mode"},
{:nice_user, "User nice", "green", "Executing code in low-priority (nice)"},
{:soft_irq, "Soft IRQ", "orange", "Executing soft interrupts"},
{:hard_irq, "Hard IRQ", "yellow", "Executing hard interrupts"},
{:steal, "Steal", "purple", "Stolen time spent in virtualized OSes"},
{:wait, "Waiting", "orange", nil},
{:idle, "Idle", "dark-gray", nil}
]
@memory_usage_sections [
{"Used", :used_memory, :system_total_memory,
"The amount of memory used from the available memory"},
{"Buffered", :buffered_memory, :system_total_memory,
"The amount of memory used for temporary storing raw disk blocks"},
{"Cached", :cached_memory, :system_total_memory,
"The amount of memory used for cached files read from disk"},
{"Swap", :used_swap, :total_swap,
"The amount of disk swap memory used from the available swap"}
]
@impl true
def mount(%{"node" => _} = params, session, socket) do
socket =
socket
|> assign_defaults(params, session, true)
|> assign_os_mon()
{:ok, socket, temporary_assigns: @temporary_assigns}
end
def mount(_params, _session, socket) do
{:ok, push_redirect(socket, to: live_dashboard_path(socket, :home, node()))}
end
defp assign_os_mon(socket) do
os_mon = SystemInfo.fetch_os_mon_info(socket.assigns.menu.node)
cpu_count = length(os_mon.cpu_per_core)
assign(socket,
os_mon: os_mon,
cpu_count: cpu_count,
cpu_total: calculate_cpu_total(os_mon.cpu_per_core, cpu_count),
memory_usage: calculate_memory_usage(os_mon.system_mem)
)
end
defp calculate_memory_usage(system_memory) do
for {key, value_key, total_key, hint} <- @memory_usage_sections,
total = system_memory[total_key],
value = memory_value(system_memory, value_key, total) do
{key, value, total, percentage(value, total), hint}
end
end
defp memory_value(system_memory, :used_memory, total) do
if free = Keyword.get(system_memory, :free_memory, 0) do
total -
(free + Keyword.get(system_memory, :cached_memory, 0) +
Keyword.get(system_memory, :buffered_memory, 0))
end
end
defp memory_value(system_memory, :used_swap, total) do
if free = Keyword.get(system_memory, :free_swap, 0) do
total - free
end
end
defp memory_value(system_memory, key, _total), do: system_memory[key]
defp calculate_cpu_total([], _cpu_count), do: nil
defp calculate_cpu_total([{_, core}], _cpu_count), do: core
defp calculate_cpu_total([{_, keys} | _] = per_core, cpu_count) do
keys
|> Map.keys()
|> Enum.map(fn key -> {key, avg_cpu_usage(per_core, key, cpu_count)} end)
end
defp avg_cpu_usage(map, key, count) do
map
|> Enum.map(fn {_n, values} -> values[key] end)
|> Enum.sum()
|> Kernel./(count)
|> Float.ceil(1)
end
@impl true
def render(assigns) do
~L"""
<div class="row">
<%= if @os_mon.cpu_nprocs > 0 do %>
<div class="col-sm-6">
<h5 class="card-title">
CPU
<%= hint do %>
<p>The load panes show the CPU demand in the last 1, 5 and 15 minutes over all cores.</p>
<%= if @cpu_count > 0 do %>
<p>The avg panes show the same values averaged across all cores.</p>
<% end %>
<% end %>
</h5>
<div class="row">
<div class="col-md-4 mb-4">
<div class="banner-card">
<h6 class="banner-card-title">
Load 1 min
</h6>
<div class="banner-card-value"><%= rup(@os_mon.cpu_avg1) %></div>
</div>
</div>
<div class="col-md-4 mb-4">
<div class="banner-card">
<h6 class="banner-card-title">
Load 5 min
</h6>
<div class="banner-card-value"><%= rup(@os_mon.cpu_avg5) %></div>
</div>
</div>
<div class="col-md-4 mb-4">
<div class="banner-card">
<h6 class="banner-card-title">
Load 15 min
</h6>
<div class="banner-card-value"><%= rup(@os_mon.cpu_avg15) %></div>
</div>
</div>
<%= if @cpu_count > 0 do %>
<div class="col-md-4 mb-4">
<div class="banner-card">
<h6 class="banner-card-title">
Avg 1 min
</h6>
<div class="banner-card-value"><%= rup_avg(@os_mon.cpu_avg1, @cpu_count) %></div>
</div>
</div>
<div class="col-md-4 mb-4">
<div class="banner-card">
<h6 class="banner-card-title">
Avg 5 min
</h6>
<div class="banner-card-value"><%= rup_avg(@os_mon.cpu_avg5, @cpu_count) %></div>
</div>
</div>
<div class="col-md-4 mb-4">
<div class="banner-card">
<h6 class="banner-card-title">
Avg 15 min
</h6>
<div class="banner-card-value"><%= rup_avg(@os_mon.cpu_avg15, @cpu_count) %></div>
</div>
</div>
</div>
<% end %>
<%= if @cpu_total do %>
<div class="card mb-4">
<div class="card-body resource-usage">
<%= for {num_cpu, usage} <- @os_mon.cpu_per_core do %>
<div class="progress flex-grow-1 mb-3">
<%= live_component @socket, ColorBarComponent, id: {:cpu, num_cpu}, data: cpu_usage_sections(usage), title: "CPU #{num_cpu+1}" %>
</div>
<% end %>
<div class="progress flex-grow-1 mb-3">
<%= live_component @socket, ColorBarComponent, data: cpu_usage_sections(@cpu_total), title: "TOTAL" %>
</div>
<%= live_component @socket, ColorBarLegendComponent, data: cpu_usage_sections(@cpu_total) %>
<div class="resource-usage-total text-center py-1 mt-3">
Number of OS processes: <%= @os_mon.cpu_nprocs %>
</div>
</div>
</div>
<% end %>
</div>
<% end %>
<%= if @memory_usage != [] do %>
<div class="<%= if @os_mon.cpu_nprocs > 0, do: "col-sm-6", else: "col-12" %>">
<h5 class="card-title">Memory</h5>
<%= for {title, value, total, percent, hint} <- @memory_usage do %>
<div class="card progress-section mb-4">
<%= live_component @socket, BarComponent, id: {:memory, title}, percent: percent, class: "card-body" do %>
<%= title %> <%= hint(do: hint) %>
<span class="flex-grow-1"></span>
<small class="text-right text-muted mr-2">
<%= format_bytes(value) %> of <%= format_bytes(total) %>
</small>
<strong><%= percent %>%</strong>
<% end %>
</div>
<% end %>
</div>
<% end %>
<%= if @os_mon.disk != [] do %>
<div class="col-12">
<h5 class="card-title">Disk</h5>
<div class="card mb-4">
<div class="card-body disk-usage">
<%= for {mountpoint, kbytes, percent} <- @os_mon.disk do %>
<%= live_component @socket, BarComponent, id: {:disk, mountpoint}, percent: percent do %>
<%= mountpoint %>
<span class="flex-grow-1"></span>
<span class="text-right text-muted">
<%= format_percent(percent) %> of <%= format_bytes(kbytes * 1024) %>
</span>
<% end %>
<% end %>
</div>
</div>
</div>
<% end %>
</div>
"""
end
defp rup(value), do: Float.ceil(value / 256, 2)
defp rup_avg(value, count), do: Float.ceil(value / 256 / count, 2)
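# Note: :cpu_sup reports load averages scaled by 256 (see the os_mon docs),
# which is why rup/1 and rup_avg/2 divide by 256 before rounding up to two
# decimals. Illustrative values: rup(512) == 2.0 and rup_avg(512, 4) == 0.5.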
defp cpu_usage_sections(cpu_usage) do
for {key, name, color, desc} <- @cpu_usage_sections, value = cpu_usage[key] do
{name, value, color, desc}
end
end
@impl true
def handle_info({:node_redirect, node}, socket) do
{:noreply, push_redirect(socket, to: live_dashboard_path(socket, :home, node))}
end
def handle_info(:refresh, socket) do
{:noreply, assign_os_mon(socket)}
end
end
| 35.501976 | 149 | 0.533289 |
730826046db6259becd685ececb884c10c9f6499 | 10,756 | ex | Elixir | lib/mock.ex | Francisco-Castro/mock | 84abb395d7d3848008124c33f0642b5306331016 | ["MIT"] | null | null | null | lib/mock.ex | Francisco-Castro/mock | 84abb395d7d3848008124c33f0642b5306331016 | ["MIT"] | null | null | null | lib/mock.ex | Francisco-Castro/mock | 84abb395d7d3848008124c33f0642b5306331016 | ["MIT"] | null | null | null |
defmodule Mock do
@moduledoc """
Mock modules for testing purposes. Usually inside a unit test.
Please see the README file on github for a tutorial
## Example
defmodule MyTest do
use ExUnit.Case
import Mock
test "get" do
with_mock HTTPotion,
[get: fn("http://example.com", _headers) ->
HTTPotion.Response.new(status_code: 200,
body: "hello") end] do
# Code which calls HTTPotion.get
# Check that the call was made as we expected
assert called HTTPotion.get("http://example.com", :_)
end
end
end
"""
@doc """
Mock up `mock_module` with functions specified as a keyword
list of function_name:implementation `mocks` for the duration
of `test`.
`opts` List of optional arguments passed to meck. `:passthrough` will
passthrough arguments to the original module.
## Example
with_mock HTTPotion, [get: fn("http://example.com") ->
"<html></html>" end] do
# Tests that make the expected call
assert called HTTPotion.get("http://example.com")
end
"""
defmacro with_mock(mock_module, opts \\ [], mocks, do: test) do
quote do
unquote(__MODULE__).with_mocks(
[{unquote(mock_module), unquote(opts), unquote(mocks)}], do: unquote(test))
end
end
@doc """
Mock up multiple modules for the duration of `test`.
## Example
with_mocks([{HTTPotion, opts, [get: fn("http://example.com") -> "<html></html>" end]}]) do
# Tests that make the expected call
assert called HTTPotion.get("http://example.com")
end
"""
defmacro with_mocks(mocks, do: test) do
quote do
mock_modules = mock_modules(unquote(mocks))
try do
unquote(test)
after
for m <- mock_modules, do: :meck.unload(m)
end
end
end
@doc """
Shortcut to avoid multiple blocks when a test requires a single
mock.
For full description see `with_mock`.
## Example
test_with_mock "test_name", HTTPotion,
[get: fn(_url) -> "<html></html>" end] do
HTTPotion.get("http://example.com")
assert called HTTPotion.get("http://example.com")
end
"""
defmacro test_with_mock(test_name, mock_module, opts \\ [], mocks, test_block) do
quote do
test unquote(test_name) do
unquote(__MODULE__).with_mock(
unquote(mock_module), unquote(opts), unquote(mocks), unquote(test_block))
end
end
end
@doc """
Shortcut to avoid multiple blocks when a test requires a single
mock. Accepts a context argument enabling information to be shared
between callbacks and the test.
For full description see `with_mock`.
## Example
setup do
doc = "<html></html>"
{:ok, doc: doc}
end
test_with_mock "test_with_mock with context", %{doc: doc}, HTTPotion, [],
[get: fn(_url) -> doc end] do
HTTPotion.get("http://example.com")
assert called HTTPotion.get("http://example.com")
end
"""
defmacro test_with_mock(test_name, context, mock_module, opts, mocks, test_block) do
quote do
test unquote(test_name), unquote(context) do
unquote(__MODULE__).with_mock(
unquote(mock_module), unquote(opts), unquote(mocks), unquote(test_block))
end
end
end
@doc """
Call original function inside mock anonymous function.
Allows overriding only a certain behavior of a function.
Compatible with passthrough option.
## Example
with_mock String, [:passthrough], [reverse: fn(str) ->
passthrough([str]) <> "!" end] do
assert String.reverse("xyz") == "zyx!"
end
"""
defmacro passthrough(args) do
quote do
:meck.passthrough(unquote(args))
end
end
@doc """
Use inside a `with_mock` block to determine whether
a mocked function was called as expected.
Pass `:_` as a function argument for wildcard matches.
## Example
assert called HTTPotion.get("http://example.com")
# Matches any invocation
assert called HTTPotion.get(:_)
"""
defmacro called({ {:., _, [ module , f ]} , _, args }) do
quote do
:meck.called unquote(module), unquote(f), unquote(args)
end
end
@doc """
Use inside a `with_mock` block to determine whether
a mocked function was called as expected. If the assertion fails,
the calls that were received are displayed in the assertion message.
Pass `:_` as a function argument for wildcard matches.
## Example
assert_called HTTPotion.get("http://example.com")
# Matches any invocation
assert_called HTTPotion.get(:_)
"""
defmacro assert_called({{:., _, [module, f]}, _, args}) do
quote do
unquoted_module = unquote(module)
value = :meck.called(unquoted_module, unquote(f), unquote(args))
unless value do
calls = unquoted_module
|> :meck.history()
|> Enum.with_index()
|> Enum.map(fn {{_, {m, f, a}, ret}, i} ->
"#{i}. #{m}.#{f}(#{a |> Enum.map(&Kernel.inspect/1) |> Enum.join(",")}) (returned #{inspect ret})"
end)
|> Enum.join("\n")
raise ExUnit.AssertionError,
message: "Expected call but did not receive it. Calls which were received:\n\n#{calls}"
end
end
end
@doc """
Use inside a `with_mock` block to determine whether a mocked function was called
as expected exactly x times. If the assertion fails, the number of calls that
were received is displayed in the assertion message.
Pass `:_` as a function argument for wildcard matches.
## Example
assert_called_exactly HTTPotion.get("http://example.com"), 2
# Matches any invocation
assert_called_exactly HTTPotion.get(:_), 2
"""
defmacro assert_called_exactly({{:., _, [module, f]}, _, args}, call_times) do
quote do
unquoted_module = unquote(module)
unquoted_f = unquote(f)
unquoted_args = unquote(args)
unquoted_call_times = unquote(call_times)
num_calls = :meck.num_calls(unquoted_module, unquoted_f, unquoted_args)
if num_calls != unquoted_call_times do
mfa_str = "#{unquoted_module}.#{unquoted_f}(#{unquoted_args |> Enum.map(&Kernel.inspect/1) |> Enum.join(", ")})"
raise ExUnit.AssertionError,
message: "Expected #{mfa_str} to be called exactly #{unquoted_call_times} time(s), but it was called (number of calls: #{num_calls})"
end
end
end
@doc """
Use inside a `with_mock` block to check if
a mocked function was NOT called. If the assertion fails,
the number of calls is displayed in the assertion message.
Pass `:_` as a function argument for wildcard matches.
## Example
assert_not_called HTTPotion.get("http://example.com")
# Matches any invocation
assert_not_called HTTPotion.get(:_)
"""
defmacro assert_not_called({{:., _, [module, f]}, _, args}) do
quote do
unquoted_module = unquote(module)
unquoted_f = unquote(f)
unquoted_args = unquote(args)
num_calls = :meck.num_calls(unquoted_module, unquoted_f, unquoted_args)
if num_calls > 0 do
mfa_str = "#{unquoted_module}.#{unquoted_f}(#{unquoted_args |> Enum.map(&Kernel.inspect/1) |> Enum.join(", ")})"
raise ExUnit.AssertionError,
message: "Expected #{mfa_str} not to be called, but it was called (number of calls: #{num_calls})"
end
end
end
@doc """
Helper macro to get the history of calls made to the mocked module.
## Example
iex> assert call_history(HTTPotion) == [
{pid, {HTTPotion, :get, ["http://example.com"]}, some_return_value}
]
"""
defmacro call_history(module) do
quote do
unquoted_module = unquote(module)
unquoted_module
|> :meck.history()
end
end
@doc """
Mocks up multiple modules prior to the execution of each test in a case and
executes the callback specified.
For full description of mocking, see `with_mocks`.
For a full description of ExUnit setup, see
https://hexdocs.pm/ex_unit/ExUnit.Callbacks.html
## Example
setup_with_mocks([
{Map, [], [get: fn(%{}, "http://example.com") -> "<html></html>" end]}
]) do
foo = "bar"
{:ok, foo: foo}
end
test "setup_all_with_mocks base case" do
assert Map.get(%{}, "http://example.com") == "<html></html>"
end
"""
defmacro setup_with_mocks(mocks, do: setup_block) do
quote do
setup do
mock_modules(unquote(mocks))
on_exit(fn ->
:meck.unload()
end)
unquote(setup_block)
end
end
end
@doc """
Mocks up multiple modules prior to the execution of each test in a case and
executes the specified callback with the given context.
See `setup_with_mocks` for more details
## Example
setup_with_mocks([
{Map, [], [get: fn(%{}, "http://example.com") -> "<html></html>" end]}
], context) do
{:ok, test_string: Atom.to_string(context.test)}
end
test "setup_all_with_mocks with context", %{test_string: test_string} do
assert Map.get(%{}, "http://example.com") == "<html></html>"
assert test_string == "test setup_all_with_mocks with context"
end
"""
defmacro setup_with_mocks(mocks, context, do: setup_block) do
quote do
setup unquote(context) do
mock_modules(unquote(mocks))
on_exit(fn ->
:meck.unload()
end)
unquote(setup_block)
end
end
end
# Helper macro to mock modules. Intended to be called only within this module
# but not defined as `defmacrop` due to the scope within which it's used.
defmacro mock_modules(mocks) do
quote do
Enum.reduce(unquote(mocks), [], fn({m, opts, mock_fns}, ms) ->
unless m in ms do
# :meck.validate will throw an error if trying to validate
# a module that was not mocked
try do
if :meck.validate(m), do: :meck.unload(m)
rescue
e in ErlangError -> :ok
end
:meck.new(m, opts)
end
unquote(__MODULE__)._install_mock(m, mock_fns)
true = :meck.validate(m)
[ m | ms] |> Enum.uniq
end)
end
end
@doc false
def _install_mock(_, []), do: :ok
def _install_mock(mock_module, [ {fn_name, value} | tail ]) do
:meck.expect(mock_module, fn_name, value)
_install_mock(mock_module, tail)
end
def in_series(signature, results) do
{signature, :meck.seq(results)}
end
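# Illustrative sketch (not from the original docs; exact wiring may vary):
# in_series/2 pairs an argument signature with :meck.seq/1 so that successive
# calls matching the signature return the given results in order, e.g. in a
# mock spec such as
#
#   [get: [in_series(["http://example.com"], [:first_response, :second_response])]]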
end
| 28.759358 | 143 | 0.617981 |
7308361b5a13718081f00195c70ca034a4e7292f | 936 | ex | Elixir | lib/falcon_plus_api/api.ex | sawater/falcon-plus-api | a41b1cc04cc0d71b72341e803de88d16b4bba77d | ["Apache-2.0"] | 2 | 2018-05-03T14:35:37.000Z | 2018-07-30T09:32:38.000Z | lib/falcon_plus_api/api.ex | mon-suit/falcon-plus-api | a41b1cc04cc0d71b72341e803de88d16b4bba77d | ["Apache-2.0"] | null | null | null | lib/falcon_plus_api/api.ex | mon-suit/falcon-plus-api | a41b1cc04cc0d71b72341e803de88d16b4bba77d | ["Apache-2.0"] | null | null | null |
defmodule FalconPlusApi.Api do
use Maxwell.Builder
middleware Maxwell.Middleware.Opts, connect_timeout: 10_000
middleware Maxwell.Middleware.Retry
middleware Maxwell.Middleware.Json
middleware Maxwell.Middleware.Compress
middleware Maxwell.Middleware.Logger
adapter Maxwell.Adapter.Httpc
def set_opts(conn, keywords) do
{body, keywords} = Keyword.pop(keywords, :body)
conn = if body, do: put_req_body(conn, body), else: conn
{query, keywords} = Keyword.pop(keywords, :query)
conn = if query, do: put_query_string(conn, query), else: conn
keywords |> Enum.reduce(conn, fn ({k, v}, conn) -> put_private(conn, k, v) end)
end
def get_result({:ok, conn=%Maxwell.Conn{}}) do
body = get_resp_body(conn)
if conn.status == 200 do
{:ok, body}
else
{:error, body}
end
end
def get_result({:error, reason_term, %Maxwell.Conn{}}) do
{:error, reason_term}
end
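# Hedged usage sketch (the URL and body below are placeholders, and the verb
# functions are assumed to come from `use Maxwell.Builder` above): build a
# Maxwell.Conn, attach request data with set_opts/2, perform the request,
# then normalize the response with get_result/1.
#
#   Maxwell.Conn.new("http://falcon-api/api/v1/endpoint")
#   |> set_opts(body: %{"key" => "value"})
#   |> post()
#   |> get_result()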
end
| 25.297297 | 83 | 0.690171 |
7308585e9878e2d2c7b33d125691146332f738b0 | 1,931 | exs | Elixir | test/grpc/integration/connection_test.exs | falood/grpc-elixir | 7353aa04cbf4cd982183aacd54d645fe6b4571dd | ["Apache-2.0"] | null | null | null | test/grpc/integration/connection_test.exs | falood/grpc-elixir | 7353aa04cbf4cd982183aacd54d645fe6b4571dd | ["Apache-2.0"] | null | null | null | test/grpc/integration/connection_test.exs | falood/grpc-elixir | 7353aa04cbf4cd982183aacd54d645fe6b4571dd | ["Apache-2.0"] | null | null | null |
defmodule GRPC.Integration.ConnectionTest do
use GRPC.Integration.TestCase, async: true
@cert_path Path.expand("./tls/server1.pem", :code.priv_dir(:grpc))
@key_path Path.expand("./tls/server1.key", :code.priv_dir(:grpc))
@ca_path Path.expand("./tls/ca.pem", :code.priv_dir(:grpc))
defmodule FeatureServer do
use GRPC.Server, service: Routeguide.RouteGuide.Service
def get_feature(point, _stream) do
Routeguide.Feature.new(location: point, name: "#{point.latitude},#{point.longitude}")
end
end
test "reconnection works" do
server = FeatureServer
{:ok, _, port} = GRPC.Server.start(server, 0)
point = Routeguide.Point.new(latitude: 409_146_138, longitude: -746_188_906)
{:ok, channel} = GRPC.Stub.connect("localhost:#{port}", adapter_opts: %{retry_timeout: 10})
assert {:ok, _} = channel |> Routeguide.RouteGuide.Stub.get_feature(point)
:ok = GRPC.Server.stop(server)
{:ok, _, _} = GRPC.Server.start(server, port)
assert {:ok, _} = channel |> Routeguide.RouteGuide.Stub.get_feature(point)
:ok = GRPC.Server.stop(server)
end
test "authentication works" do
server = FeatureServer
cred =
GRPC.Credential.new(
ssl: [
certfile: @cert_path,
cacertfile: @ca_path,
keyfile: @key_path,
verify: :verify_peer,
fail_if_no_peer_cert: true
]
)
{:ok, _, port} = GRPC.Server.start(server, 0, cred: cred)
try do
point = Routeguide.Point.new(latitude: 409_146_138, longitude: -746_188_906)
client_cred = GRPC.Credential.new(ssl: [certfile: @cert_path, keyfile: @key_path])
{:ok, channel} = GRPC.Stub.connect("localhost:#{port}", cred: client_cred)
assert {:ok, _} = Routeguide.RouteGuide.Stub.get_feature(channel, point)
catch
error ->
refute "Caught #{inspect(error)}"
after
:ok = GRPC.Server.stop(server)
end
end
end
| 33.877193 | 95 | 0.656655 |
73085b979827836dec63530e119bc01272bfa9cb | 472 | exs | Elixir | apps/api/test/api/views/error_view_test.exs | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | ["Apache-2.0"] | 4 | 2020-11-30T17:38:57.000Z | 2021-01-23T21:29:41.000Z | apps/api/test/api/views/error_view_test.exs | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | ["Apache-2.0"] | 24 | 2020-11-30T17:32:48.000Z | 2021-02-22T06:25:22.000Z | apps/api/test/api/views/error_view_test.exs | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | ["Apache-2.0"] | null | null | null |
defmodule API.View.ErrorTest do
@moduledoc """
"""
use ExUnit.Case, async: true
alias API.View.Error
describe "serialize/3" do
test "serializes a code and description" do
assert Error.serialize(:code, "description", "1.0") == %{
data: %{code: :code, description: "description", object: "error"},
service_name: "child_chain",
success: false,
version: "1.0"
}
end
end
end
| 23.6 | 81 | 0.561441 |
73085c95a63dafc7bc10eff6414791817544a19e | 108 | exs | Elixir | hub/test/hub_test.exs | judas-christ/love-potions | 39a43dca4c66b5e8028759c8a81d97a8c0562956 | ["MIT"] | null | null | null | hub/test/hub_test.exs | judas-christ/love-potions | 39a43dca4c66b5e8028759c8a81d97a8c0562956 | ["MIT"] | null | null | null | hub/test/hub_test.exs | judas-christ/love-potions | 39a43dca4c66b5e8028759c8a81d97a8c0562956 | ["MIT"] | null | null | null |
defmodule HubTest do
use ExUnit.Case
doctest Hub
test "the truth" do
assert 1 + 1 == 2
end
end
| 12 | 21 | 0.648148 |
730862650d848ede022a6b4b58395ccf090ac1c2 | 10,270 | ex | Elixir | lib/postgrex/messages.ex | enter-haken/postgrex | fb3438d4e6a56db81ddd0d578cdfc0484909c233 | ["Apache-2.0"] | 681 | 2016-06-16T12:28:22.000Z | 2022-03-30T08:48:42.000Z | deps/postgrex/lib/postgrex/messages.ex | rwtrecs/rocketseat-nlw5-inmana | 8ce8bc32e0bdd005c423394bb163945747b557e2 | ["MIT"] | 383 | 2016-06-17T14:49:41.000Z | 2022-03-21T18:13:19.000Z | deps/postgrex/lib/postgrex/messages.ex | adrianomota/blog | ef3b2d2ed54f038368ead8234d76c18983caa75b | ["MIT"] | 234 | 2016-06-16T16:14:47.000Z | 2022-03-03T00:43:59.000Z |
defmodule Postgrex.Messages do
@moduledoc false
import Postgrex.BinaryUtils
import Record, only: [defrecord: 2]
@protocol_vsn_major 3
@protocol_vsn_minor 0
@auth_types [
ok: 0,
kerberos: 2,
cleartext: 3,
md5: 5,
scm: 6,
gss: 7,
gss_cont: 8,
sspi: 9,
sasl: 10,
sasl_cont: 11,
sasl_fin: 12
]
@error_fields [
severity: ?S,
code: ?C,
message: ?M,
detail: ?D,
hint: ?H,
position: ?P,
internal_position: ?p,
internal_query: ?q,
where: ?W,
schema: ?s,
table: ?t,
column: ?c,
data_type: ?d,
constraint: ?n,
file: ?F,
line: ?L,
routine: ?R
]
defrecord :msg_auth, [:type, :data]
defrecord :msg_startup, [:params]
defrecord :msg_password, [:pass]
defrecord :msg_error, [:fields]
defrecord :msg_parameter, [:name, :value]
defrecord :msg_backend_key, [:pid, :key]
defrecord :msg_ready, [:status]
defrecord :msg_notice, [:fields]
defrecord :msg_query, [:statement]
defrecord :msg_parse, [:name, :statement, :type_oids]
defrecord :msg_describe, [:type, :name]
defrecord :msg_flush, []
defrecord :msg_close, [:type, :name]
defrecord :msg_parse_complete, []
defrecord :msg_parameter_desc, [:type_oids]
defrecord :msg_too_many_parameters, [:len, :max_len]
defrecord :msg_row_desc, [:fields]
defrecord :msg_no_data, []
defrecord :msg_notify, [:pg_pid, :channel, :payload]
defrecord :msg_bind, [:name_port, :name_stat, :param_formats, :params, :result_formats]
defrecord :msg_execute, [:name_port, :max_rows]
defrecord :msg_sync, []
defrecord :msg_bind_complete, []
defrecord :msg_close_complete, []
defrecord :msg_portal_suspend, []
defrecord :msg_data_row, [:values]
defrecord :msg_command_complete, [:tag]
defrecord :msg_empty_query, []
defrecord :msg_copy_data, [:data]
defrecord :msg_copy_done, []
defrecord :msg_copy_fail, [:message]
defrecord :msg_copy_in_response, [:format, :columns]
defrecord :msg_copy_both_response, [:format, :columns]
defrecord :msg_copy_out_response, [:format, :columns]
defrecord :msg_terminate, []
defrecord :msg_ssl_request, []
defrecord :msg_cancel_request, [:pid, :key]
defrecord :row_field, [:name, :table_oid, :column, :type_oid, :type_size, :type_mod, :format]
### decoders ###
# auth
def parse(<<type::int32, rest::binary>>, ?R, size) do
type = decode_auth_type(type)
data =
case type do
:md5 ->
<<data::binary-size(4)>> = rest
data
:gss_cont ->
rest_size = size - 2
<<data::size(rest_size)>> = rest
data
:sasl ->
rest
:sasl_cont ->
rest
:sasl_fin ->
rest
_ ->
nil
end
msg_auth(type: type, data: data)
end
# backend_key
def parse(<<pid::int32, key::int32>>, ?K, _size) do
msg_backend_key(pid: pid, key: key)
end
# ready
def parse(<<status::int8>>, ?Z, _size) do
status =
case status do
?I -> :idle
?T -> :transaction
?E -> :error
end
msg_ready(status: status)
end
# parameter_desc
def parse(<<len::uint16, rest::binary(len, 32)>>, ?t, _size) do
oids = for <<oid::size(32) <- rest>>, do: oid
msg_parameter_desc(type_oids: oids)
end
def parse(<<overflow_len::uint16, _::binary>>, ?t, size) do
len = div(size - 2, 4)
case <<len::uint16>> do
<<^overflow_len::uint16>> ->
msg_too_many_parameters(len: len, max_len: 0xFFFF)
_ ->
raise "invalid parameter description"
end
end
# row_desc
def parse(<<len::uint16, rest::binary>>, ?T, _size) do
fields = decode_row_fields(rest, len)
msg_row_desc(fields: fields)
end
# data_row
def parse(<<_::uint16, rest::binary>>, ?D, _size) do
msg_data_row(values: rest)
end
# notify
def parse(<<pg_pid::int32, rest::binary>>, ?A, _size) do
{channel, rest} = decode_string(rest)
{payload, ""} = decode_string(rest)
msg_notify(pg_pid: pg_pid, channel: channel, payload: payload)
end
# error
def parse(rest, ?E, _size) do
fields = decode_fields(rest)
msg_error(fields: Map.new(fields))
end
# notice
def parse(rest, ?N, _size) do
fields = decode_fields(rest)
msg_notice(fields: Map.new(fields))
end
# parameter
def parse(rest, ?S, _size) do
{name, rest} = decode_string(rest)
{value, ""} = decode_string(rest)
msg_parameter(name: name, value: value)
end
# parse_complete
def parse(_rest, ?1, _size) do
msg_parse_complete()
end
# no_data
def parse(_rest, ?n, _size) do
msg_no_data()
end
# bind_complete
def parse(_rest, ?2, _size) do
msg_bind_complete()
end
# close_complete
def parse(_rest, ?3, _size) do
msg_close_complete()
end
# portal_suspended
def parse(_rest, ?s, _size) do
msg_portal_suspend()
end
# command_complete
def parse(rest, ?C, _size) do
{tag, ""} = decode_string(rest)
msg_command_complete(tag: tag)
end
# empty_query
def parse(_rest, ?I, _size) do
msg_empty_query()
end
# msg_copy_data
def parse(data, ?d, _size) do
msg_copy_data(data: data)
end
# msg_copy_done
def parse(_rest, ?c, _size) do
msg_copy_done()
end
# msg_copy_fail
def parse(message, ?f, _size) do
msg_copy_fail(message: message)
end
# msg_copy_in_response
def parse(rest, ?G, _size) do
{format, columns} = decode_copy(rest)
msg_copy_in_response(format: format, columns: columns)
end
# msg_copy_out_response
def parse(rest, ?H, _size) do
{format, columns} = decode_copy(rest)
msg_copy_out_response(format: format, columns: columns)
end
# msg_copy_both_response
def parse(rest, ?W, _size) do
{format, columns} = decode_copy(rest)
msg_copy_both_response(format: format, columns: columns)
end
### encoders ###
def encode_msg(msg) do
{first, data} = encode(msg)
size = IO.iodata_length(data) + 4
if first do
[first, <<size::int32>>, data]
else
[<<size::int32>>, data]
end
end
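# Illustrative note (not part of the original source): encode_msg/1 prepends
# the tag byte and a 4-byte length field that counts itself, as the PostgreSQL
# wire protocol requires. For example, encode_msg(msg_sync()) yields iodata
# equivalent to <<?S, 4::32>>, since the Sync message has an empty body.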
# startup
defp encode(msg_startup(params: params)) do
params =
Enum.reduce(params, [], fn {key, value}, acc ->
[acc, to_string(key), 0, value, 0]
end)
vsn = <<@protocol_vsn_major::int16, @protocol_vsn_minor::int16>>
{nil, [vsn, params, 0]}
end
# password
defp encode(msg_password(pass: pass)) do
{?p, [pass]}
end
# query
defp encode(msg_query(statement: statement)) do
{?Q, [statement, 0]}
end
# parse
defp encode(msg_parse(name: name, statement: statement, type_oids: oids)) do
oids = for oid <- oids, into: "", do: <<oid::uint32>>
len = <<div(byte_size(oids), 4)::int16>>
{?P, [name, 0, statement, 0, len, oids]}
end
# describe
defp encode(msg_describe(type: type, name: name)) do
byte =
case type do
:statement -> ?S
:portal -> ?P
end
{?D, [byte, name, 0]}
end
# flush
defp encode(msg_flush()) do
{?H, ""}
end
# close
defp encode(msg_close(type: type, name: name)) do
byte =
case type do
:statement -> ?S
:portal -> ?P
end
{?C, [byte, name, 0]}
end
# bind
defp encode(
msg_bind(
name_port: port,
name_stat: stat,
param_formats: param_formats,
params: params,
result_formats: result_formats
)
) do
pfs = for format <- param_formats, into: "", do: <<format(format)::int16>>
rfs = for format <- result_formats, into: "", do: <<format(format)::int16>>
len_pfs = <<div(byte_size(pfs), 2)::int16>>
len_rfs = <<div(byte_size(rfs), 2)::int16>>
len_params = <<length(params)::int16>>
{?B, [port, 0, stat, 0, len_pfs, pfs, len_params, params, len_rfs, rfs]}
end
# execute
defp encode(msg_execute(name_port: port, max_rows: rows)) do
{?E, [port, 0, <<rows::int32>>]}
end
# sync
defp encode(msg_sync()) do
{?S, ""}
end
# terminate
defp encode(msg_terminate()) do
{?X, ""}
end
# ssl_request
defp encode(msg_ssl_request()) do
{nil, <<1234::int16, 5679::int16>>}
end
# cancel_request
defp encode(msg_cancel_request(pid: pid, key: key)) do
{nil, <<1234::int16, 5678::int16, pid::int32, key::int32>>}
end
# copy_data
defp encode(msg_copy_data(data: data)) do
{?d, data}
end
# copy_done
defp encode(msg_copy_done()) do
{?c, ""}
end
# copy_fail
defp encode(msg_copy_fail(message: msg)) do
{?f, [msg, 0]}
end
### encode helpers ###
defp format(:text), do: 0
defp format(:binary), do: 1
### decode helpers ###
defp decode_fields(<<0>>), do: []
defp decode_fields(<<field::int8, rest::binary>>) do
type = decode_field_type(field)
{string, rest} = decode_string(rest)
[{type, string} | decode_fields(rest)]
end
defp decode_string(bin) do
{pos, 1} = :binary.match(bin, <<0>>)
{string, <<0, rest::binary>>} = :erlang.split_binary(bin, pos)
{string, rest}
end
defp decode_row_fields("", 0), do: []
defp decode_row_fields(rest, count) do
{field, rest} = decode_row_field(rest)
[field | decode_row_fields(rest, count - 1)]
end
defp decode_row_field(rest) do
{name, rest} = decode_string(rest)
<<table_oid::uint32, column::int16, type_oid::uint32, type_size::int16, type_mod::int32,
format::int16, rest::binary>> = rest
field =
row_field(
name: name,
table_oid: table_oid,
column: column,
type_oid: type_oid,
type_size: type_size,
type_mod: type_mod,
format: format
)
{field, rest}
end
Enum.each(@auth_types, fn {type, value} ->
def decode_auth_type(unquote(value)), do: unquote(type)
end)
Enum.each(@error_fields, fn {field, char} ->
def decode_field_type(unquote(char)), do: unquote(field)
end)
def decode_field_type(_), do: :unknown
defp decode_format(0), do: :text
defp decode_format(1), do: :binary
defp decode_copy(<<format::int8, len::uint16, rest::binary(len, 16)>>) do
format = decode_format(format)
columns = for <<column::uint16 <- rest>>, do: decode_format(column)
{format, columns}
end
end
| 22.671082 | 95 | 0.616553 |
7308802221692d9e75ef67fdec1c6605c49ca386 | 694 | ex | Elixir | web/gettext.ex | code-mancers/exray | f37a089c98ae3117063865c8d2583fec6d112af5 | ["MIT"] | 1 | 2016-03-20T05:30:28.000Z | 2016-03-20T05:30:28.000Z | web/gettext.ex | code-mancers/exray | f37a089c98ae3117063865c8d2583fec6d112af5 | ["MIT"] | null | null | null | web/gettext.ex | code-mancers/exray | f37a089c98ae3117063865c8d2583fec6d112af5 | ["MIT"] | null | null | null |
defmodule Exray.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](http://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import Exray.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](http://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :exray
end
| 27.76 | 71 | 0.674352 |
73089983451f8b3dad1f43440dfeabc17a3c6985 | 2,321 | ex | Elixir | lib/cadastre/country.ex | Recruitee/ambassador | 309e70f679b83609ba17fee5f43e8e69e0f66fa3 | ["MIT"] | 1 | 2020-10-29T11:28:32.000Z | 2020-10-29T11:28:32.000Z | lib/cadastre/country.ex | Recruitee/ambassador | 309e70f679b83609ba17fee5f43e8e69e0f66fa3 | ["MIT"] | 2 | 2020-10-23T22:27:34.000Z | 2021-12-29T06:00:52.000Z | lib/cadastre/country.ex | Recruitee/ambassador | 309e70f679b83609ba17fee5f43e8e69e0f66fa3 | ["MIT"] | 2 | 2020-10-15T17:48:11.000Z | 2021-08-04T12:59:38.000Z |
defmodule Cadastre.Country do
@moduledoc """
Country implementation.
"""
alias Cadastre.Language
@external_resource Application.app_dir(:cadastre, "priv/data/countries.etf")
@enforce_keys [:id]
defstruct [:id]
@type id :: <<_::16>>
@type t :: %__MODULE__{id: id}
external_data = @external_resource |> File.read!() |> :erlang.binary_to_term()
ids = external_data |> Enum.map(&elem(&1, 0))
@doc """
Returns all ids (ISO_3166-1).
## Examples
iex> Cadastre.Country.ids() |> Enum.take(10)
["AD", "AE", "AF", "AG", "AI", "AL", "AM", "AO", "AQ", "AR"]
"""
@spec ids :: [id]
def ids, do: unquote(ids)
@doc """
Returns all countries.
## Examples
iex> Cadastre.Country.all() |> Enum.take(3)
[
%Cadastre.Country{id: "AD"},
%Cadastre.Country{id: "AE"},
%Cadastre.Country{id: "AF"}
]
iex> Cadastre.Country.all() |> Enum.count()
249
"""
@spec all :: [t]
def all, do: ids() |> Enum.map(&%__MODULE__{id: &1})
@doc """
Returns `%Cadastre.Country{}` for valid `id` or `nil` for invalid `id`.
## Examples
iex> Cadastre.Country.new("NL")
%Cadastre.Country{id: "NL"}
iex> Cadastre.Country.new("nl")
%Cadastre.Country{id: "NL"}
iex> Cadastre.Country.new("xx")
nil
"""
@spec new(id | any) :: t | nil
def new(id) when id in unquote(ids), do: %__MODULE__{id: id}
def new(str) when is_binary(str) do
id = str |> String.upcase()
case id in ids() do
true -> %__MODULE__{id: id}
_ -> nil
end
end
def new(_), do: nil
@doc """
Returns country name translation for `locale`
## Examples
iex> Cadastre.Country.new("NL") |> Cadastre.Country.name("be")
"Нідэрланды"
iex> Cadastre.Country.new("NL") |> Cadastre.Country.name(":)")
"Netherlands"
iex> Cadastre.Country.name("something wrong", "be")
nil
"""
@spec name(t, Language.id()) :: String.t()
def name(country, locale)
external_data
|> Enum.each(fn {id, translations} ->
translations = translations |> Map.new()
en = translations |> Map.fetch!("en")
def name(%__MODULE__{id: unquote(id)}, locale) do
unquote(Macro.escape(translations)) |> Map.get(locale, unquote(en))
end
end)
def name(_, _), do: nil
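# Note: the Enum.each above runs at compile time, generating one name/2 clause
# per country from the ETF data in priv/data/countries.etf. Each clause embeds
# its translation map via Macro.escape/1 and falls back to the English name
# when the requested locale is missing.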
end
| 21.490741 | 80 | 0.581215 |
7308abe917a1f829c84deafce0f90a9ffbb74871 | 2,500 | ex | Elixir | lib/sonda/sink/memory.ex | Fire-Dragon-DoL/sonda | 6565ff3a0a8303a120a3e6637fa2616614837cf3 | ["MIT"] | null | null | null | lib/sonda/sink/memory.ex | Fire-Dragon-DoL/sonda | 6565ff3a0a8303a120a3e6637fa2616614837cf3 | ["MIT"] | null | null | null | lib/sonda/sink/memory.ex | Fire-Dragon-DoL/sonda | 6565ff3a0a8303a120a3e6637fa2616614837cf3 | ["MIT"] | null | null | null |
defmodule Sonda.Sink.Memory do
defmodule Defaults do
def signals, do: :any
end
defstruct signals: Defaults.signals(), records: []
@type message :: {Sonda.Sink.signal(), Sonda.Sink.timestamp(), any()}
@type accepted_signals :: :any | [Sonda.Sink.signal()]
@type config_opts :: [
{:signals, accepted_signals()}
]
@type matcher :: (message() -> boolean())
@type t :: %__MODULE__{
signals: accepted_signals(),
records: [message()]
}
defimpl Sonda.Sink do
def record(%module{} = sink, signal, timestamp, data) do
module.record(sink, signal, timestamp, data)
end
end
@spec configure() :: t()
def configure()
@spec configure(opts :: config_opts()) :: t()
def configure(opts \\ []) do
struct(__MODULE__, opts)
end
@spec record(
sink :: t(),
signal :: Sonda.Sink.signal(),
timestamp :: Sonda.Sink.timestamp(),
data :: any()
) :: t()
def record(sink, signal, timestamp, data) do
case record_signal?(sink, signal) do
false ->
sink
true ->
records = sink.records
new_record = {signal, timestamp, data}
records = [new_record | records]
%{sink | records: records}
end
end
@spec recorded?(sink :: t(), match :: matcher()) :: boolean()
def recorded?(sink, match) do
Enum.find_value(sink.records, false, fn record ->
match.(record)
end)
end
@spec records(sink :: t()) :: [message()]
def records(sink) do
Enum.reverse(sink.records)
end
@spec records(sink :: t(), match :: matcher()) :: [message()]
def records(sink, match) do
sink.records
|> Enum.filter(match)
|> Enum.reverse()
end
@spec record_signal?(sink :: t(), signal :: Sonda.Sink.signal()) :: boolean()
def record_signal?(sink, signal)
def record_signal?(%{signals: :any}, _signal), do: true
def record_signal?(%{signals: signals}, signal), do: signal in signals
@spec one_record(sink :: t(), match :: matcher()) ::
{:ok, message()} | {:error, :none} | {:error, :multiple}
def one_record(sink, match) do
records = records(sink, match)
case records do
[] -> {:error, :none}
[record] -> {:ok, record}
_ -> {:error, :multiple}
end
end
@spec recorded_once?(sink :: t(), match :: matcher()) :: boolean()
def recorded_once?(sink, match) do
case one_record(sink, match) do
{:ok, _} -> true
_ -> false
end
end
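# Illustrative usage (the signal, timestamp and data are made up for the example):
#
#   sink = Sonda.Sink.Memory.configure(signals: [:http_request])
#   sink = Sonda.Sink.Memory.record(sink, :http_request, DateTime.utc_now(), %{path: "/"})
#   true = Sonda.Sink.Memory.recorded?(sink, fn {signal, _ts, _data} -> signal == :http_request end)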
end
| 26.041667 | 79 | 0.5836 |
7308c1b2a702ab37760a273f05d8e241b7fa016f | 1,087 | exs | Elixir | firmware/rel/config.exs | iboard/pocketweb | e1d370e5f446fd4f9914966364af662194c43e04 | ["MIT"] | 2 | 2018-09-05T00:13:39.000Z | 2019-07-14T20:03:59.000Z | firmware/rel/config.exs | iboard/pocketweb | e1d370e5f446fd4f9914966364af662194c43e04 | ["MIT"] | null | null | null | firmware/rel/config.exs | iboard/pocketweb | e1d370e5f446fd4f9914966364af662194c43e04 | ["MIT"] | 3 | 2018-08-27T15:40:57.000Z | 2021-01-15T12:21:13.000Z |
use Mix.Releases.Config,
# This sets the default release built by `mix release`
default_release: :default,
# This sets the default environment used by `mix release`
default_environment: :dev
# For a full list of config options for both releases
# and environments, visit https://hexdocs.pm/distillery/configuration.html
# You may define one or more environments in this file,
# an environment's settings will override those of a release
# when building in that environment, this combination of release
# and environment configuration is called a profile
environment :dev do
set cookie: :"_BiuOXP<(9md}z{|zB&un$o9xZZ&tMYi4Bd2FGESv)e{LRYCXoSXy|x@Rt64}g52"
end
environment :prod do
set cookie: :"_BiuOXP<(9md}z{|zB&un$o9xZZ&tMYi4Bd2FGESv)e{LRYCXoSXy|x@Rt64}g52"
end
# You may define one or more releases in this file.
# If you have not set a default release, or selected one
# when running `mix release`, the first release in the file
# will be used by default
release :firmware do
set version: current_version(:firmware)
plugin Shoehorn
plugin Nerves
end
| 31.057143 | 81 | 0.75989 |
7308da8a02267cd5f98ad7ac8271960f08db7504 | 3,070 | ex | Elixir | lib/web/views/reports_view.ex | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | ["CC0-1.0"] | 9 | 2020-02-26T20:24:38.000Z | 2022-03-22T21:14:52.000Z | lib/web/views/reports_view.ex | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | ["CC0-1.0"] | 15 | 2020-04-22T19:33:24.000Z | 2022-03-26T15:11:17.000Z | lib/web/views/reports_view.ex | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | ["CC0-1.0"] | 4 | 2020-04-27T22:58:57.000Z | 2022-01-14T13:42:09.000Z |
NimbleCSV.define(ChallengeGov.Reports.CSV, separator: ",", escape: "\"")
defmodule Web.ReportsView do
use Web, :view
alias ChallengeGov.Reports.CSV
alias Web.FormView
alias Web.SharedView
def render_security_log("security-log-header.csv", _assigns) do
headers = [
"ID",
"Action",
"Details",
"Originator ID",
"Originator Type",
"Originator Identifier",
"Originator IP Address",
"Target ID",
"Target Type",
"Target Identifier",
"Logged At"
]
CSV.dump_to_iodata([headers])
end
def render_certification_log("certification-log-header.csv", _assigns) do
headers = [
"ID",
"Approver ID",
"Approver Role",
"Approver Identifier",
"Approver IP Address",
"User ID",
"User Role",
"User Identifier",
"User IP Address",
"Requested At",
"Certified At",
"Expires At",
"Denied At",
"Inserted At",
"Updated At"
]
CSV.dump_to_iodata([headers])
end
def render(file_name, %{record: record}) do
csv = csv_schema_by_report(file_name, record)
CSV.dump_to_iodata([csv])
end
defp csv_schema_by_report(file_name, record) do
case file_name do
"security-log-content.csv" ->
[
record.id,
record.action,
parse_details(record.details),
record.originator_id,
record.originator_role,
record.originator_identifier,
record.originator_remote_ip,
record.target_id,
record.target_type,
record.target_identifier,
record.logged_at
]
"certification-log-content.csv" ->
[
record.id,
record.approver_id,
record.approver_role,
record.approver_identifier,
record.approver_remote_ip,
record.user_id,
record.user_role,
record.user_identifier,
record.user_remote_ip,
record.requested_at,
record.certified_at,
record.expires_at,
record.denied_at,
record.inserted_at,
record.updated_at
]
end
end
defp parse_details(record) do
if record do
record
|> Enum.map(fn x ->
format_to_readable(x)
end)
|> Enum.join(", ")
end
end
defp format_to_readable(x) do
case elem(x, 0) == "duration" do
true ->
["#{elem(x, 0)}: #{convert_to_iostime(elem(x, 1))}"]
false ->
["#{elem(x, 0)}: #{elem(x, 1)}"]
end
end
defp convert_to_iostime(duration) do
{hours, minutes, seconds, _microseconds} =
duration
|> Timex.Duration.from_seconds()
|> Timex.Duration.to_clock()
"#{ensure_double_digits(hours)}:#{ensure_double_digits(minutes)}:#{
ensure_double_digits(seconds)
}"
end
def ensure_double_digits(elem) do
result =
elem
|> Integer.digits()
|> length
case result == 1 do
true ->
"0#{elem}"
false ->
elem
end
end
end
| 21.928571 | 75 | 0.576547 |
730911b2f4a1bcddeff711c05139742651624a94 | 563 | ex | Elixir | lib/central_web/channels/notification_channel.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 4 | 2021-07-29T16:23:20.000Z | 2022-02-23T05:34:36.000Z | lib/central_web/channels/notification_channel.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 14 | 2021-08-01T02:36:14.000Z | 2022-01-30T21:15:03.000Z | lib/central_web/channels/notification_channel.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 7 | 2021-05-13T12:55:28.000Z | 2022-01-14T06:39:06.000Z | defmodule CentralWeb.Communication.NotificationChannel do
@moduledoc false
use Phoenix.Channel
def join("communication_notification:" <> user_id, _params, socket) do
if socket.assigns[:current_user].id == user_id |> String.to_integer() do
{:ok, socket}
else
{:error, "Permission denied"}
end
end
  # Used to follow when an update to a piece of content happens;
  # no sensitive data is sent here, so we don't need to worry about
  # authorization
def join("communication_reloads:" <> _, _params, socket) do
{:ok, socket}
end
end
| 28.15 | 76 | 0.699822 |
730924c64c4b69b204f0968abbe160e96abf967e | 247 | ex | Elixir | lib/bitty.ex | doomspork/tiny | fab5bb717fb7247b7a9daa1ed0058ad052005a5f | [
"Apache-2.0"
] | 1 | 2021-06-17T21:49:04.000Z | 2021-06-17T21:49:04.000Z | lib/bitty.ex | doomspork/tiny | fab5bb717fb7247b7a9daa1ed0058ad052005a5f | [
"Apache-2.0"
] | null | null | null | lib/bitty.ex | doomspork/tiny | fab5bb717fb7247b7a9daa1ed0058ad052005a5f | [
"Apache-2.0"
] | null | null | null | defmodule Bitty do
@moduledoc """
Bitty keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end
| 24.7 | 66 | 0.748988 |
73093624c1661ef28f79bb66124b282ee6796703 | 128 | exs | Elixir | examples/foo_umbrella/apps/foo/test/foo/my_area/my_area_test.exs | katafrakt/modular | d8bd94b91e20a51b13b28b71d1602e3877518688 | [
"MIT"
] | 7 | 2019-11-21T19:51:50.000Z | 2021-12-23T02:42:14.000Z | examples/foo_umbrella/apps/foo/test/foo/my_area/my_area_test.exs | katafrakt/modular | d8bd94b91e20a51b13b28b71d1602e3877518688 | [
"MIT"
] | 1 | 2021-03-08T00:39:55.000Z | 2021-03-08T00:39:55.000Z | examples/foo_umbrella/apps/foo/test/foo/my_area/my_area_test.exs | katafrakt/modular | d8bd94b91e20a51b13b28b71d1602e3877518688 | [
"MIT"
] | 2 | 2020-09-17T17:46:26.000Z | 2021-03-05T12:28:47.000Z | defmodule Foo.MyAreaTest do
use Foo.TestCase
test "a/0" do
assert Foo.MyArea.Impl.a() == :hello_from_my_area
end
end
| 16 | 53 | 0.703125 |
730995624da4a746da969eff47bda56326c8822b | 747 | ex | Elixir | lib/crawly/pipelines/experimental/send_to_ui.ex | aymanosman/crawly | 9b5dbcd1328a0d80380bbc8b8c869ce252ac29f5 | [
"Apache-2.0"
] | null | null | null | lib/crawly/pipelines/experimental/send_to_ui.ex | aymanosman/crawly | 9b5dbcd1328a0d80380bbc8b8c869ce252ac29f5 | [
"Apache-2.0"
] | null | null | null | lib/crawly/pipelines/experimental/send_to_ui.ex | aymanosman/crawly | 9b5dbcd1328a0d80380bbc8b8c869ce252ac29f5 | [
"Apache-2.0"
] | null | null | null | defmodule Crawly.Pipelines.Experimental.SendToUI do
@moduledoc """
"""
@behaviour Crawly.Pipeline
require Logger
@impl Crawly.Pipeline
def run(item, state, opts \\ []) do
job_tag = Map.get(state, :job_tag, UUID.uuid1())
spider_name = state.spider_name |> Atom.to_string()
case Keyword.get(opts, :ui_node) do
nil ->
Logger.debug(
"No ui node is set. It's required to set a UI node to use " <>
"this pipeline. Ignoring the pipeline."
)
ui_node ->
:rpc.cast(ui_node, CrawlyUI, :store_item, [
spider_name,
item,
job_tag,
Node.self() |> to_string()
])
end
{item, Map.put(state, :job_tag, job_tag)}
end
end
| 23.34375 | 72 | 0.579652 |
7309cc5e8bc968e1c8bfe318552ab7a768725bcc | 217 | ex | Elixir | test/support/room/error_room.ex | membraneframework/webrtc-server | 10603a3f85e2d197ce7cf10ccb79c5c71d57f346 | [
"Apache-2.0"
] | 10 | 2020-03-27T04:58:56.000Z | 2022-03-12T21:39:49.000Z | test/support/room/error_room.ex | KalvadTech/webrtc-server | a9fbf41f83a644702946541342411f19e2806447 | [
"Apache-2.0"
] | 3 | 2021-01-25T08:47:45.000Z | 2021-02-08T12:58:42.000Z | test/support/room/error_room.ex | KalvadTech/webrtc-server | a9fbf41f83a644702946541342411f19e2806447 | [
"Apache-2.0"
] | 1 | 2021-01-24T17:36:57.000Z | 2021-01-24T17:36:57.000Z | defmodule Membrane.WebRTC.Server.Support.ErrorRoom do
@moduledoc false
use Membrane.WebRTC.Server.Room
@impl true
def on_join(_auth_data, state) do
{{:error, :this_is_supposed_to_fail}, state}
end
end
| 19.727273 | 53 | 0.751152 |
7309ebd80fbf5f386fffd0a7b664c1406adb7960 | 3,111 | ex | Elixir | lib/central/helpers/structure_helper.ex | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | 6 | 2021-02-08T10:42:53.000Z | 2021-04-25T12:12:03.000Z | lib/central/helpers/structure_helper.ex | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | 14 | 2021-08-01T02:36:14.000Z | 2022-01-30T21:15:03.000Z | lib/central/helpers/structure_helper.ex | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | 7 | 2021-05-13T12:55:28.000Z | 2022-01-14T06:39:06.000Z | defmodule Central.Helpers.StructureHelper do
@moduledoc """
A module to make import/export of JSON objects easier. Currently only tested with a single parent object and multiple sets of child objects.
Designed to not take the IDs with it as they are liable to change based on the database they go into.
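  ## Example

  A usage sketch — the schema module, `post_id` and `decoded_data` below are
  hypothetical:

      # Export a parent record and its child objects to a plain map
      data = Central.Helpers.StructureHelper.export(MyApp.Blog.Post, post_id)

      # ... typically encoded to JSON, stored, and decoded again elsewhere ...

      # Recreate the object graph; "id" fields are skipped on import
      Central.Helpers.StructureHelper.import(MyApp.Blog.Post, decoded_data)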
"""
alias Central.Repo
import Ecto.Query, warn: false
@skip_export_fields [:__meta__, :inserted_at, :updated_at]
@skip_import_fields ~w(id)
defp query_obj(module, id) do
query =
from objects in module,
where: objects.id == ^id
Repo.one!(query)
end
defp cast_many(object, field, parent_module) do
association = parent_module.__schema__(:association, field)
object_module = association.queryable
case association.relationship do
:parent ->
:skip
:child ->
Repo.preload(object, field)
|> Map.get(field)
|> Enum.map(fn item -> cast_one(item, object_module) end)
end
end
defp cast_one(object, module) do
skip_fields =
if Kernel.function_exported?(module, :structure_export_skips, 0) do
module.structure_export_skips()
else
[]
end
object
|> Map.from_struct()
|> Enum.filter(fn {k, _} ->
not Enum.member?(@skip_export_fields, k) and not Enum.member?(skip_fields, k)
end)
|> Enum.map(fn {k, v} ->
cond do
module.__schema__(:field_source, k) -> {k, v}
module.__schema__(:association, k) -> {k, cast_many(object, k, module)}
end
end)
|> Enum.filter(fn {_, v} -> v != :skip end)
|> Map.new()
end
def export(module, id) do
query_obj(module, id)
|> cast_one(module)
end
defp import_assoc(parent_module, field, data, parent_id) when is_list(data) do
field = String.to_existing_atom(field)
assoc = parent_module.__schema__(:association, field)
data
|> Enum.map(fn item_params ->
import_assoc(assoc, item_params, parent_id)
end)
end
defp import_assoc(assoc, params, parent_id) when is_map(params) do
key = assoc.related_key |> to_string
params =
Map.put(params, key, parent_id)
|> Enum.filter(fn {k, _} -> not Enum.member?(@skip_import_fields, k) end)
|> Map.new()
module = assoc.queryable
{:ok, _new_object} =
module.changeset(module.__struct__, params)
|> Repo.insert()
end
# Given the root module and the data, this should create everything you need
def import(module, data) do
assocs =
module.__schema__(:associations)
|> Enum.map(&to_string/1)
# First, create and insert the core object
core_params =
data
|> Enum.filter(fn {k, _} ->
not Enum.member?(assocs, k) and not Enum.member?(@skip_import_fields, k)
end)
|> Map.new()
{:ok, core_object} =
module.changeset(module.__struct__, core_params)
|> Repo.insert()
    # Now, let's add the assocs
data
|> Enum.filter(fn {k, _} -> Enum.member?(assocs, k) end)
|> Enum.each(fn {k, v} -> import_assoc(module, k, v, core_object.id) end)
core_object
end
end
| 27.289474 | 142 | 0.64063 |
7309f0ef59732428f3c7a51d689527dd70fc1f49 | 675 | exs | Elixir | Elixir/mix.exs | shunjilin/DataStructuresAndAlgorithms | 093d0a82b82e65986e9f45d90cbf3dd2808aedce | [
"MIT"
] | 1 | 2021-12-14T09:39:20.000Z | 2021-12-14T09:39:20.000Z | Elixir/mix.exs | shunjilin/DataStructuresAndAlgorithms | 093d0a82b82e65986e9f45d90cbf3dd2808aedce | [
"MIT"
] | 4 | 2019-08-08T15:36:10.000Z | 2020-10-06T16:22:56.000Z | Elixir/mix.exs | shunjilin/DataStructuresAndAlgorithms | 093d0a82b82e65986e9f45d90cbf3dd2808aedce | [
"MIT"
] | null | null | null | defmodule DataStructuresAndAlgorithms.MixProject do
use Mix.Project
def project do
[
app: :elixir,
version: "0.1.0",
elixir: "~> 1.9",
start_permanent: Mix.env() == :prod,
deps: deps(),
test_paths: ["lib"]
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:dialyxir, "~> 1.0", only: [:dev], runtime: false}
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
]
end
end
| 21.774194 | 87 | 0.577778 |
730a077b95572da28bc5834c9834eaebc11cf382 | 1,535 | ex | Elixir | clients/qpx_express/lib/google_api/qpx_express/v1/model/tax_data.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/qpx_express/lib/google_api/qpx_express/v1/model/tax_data.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/qpx_express/lib/google_api/qpx_express/v1/model/tax_data.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.QPXExpress.V1.Model.TaxData do
@moduledoc """
Tax data.
## Attributes
- id (String): An identifier uniquely identifying a tax in a response. Defaults to: `null`.
- kind (String): Identifies this as a tax data object, representing some tax. Value: the fixed string qpxexpress#taxData. Defaults to: `null`.
- name (String): The name of a tax. Defaults to: `null`.
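  A populated struct might look like the following (illustrative values only):

      %GoogleApi.QPXExpress.V1.Model.TaxData{
        id: "TAX_001",
        kind: "qpxexpress#taxData",
        name: "US Passenger Facility Charge"
      }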
"""
defstruct [
:"id",
:"kind",
:"name"
]
end
defimpl Poison.Decoder, for: GoogleApi.QPXExpress.V1.Model.TaxData do
def decode(value, _options) do
value
end
end
defimpl Poison.Encoder, for: GoogleApi.QPXExpress.V1.Model.TaxData do
def encode(value, options) do
GoogleApi.QPXExpress.V1.Deserializer.serialize_non_nil(value, options)
end
end
| 30.7 | 144 | 0.734853 |
730a080a880e8b062c1f5eff44e8036741e7acc0 | 165 | exs | Elixir | braccino_firmware/test/braccino_firmware_test.exs | darcros/braccino | 33f4d945daf8eac36e4e88ef412dd53cb1389376 | [
"MIT"
] | null | null | null | braccino_firmware/test/braccino_firmware_test.exs | darcros/braccino | 33f4d945daf8eac36e4e88ef412dd53cb1389376 | [
"MIT"
] | null | null | null | braccino_firmware/test/braccino_firmware_test.exs | darcros/braccino | 33f4d945daf8eac36e4e88ef412dd53cb1389376 | [
"MIT"
] | null | null | null | defmodule BraccinoFirmwareTest do
use ExUnit.Case
doctest BraccinoFirmware
test "greets the world" do
assert BraccinoFirmware.hello() == :world
end
end
| 18.333333 | 45 | 0.757576 |
730a0ede1f1f3368ad96012a98644c65fd60199c | 482 | exs | Elixir | lesson_09/demo/4_phx/a_webpack_demo_umbrella/config/test.exs | martijnmeeldijk/ip_major | 867f09975aa8db0b308081216ace639c5677446b | [
"BSD-3-Clause"
] | 1 | 2021-09-22T09:56:35.000Z | 2021-09-22T09:56:35.000Z | lesson_09/demo/4_phx/a_webpack_demo_umbrella/config/test.exs | martijnmeeldijk/ip_major | 867f09975aa8db0b308081216ace639c5677446b | [
"BSD-3-Clause"
] | 7 | 2020-03-14T19:30:29.000Z | 2022-02-27T01:20:40.000Z | lesson_09/demo/4_phx/a_webpack_demo_umbrella/config/test.exs | martijnmeeldijk/ip_major | 867f09975aa8db0b308081216ace639c5677446b | [
"BSD-3-Clause"
] | 11 | 2020-02-13T14:52:45.000Z | 2020-08-03T12:18:56.000Z | use Mix.Config
# Configure your database
config :a_webpack_demo, AWebpackDemo.Repo,
username: "root",
password: "",
database: "a_webpack_demo_test",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :a_webpack_demo_web, AWebpackDemoWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 25.368421 | 56 | 0.744813 |
730a1e1104d5f8043de315c701859125daf92a41 | 5,961 | ex | Elixir | lib/oban/telemetry.ex | bglusman/oban | 980b17e1235979e261c6b70723d45c09d3098e9d | [
"Apache-2.0"
] | null | null | null | lib/oban/telemetry.ex | bglusman/oban | 980b17e1235979e261c6b70723d45c09d3098e9d | [
"Apache-2.0"
] | null | null | null | lib/oban/telemetry.ex | bglusman/oban | 980b17e1235979e261c6b70723d45c09d3098e9d | [
"Apache-2.0"
] | null | null | null | defmodule Oban.Telemetry do
@moduledoc """
Telemetry integration for event metrics, logging and error reporting.
### Job Events
Oban emits the following telemetry events for each job:
* `[:oban, :job, :start]` — at the point a job is fetched from the database and will execute
* `[:oban, :job, :stop]` — after a job succeeds and the success is recorded in the database
  * `[:oban, :job, :exception]` — after a job fails and the failure is recorded in the database
All job events share the same details about the job that was executed. In addition, failed jobs
provide the error type, the error itself, and the stacktrace. The following chart shows which
metadata you can expect for each event:
| event | measures | metadata |
| ------------ | ----------- | ---------------------------------------------------------------------------------- |
| `:start` | `:start_time` | `:id, :args, :queue, :worker, :attempt, :max_attempts` |
| `:stop` | `:duration` | `:id, :args, :queue, :worker, :attempt, :max_attempts` |
| `:exception` | `:duration` | `:id, :args, :queue, :worker, :attempt, :max_attempts, :kind, :error, :stacktrace` |
For `:exception` events the metadata includes details about what caused the failure. The `:kind`
value is determined by how an error occurred. Here are the possible kinds:
* `:error` — from an `{:error, error}` return value. Some Erlang functions may also throw an
`:error` tuple, which will be reported as `:error`.
* `:exit` — from a caught process exit
* `:throw` — from a caught value, this doesn't necessarily mean that an error occurred and the
error value is unpredictable
### Circuit Events
All processes that interact with the database have circuit breakers to prevent errors from
  crashing the entire supervision tree. Processes emit an `[:oban, :circuit, :trip]` event when a
  circuit is tripped and an `[:oban, :circuit, :open]` event when the breaker is subsequently opened again.
| event | metadata |
| -------------------------- | -------------------------------------- |
| `[:oban, :circuit, :trip]` | `:error, :message, :name, :stacktrace` |
  | `[:oban, :circuit, :open]` | `:name`                                |
Metadata
* `:error` — the error that tripped the circuit, see the error kinds breakdown above
* `:name` — the registered name of the process that tripped a circuit, i.e. `Oban.Notifier`
* `:message` — a formatted error message describing what went wrong
* `:stacktrace` — exception stacktrace, when available
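  A minimal handler for circuit events might look like this (sketch; the module
  name and handler id are placeholders):

  ```elixir
  defmodule CircuitAlerts do
    require Logger

    def handle_event([:oban, :circuit, :trip], _measure, meta, _config) do
      Logger.error("circuit tripped by #\{meta.name}: #\{meta.message}")
    end
  end

  :telemetry.attach("oban-circuit-alerts", [:oban, :circuit, :trip], &CircuitAlerts.handle_event/4, nil)
  ```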
## Default Logger
A default log handler that emits structured JSON is provided, see `attach_default_logger/0` for
usage. Otherwise, if you would prefer more control over logging or would like to instrument
events you can write your own handler.
## Examples
A handler that only logs a few details about failed jobs:
```elixir
defmodule MicroLogger do
require Logger
def handle_event([:oban, :job, :exception], %{duration: duration}, meta, nil) do
Logger.warn("[#\{meta.queue}] #\{meta.worker} failed in #\{duration}")
end
end
:telemetry.attach("oban-logger", [:oban, :exception], &MicroLogger.handle_event/4, nil)
```
Another great use of execution data is error reporting. Here is an example of integrating with
[Honeybadger][honey], but only reporting jobs that have failed 3 times or more:
```elixir
defmodule ErrorReporter do
def handle_event([:oban, :job, :exception], _, %{attempt: attempt} = meta, _) do
if attempt >= 3 do
context = Map.take(meta, [:id, :args, :queue, :worker])
Honeybadger.notify(meta.error, context, meta.stack)
end
end
end
:telemetry.attach("oban-errors", [:oban, :job, :exception], &ErrorReporter.handle_event/4, [])
```
[honey]: https://honeybadger.io
"""
@moduledoc since: "0.4.0"
require Logger
@doc """
Attaches a default structured JSON Telemetry handler for logging.
This function attaches a handler that outputs logs with the following fields:
* `args` — a map of the job's raw arguments
* `duration` — the job's runtime duration, in the native time unit
  * `event` — the logged event name, e.g. `"job:stop"` or `"job:exception"`, depending on whether the job succeeded or errored
* `queue` — the job's queue
* `source` — always "oban"
* `start_time` — when the job started, in microseconds
* `worker` — the job's worker module
## Examples
Attach a logger at the default `:info` level:
:ok = Oban.Telemetry.attach_default_logger()
Attach a logger at the `:debug` level:
:ok = Oban.Telemetry.attach_default_logger(:debug)
"""
@doc since: "0.4.0"
  @spec attach_default_logger(Logger.level()) :: :ok | {:error, :already_exists}
def attach_default_logger(level \\ :info) do
events = [
[:oban, :job, :start],
[:oban, :job, :stop],
[:oban, :job, :exception],
[:oban, :circuit, :trip],
[:oban, :circuit, :open]
]
:telemetry.attach_many("oban-default-logger", events, &handle_event/4, level)
end
@doc false
@spec handle_event([atom()], map(), map(), Logger.level()) :: :ok
def handle_event([:oban, :job, event], measure, meta, level) do
select_meta = Map.take(meta, [:args, :worker, :queue])
message =
measure
|> Map.take([:duration, :start_time])
|> Map.merge(select_meta)
log_message(level, "job:#{event}", message)
end
def handle_event([:oban, :circuit, event], _measure, meta, level) do
log_message(level, "circuit:#{event}", Map.take(meta, [:error, :message, :name]))
end
defp log_message(level, event, message) do
Logger.log(level, fn ->
message
|> Map.put(:event, event)
|> Map.put(:source, "oban")
|> Jason.encode!()
end)
end
end
| 37.727848 | 119 | 0.622882 |
730a2420327fd7c30f1870054b704ae5723a1d4c | 7,014 | exs | Elixir | e2e/test/bors_tests.exs | Simspace/bors-n | a8e8358d3a5e1e5056745e1811eef35d770b13d1 | [
"Apache-2.0"
] | null | null | null | e2e/test/bors_tests.exs | Simspace/bors-n | a8e8358d3a5e1e5056745e1811eef35d770b13d1 | [
"Apache-2.0"
] | 56 | 2021-06-16T19:23:06.000Z | 2022-03-28T15:11:50.000Z | e2e/test/bors_tests.exs | Simspace/bors-n | a8e8358d3a5e1e5056745e1811eef35d770b13d1 | [
"Apache-2.0"
] | 5 | 2020-11-18T23:38:29.000Z | 2021-09-30T17:45:56.000Z | defmodule BorsTests do
use ExUnit.Case, async: false
@moduletag timeout: 10 * 60 * 1_000
@staging "staging"
@into "testing/into"
@base_into "testing/base-into"
@base_hook_into "testing/base-hook-into"
@base_squash_into "testing/base-squash-into"
@normal_pr_1 "testing/normal-pr-1"
@normal_pr_2 "testing/normal-pr-2"
@custom_merge_pr_1 "testing/custom-merge-pr-1"
@custom_merge_pr_2 "testing/custom-merge-pr-2"
@merge_conflict_pr_1 "testing/merge-conflict-pr-1"
@merge_conflict_pr_2 "testing/merge-conflict-pr-2"
@empty_pr "testing/empty-pr"
@ci_status "testing/bors-ng"
@poll_interval 1_000
setup do
repo = Confex.fetch_env!(:gh, :test_repo)
close_all_open_prs!(repo)
on_exit(fn -> close_all_open_prs!(repo) end)
# Ngrok has a limit of 20 requests per minute to the external URL.
# Each webhook counts as a request, so...
Process.sleep(60 * 1_000)
%{repo: repo}
end
test "can merge two normal PRs", %{repo: repo} do
%{commit: commit} = reset_repo_state!(repo, @base_into)
%{issue_number: pr1} = GitHub.create_pull_request!(repo, @into, @normal_pr_1)
%{issue_number: pr2} = GitHub.create_pull_request!(repo, @into, @normal_pr_2)
GitHub.comment_on_issue!(repo, pr1, "bors r+")
GitHub.comment_on_issue!(repo, pr2, "bors r+")
{:changed, _} = poll_branch(repo, @staging, commit)
GitHub.Checks.succeed!(repo, @staging, @ci_status)
{:changed, _} = poll_branch(repo, @into, commit, timeout: 30 * 1_000)
# To ensure that the PRs merged, it's enough to check that certain files exist.
{:ok, _} = GitHub.get_file!(repo, "normal-pr-1", @into)
{:ok, _} = GitHub.get_file!(repo, "normal-pr-2", @into)
end
test "can squash merge two normal PRs", %{repo: repo} do
%{commit: commit} = reset_repo_state!(repo, @base_squash_into)
%{issue_number: pr1} = GitHub.create_pull_request!(repo, @into, @normal_pr_1)
%{issue_number: pr2} = GitHub.create_pull_request!(repo, @into, @normal_pr_2)
GitHub.comment_on_issue!(repo, pr1, "bors r+")
GitHub.comment_on_issue!(repo, pr2, "bors r+")
{:changed, _} = poll_branch(repo, @staging, commit)
GitHub.Checks.succeed!(repo, @staging, @ci_status)
{:changed, _} = poll_branch(repo, @into, commit)
# To ensure that the PRs merged, it's enough to check that certain files exist.
{:ok, _} = GitHub.get_file!(repo, "normal-pr-1", @into)
{:ok, _} = GitHub.get_file!(repo, "normal-pr-2", @into)
end
test "can merge two PRs with a custom merge", %{repo: repo} do
%{commit: commit} = reset_repo_state!(repo, @base_into)
%{issue_number: pr1} = GitHub.create_pull_request!(repo, @into, @custom_merge_pr_1)
%{issue_number: pr2} = GitHub.create_pull_request!(repo, @into, @custom_merge_pr_2)
GitHub.comment_on_issue!(repo, pr1, "bors r+")
GitHub.comment_on_issue!(repo, pr2, "bors r+")
{:changed, _} = poll_branch(repo, @staging, commit)
GitHub.Checks.succeed!(repo, @staging, @ci_status)
{:changed, _} = poll_branch(repo, @into, commit, timeout: 30 * 1_000)
{:ok, contents} = GitHub.get_file!(repo, "file-with-custom-merge", @into)
assert String.match?(contents, ~r/custom-merge-pr-1/)
assert String.match?(contents, ~r/custom-merge-pr-2/)
end
test "can run hooks successfully on merge and see hook output", %{repo: repo} do
%{commit: commit} = reset_repo_state!(repo, @base_hook_into)
%{issue_number: pr} = GitHub.create_pull_request!(repo, @into, @empty_pr)
GitHub.comment_on_issue!(repo, pr, "bors r+")
{:changed, _} = poll_branch(repo, @staging, commit)
GitHub.Checks.succeed!(repo, @staging, @ci_status)
{:changed, _} = poll_branch(repo, @into, commit)
# Same here, the hooks just insert empty files with their names
{:ok, _} = GitHub.get_file!(repo, "before-merge", @into)
{:ok, _} = GitHub.get_file!(repo, "after-merge", @into)
end
test "can still correctly handle merge conflicts by splitting", %{repo: repo} do
%{commit: commit} = reset_repo_state!(repo, @base_into)
%{issue_number: pr1} = GitHub.create_pull_request!(repo, @into, @merge_conflict_pr_1)
%{issue_number: pr2} = GitHub.create_pull_request!(repo, @into, @merge_conflict_pr_2)
GitHub.comment_on_issue!(repo, pr1, "bors r+")
GitHub.comment_on_issue!(repo, pr2, "bors r+")
{:changed, staging_commit} = poll_branch(repo, @staging, commit, timeout: 10 * 60 * 1_000)
{:ok, contents} = GitHub.get_file!(repo, "merge-conflict-file", @staging)
# Check that the contents of staging have one of the PRs, but not both
# i.e. that the first batch containing both failed to merge and Bors
# automatically split it.
case {String.match?(contents, ~r/merge-conflict-pr-1/), String.match?(contents, ~r/merge-conflict-pr-2/)} do
# Sadly, a xor operator doesn't exist.
{true, false} -> assert true
{false, true} -> assert true
{_, _} -> assert false
end
Process.sleep(30 * 1_000) # More sleeps to ensure we don't hit ngrok rate limits
GitHub.Checks.fail!(repo, @staging, @ci_status)
{:changed, _} = poll_branch(repo, @staging, staging_commit)
Process.sleep(30 * 1_000)
GitHub.Checks.fail!(repo, @staging, @ci_status)
# Check that `into` hasn't changed
%{commit: ^commit} = GitHub.get_branch!(repo, @into)
end
@doc """
Block until the specified branch changes from the commit specified.
Kind of gross, but it's simpler than setting up webhooks for these tests.
"""
  @spec poll_branch(String.t(), String.t(), String.t(), keyword()) :: {:changed, String.t()} | :timeout
def poll_branch(repo, branch, commit, options \\ []) do
%{timeout: timeout} = Enum.into(options, %{timeout: 5 * 60 * 1_000})
start = DateTime.utc_now()
iterate(:timeout, fn _ ->
Process.sleep(@poll_interval)
here = DateTime.utc_now()
if DateTime.diff(here, start, :millisecond) >= timeout do
:done
else
%{commit: current_head} = GitHub.get_branch!(repo, branch)
if current_head != commit do
{:done, {:changed, current_head}}
else
{:continue, :timeout}
end
end
end)
end
  @spec iterate(any(), (any() -> {:continue, any()} | {:done, any()} | :done)) :: any()
defp iterate(init, f) do
case f.(init) do
{:continue, next} ->
iterate(next, f)
{:done, value} ->
value
:done ->
init
end
end
def close_all_open_prs!(repo) do
%{prs: prs} = GitHub.get_open_pull_requests!(repo)
prs |> Enum.reduce(:ok, fn %{issue_number: pr}, _ ->
GitHub.close_pull_request!(repo, pr)
:ok
end)
GitHub.Checks.fail!(repo, @staging, @ci_status)
end
@spec reset_repo_state!(String.t(), String.t()) :: %{commit: String.t()}
defp reset_repo_state!(repo, base_branch) do
%{commit: commit} = GitHub.get_branch!(repo, base_branch)
GitHub.force_push!(repo, @into, base_branch)
GitHub.force_push!(repo, @staging, base_branch)
%{commit: commit}
end
end
| 36.722513 | 112 | 0.661677 |
730a25e8422f525ec93c0eaa0d448808f40cccc3 | 119 | ex | Elixir | lib/oauth2_example/repo.ex | plank-tools/oauth2_example | 22e9ad9d41676bd161ee854511babf814bf701e7 | [
"MIT"
] | null | null | null | lib/oauth2_example/repo.ex | plank-tools/oauth2_example | 22e9ad9d41676bd161ee854511babf814bf701e7 | [
"MIT"
] | null | null | null | lib/oauth2_example/repo.ex | plank-tools/oauth2_example | 22e9ad9d41676bd161ee854511babf814bf701e7 | [
"MIT"
] | null | null | null | defmodule OAuth2Example.Repo do
use Ecto.Repo,
otp_app: :oauth2_example,
adapter: Ecto.Adapters.Postgres
end
| 19.833333 | 35 | 0.756303 |
730a3d041f471d75312658a7690cfe3a18aff442 | 1,091 | ex | Elixir | lib/weaver/builder_v2/thread.ex | secretworry/weaver | 93f30f2d651fa47da2589fa871b58a2097815e10 | [
"MIT"
] | 5 | 2016-11-07T09:44:12.000Z | 2021-12-21T14:41:35.000Z | lib/weaver/builder_v2/thread.ex | secretworry/weaver | 93f30f2d651fa47da2589fa871b58a2097815e10 | [
"MIT"
] | null | null | null | lib/weaver/builder_v2/thread.ex | secretworry/weaver | 93f30f2d651fa47da2589fa871b58a2097815e10 | [
"MIT"
] | null | null | null | defmodule Weaver.BuilderV2.Thread do
@type knot :: atom | binary | integer
@type t :: [knot]
def valid?(weft) do
do_valid?(weft)
end
def get(_simple_weft, nil) do
nil
end
def get(simple_weft, item) do
do_get(simple_weft, item)
end
defp do_get([knot_id|tail], item) when is_atom(knot_id) or is_binary(knot_id) do
case Map.get(item, knot_id) do
nil -> nil
child -> do_get(tail, child)
end
end
defp do_get([knot_id | tail], item) when is_integer(knot_id) and is_list(item) do
case Enum.at(item, knot_id) do
nil -> nil
child -> do_get(tail, child)
end
end
defp do_get([knot_id | tail], item) when is_integer(knot_id) and is_tuple(item) do
case elem(item, knot_id) do
nil -> nil
child -> do_get(tail, child)
end
end
defp do_get([], item) do
item
end
defp do_valid?([knot_id|tail]) when is_atom(knot_id) or is_binary(knot_id) or is_integer(knot_id) do
do_valid?(tail)
end
defp do_valid?([_knot_id|_tail]) do
false
end
defp do_valid?([]) do
true
end
end
| 19.482143 | 102 | 0.640697 |
730a3e51e5a8f29fdad1640a681e61cdc149b412 | 1,410 | ex | Elixir | lib/phoenix/router/console_formatter.ex | brightroll/phoenix | b93022086322bcc1d797214a28e0c9710f537c22 | [
"MIT"
] | null | null | null | lib/phoenix/router/console_formatter.ex | brightroll/phoenix | b93022086322bcc1d797214a28e0c9710f537c22 | [
"MIT"
] | null | null | null | lib/phoenix/router/console_formatter.ex | brightroll/phoenix | b93022086322bcc1d797214a28e0c9710f537c22 | [
"MIT"
] | null | null | null | defmodule Phoenix.Router.ConsoleFormatter do
alias Phoenix.Project
def default_router do
Project.module_root.Router
end
def format(router) do
routes = router.__routes__
Enum.join(format_routes(routes), "\n")
end
def format_routes(routes) do
column_widths = calculate_column_widths(routes)
for route <- routes, do: format_route(route, column_widths)
end
defp calculate_column_widths(routes) do
Enum.reduce routes, [0, 0, 0], fn(route, acc) ->
{method, path, _controller, _action, options} = route
[method_len, path_len, route_name_len] = acc
route_name = Keyword.get(options, :as, :"")
[max(method_len, String.length(to_string(method))),
max(path_len, String.length(path)),
max(route_name_len, String.length(to_string(route_name)))]
end
end
defp format_route(route, column_widths) do
{method, path, controller, action, options} = route
route_name = Keyword.get(options, :as, :"")
[method_len, path_len, route_name_len] = column_widths
controller_name = String.replace(to_string(controller),
to_string(Project.module_root.Controllers) <> ".",
"")
String.rjust(to_string(route_name), route_name_len) <> " " <>
String.ljust(String.upcase(to_string(method)), method_len) <> " " <>
String.ljust(path, path_len) <> " " <>
controller_name <> "#" <> to_string(action)
end
end
| 30 | 73 | 0.682979 |
730a43b1469f7be873741bbe766cf62a64b91b36 | 300 | ex | Elixir | lib/mix/tasks/changelog.ex | maxdrift/realleasy | f12121b67475a95d6862adb21153bb068fd0e49b | [
"MIT"
] | null | null | null | lib/mix/tasks/changelog.ex | maxdrift/realleasy | f12121b67475a95d6862adb21153bb068fd0e49b | [
"MIT"
] | null | null | null | lib/mix/tasks/changelog.ex | maxdrift/realleasy | f12121b67475a95d6862adb21153bb068fd0e49b | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.Changelog do
@moduledoc """
Generate a log of changes before releasing a new version of an application.
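  ## Usage

      mix changelog

  Any extra command-line arguments are passed through to `Realleasy.CLI.main/1`
  after the `"changelog"` subcommand.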
"""
@shortdoc "Generates a changelog"
@requirements ["app.start"]
use Mix.Task
@impl Mix.Task
def run(argv), do: Realleasy.CLI.main(["changelog" | argv])
end
| 23.076923 | 77 | 0.703333 |
730a969bd1c08ba7d229ca1f8046633e969257d4 | 139 | ex | Elixir | lib/log/level_filter/none.ex | Carburetor/log | 120d535a090a41f52c141c5e08be294bb78e3a6c | [
"MIT"
] | null | null | null | lib/log/level_filter/none.ex | Carburetor/log | 120d535a090a41f52c141c5e08be294bb78e3a6c | [
"MIT"
] | null | null | null | lib/log/level_filter/none.ex | Carburetor/log | 120d535a090a41f52c141c5e08be294bb78e3a6c | [
"MIT"
] | null | null | null | defmodule Log.LevelFilter.None do
@moduledoc """
Identifier for None level filter
"""
defstruct []
@type t :: %__MODULE__{}
end
| 15.444444 | 34 | 0.669065 |
730aa12050c6a6b4ab35664fade814f0393bce57 | 889 | ex | Elixir | lib/panglao/tasks/remove.ex | ikeikeikeike/panglao | 6d3f6515d9f1ceb9a2e771ae2d54c222cedbf538 | [
"MIT"
] | 1 | 2017-02-18T21:20:17.000Z | 2017-02-18T21:20:17.000Z | lib/panglao/tasks/remove.ex | ikeikeikeike/panglao | 6d3f6515d9f1ceb9a2e771ae2d54c222cedbf538 | [
"MIT"
] | null | null | null | lib/panglao/tasks/remove.ex | ikeikeikeike/panglao | 6d3f6515d9f1ceb9a2e771ae2d54c222cedbf538 | [
"MIT"
] | null | null | null | defmodule Panglao.Tasks.Remove do
# import Panglao.Tasks.Base
import Ecto.Query
alias Panglao.{Repo, RepoReader, Object, Client.Cheapcdn}
def perform do
Object.with_removable
|> RepoReader.all
|> remove()
end
def perform(:disksize) do
Enum.each Cheapcdn.abledisk, fn {client, resp} ->
with %{body: %{"root" => false}} <- resp do
from(q in Object, where: q.stat != "REMOVED", order_by: :updated_at, limit: 200)
|> RepoReader.all
|> Enum.filter(&Cheapcdn.exists?(client, &1.url))
|> remove()
end
end
end
defp remove(objects) do
Enum.each objects, fn object ->
with src when is_binary(src)
and byte_size(src) > 0 <- object.src,
{:ok, _} <- Cheapcdn.removefile(object.url, src) do
nil
end
Repo.update(Object.remove_changeset(object))
end
end
end
| 24.694444 | 88 | 0.608549 |
730abd17f864c591cc24de75cdbc1fb074109d57 | 5,654 | ex | Elixir | lib/document.ex | arikai/toml-elixir | a0c3b83cdad0d5d0d0249a4f303d1e9449591472 | [
"Apache-2.0"
] | 176 | 2018-07-29T08:29:40.000Z | 2022-03-30T06:20:39.000Z | lib/document.ex | arikai/toml-elixir | a0c3b83cdad0d5d0d0249a4f303d1e9449591472 | [
"Apache-2.0"
] | 26 | 2018-08-02T19:00:56.000Z | 2022-03-18T21:29:05.000Z | lib/document.ex | arikai/toml-elixir | a0c3b83cdad0d5d0d0249a4f303d1e9449591472 | [
"Apache-2.0"
] | 22 | 2018-08-02T18:39:52.000Z | 2021-12-25T09:53:58.000Z | defmodule Toml.Document do
@moduledoc false
# Represents a TOML document, and handles conversion to a plain map
# See `Toml.Builder` for the actual logic for constructing the document.
defstruct [:keys, :comments, :open_table, :comment_stack, :keyfun, :transforms]
# A key is either binary or atom depending on the decoder option value
@type key :: binary | atom | term
# A value is the fully decoded value from the TOML
@type value ::
%{key => value}
| {:table_array, [%{key => value}]}
| number
| binary
| NaiveDateTime.t()
| DateTime.t()
| Date.t()
| Time.t()
| [value]
# A keypath is a list of keys, they are all of the same key type
@type keypath :: list(binary) | list(atom) | list(term)
@type t :: %__MODULE__{
keys: %{key => value},
comments: %{keypath => binary},
open_table: keypath | nil,
comment_stack: [binary],
keyfun: nil | (binary -> term | no_return),
transforms: [Toml.Transform.t()]
}
@doc """
Create a new empty TOML document
"""
@spec new(Toml.opts()) :: t
def new(opts) when is_list(opts) do
keyfun = to_key_fun(Keyword.get(opts, :keys, :strings))
transforms = Keyword.get(opts, :transforms, [])
%__MODULE__{
keys: %{},
comments: %{},
open_table: nil,
comment_stack: [],
keyfun: keyfun,
transforms: transforms
}
end
@doc """
Convert the given TOML document to a plain map.
During conversion to a plain map, keys are converted according
to the key type defined when the document was created.
In addition to converting keys, if transforms were defined, they are
applied to values depth-first, bottom-up. Transforms are first composed
into a single function, designed to be executed in the order they appear
in the list provided; if any transform returns an error, conversion is
stopped and an error is returned - otherwise, the value is passed from
transformer to transformer and the final result replaces the value in the
document.
"""
def to_map(%__MODULE__{keys: keys, keyfun: keyfun, transforms: ts}) do
transform =
case ts do
[] ->
nil
ts when is_list(ts) ->
Toml.Transform.compose(ts)
end
{:ok, to_map2(keys, keyfun, transform)}
catch
_, {:error, {:keys, {:non_existing_atom, _}}} = err ->
err
end
# Called when a table is being converted
defp to_map2(m, nil, nil) when is_map(m) do
for {k, v} <- m, into: %{}, do: {k, to_map3(k, v, nil, nil)}
end
defp to_map2(m, keyfun, nil) when is_map(m) and is_function(keyfun, 1) do
for {k, v} <- m, into: %{} do
k2 = keyfun.(k)
{k2, to_map3(k2, v, keyfun, nil)}
end
end
defp to_map2(m, nil, transform) when is_map(m) and is_function(transform, 2) do
for {k, v} <- m, into: %{} do
v2 = to_map3(k, v, nil, transform)
{k, v2}
end
end
defp to_map2(m, keyfun, transform)
when is_map(m) and is_function(keyfun, 1) and is_function(transform, 2) do
for {k, v} <- m, into: %{} do
k2 = keyfun.(k)
v2 = to_map3(k2, v, keyfun, transform)
{k2, v2}
end
end
# Called when a table value is being converted
defp to_map3(_key, %_{} = s, _keyfun, nil), do: s
defp to_map3(key, %_{} = s, _keyfun, transform), do: transform.(key, s)
defp to_map3(key, list, keyfun, nil) when is_list(list) do
for v <- list, do: to_map3(key, v, keyfun, nil)
end
defp to_map3(key, list, _keyfun, transform) when is_list(list) do
transform.(key, list)
end
defp to_map3(_key, {:table_array, list}, keyfun, nil) do
for v <- Enum.reverse(list) do
to_map2(v, keyfun, nil)
end
end
defp to_map3(key, {:table_array, list}, keyfun, transform) do
for v <- Enum.reverse(list) do
to_map2(v, keyfun, transform)
end
|> (&transform.(key, &1)).()
end
defp to_map3(_key, v, keyfun, nil) when is_map(v) do
to_map2(v, keyfun, nil)
end
defp to_map3(key, v, keyfun, transform) when is_map(v) and is_function(transform) do
transform.(key, to_map2(v, keyfun, transform))
end
defp to_map3(_key, v, _keyfun, nil), do: v
defp to_map3(key, v, _keyfun, transform), do: transform.(key, v)
# Convert the value of `:keys` to a key conversion function (if not already one)
@valid_keys_opts [:atoms, :atoms!, :strings, "(key :: String.t) -> term"]
defp to_key_fun(:atoms), do: &to_atom/1
defp to_key_fun(:atoms!), do: &to_existing_atom/1
defp to_key_fun(:strings), do: nil
defp to_key_fun(fun) when is_function(fun, 1), do: fun
defp to_key_fun(invalid), do: throw({:badarg, {:keys, invalid, @valid_keys_opts}})
# Convert the given key (as binary) to an atom
# Handle converting uppercase keys to module names rather than plain atoms
defp to_atom(<<c::utf8, _::binary>> = key) when c >= ?A and c <= ?Z do
Module.concat([key])
end
defp to_atom(key), do: String.to_atom(key)
# Convert the given key (as binary) to an existing atom
# Handle converting uppercase keys to module names rather than plain atoms
#
# NOTE: This throws an error if the atom does not exist, and is intended to
# be handled in the decoder
defp to_existing_atom(<<c::utf8, _::binary>> = key) when c >= ?A and c <= ?Z do
Module.concat([String.to_existing_atom(key)])
rescue
_ ->
throw({:error, {:keys, {:non_existing_atom, key}}})
end
defp to_existing_atom(key) do
String.to_existing_atom(key)
rescue
_ ->
throw({:error, {:keys, {:non_existing_atom, key}}})
end
end
| 30.896175 | 86 | 0.631588 |
730b14b62c7b2be6b66d52e427bfee8e43c9970c | 13,525 | ex | Elixir | lib/ex_dbus/service.ex | diodechain/ex_dbus | 14d0da9a1b9eb756b0435bec63ad470877d96abf | [
"Apache-2.0"
] | null | null | null | lib/ex_dbus/service.ex | diodechain/ex_dbus | 14d0da9a1b9eb756b0435bec63ad470877d96abf | [
"Apache-2.0"
] | null | null | null | lib/ex_dbus/service.ex | diodechain/ex_dbus | 14d0da9a1b9eb756b0435bec63ad470877d96abf | [
"Apache-2.0"
] | null | null | null | defmodule ExDBus.Service do
require Logger
use GenServer
alias ExDBus.Tree
alias ExDBus.Spec
alias ErlangDBus.Message
def start_link(opts, gen_opts \\ []) do
GenServer.start_link(
__MODULE__,
opts,
gen_opts
)
end
@impl true
def init([_ | _] = opts) do
service_name = Keyword.get(opts, :name, nil)
schema = Keyword.get(opts, :schema, nil)
server = Keyword.get(opts, :server, nil)
router = Keyword.get(opts, :router, nil)
# if service_name == nil do
# # raise "Service requires the :name option"
# end
if schema == nil do
raise "Service requires the :schema option"
end
root = get_root(schema)
state = %{
name: service_name,
root: root,
bus: nil,
service: nil,
server: server,
registered_objects: %{},
router: router,
error: nil
}
case connect_bus(self()) do
{:ok, bus} ->
state = Map.put(state, :bus, bus)
if service_name != nil do
case register_name(bus, service_name) do
:ok ->
{:ok, state}
error ->
ExDBus.Bus.close(bus)
{:stop, error}
end
else
{:ok, state}
end
{:ok, state}
error ->
Logger.debug("Failed to connect to D-Bus: #{inspect(error)}")
{:ok, Map.put(state, :error, error)}
end
end
def get_root(schema) when is_atom(schema) do
schema.__schema__()
end
def get_root({:object, _, _} = root) do
root
end
def get_root(_) do
raise "Invalid :schema provided. Must be a module or a :object tree struct"
end
def get_bus(service_pid) do
GenServer.call(service_pid, :get_bus)
end
@spec get_name(pid() | {:via, atom(), any()}) :: nil | String.t()
def get_name(service_pid) do
GenServer.call(service_pid, :get_name)
end
@spec get_dbus_pid(pid()) :: {:ok, String.t()} | {:error, any()}
def get_dbus_pid(service_pid) do
GenServer.call(service_pid, :get_dbus_pid)
end
def get_router(service_pid) do
GenServer.call(service_pid, :get_router)
end
def set_router(service_pid, router) do
GenServer.call(service_pid, {:set_router, router})
end
def register_object(service_pid, path) do
GenServer.call(service_pid, {:register_object, path, service_pid})
end
def register_object(service_pid, path, server_pid)
when is_pid(server_pid) or is_atom(server_pid) do
GenServer.call(service_pid, {:register_object, path, server_pid})
end
def unregister_object(service_pid, path) do
GenServer.call(service_pid, {:unregister_object, path})
end
def is_object_registered?(service_pid, path) do
GenServer.call(service_pid, {:is_object_registered, path})
end
def call_method(pid, bus, path, interface, method, args) do
GenServer.call(pid, {:call_method, bus, path, interface, method, args})
end
def send_signal(pid, path, interface, signal) do
GenServer.cast(pid, {:send_signal, path, interface, signal})
end
def send_signal(pid, path, interface, signal, {signature, types, args}) do
GenServer.cast(pid, {:send_signal, path, interface, signal, {signature, types, args}})
end
defp __register_object(%{registered_objects: objects} = state, path, pid) do
# Do register
objects = Map.put(objects, path, pid)
Map.put(state, :registered_objects, objects)
end
defp __unregister_object(%{registered_objects: objects} = state, path) do
# Do unregister
objects = Map.delete(objects, path)
Map.put(state, :registered_objects, objects)
end
defp __get_registered_object(%{registered_objects: objects}, path) do
case Map.get(objects, path, nil) do
nil ->
{:error, "Object not registered"}
pid ->
if Process.alive?(pid) do
{:ok, pid}
else
{:error, "Object service not alive"}
end
end
end
# handle_call
@impl true
def handle_call(:get_name, _from, %{name: name} = state) do
{:reply, name, state}
end
def handle_call(:get_bus, _from, %{bus: bus} = state) do
{:reply, bus, state}
end
def handle_call(:get_dbus_pid, _from, %{bus: bus} = state) when is_pid(bus) do
reply = ExDBus.Bus.get_dbus_pid(bus)
{:reply, reply, state}
end
def handle_call(:get_dbus_pid, _from, state) do
{:reply, {:error, "No DBUS bus service running"}, state}
end
def handle_call(:get_router, _from, %{router: router} = state) do
{:reply, {:ok, router}, state}
end
def handle_call({:set_router, router}, _from, state) do
{:reply, {:ok, router}, Map.put(state, :router, router)}
end
def handle_call({:get_object, path}, _from, %{root: root} = state) do
{:reply, Tree.find_path(root, path), state}
end
def handle_call({:get_interface, path, name}, _from, %{root: root} = state) do
with {:ok, object} <- Tree.find_path(root, path) do
{:reply, Tree.find_interface(object, name), state}
else
error -> {:reply, error, state}
end
end
def handle_call({:introspect, destination, path}, _from, %{bus: bus} = state) do
reply = GenServer.call(bus, {:introspect, destination, path})
{:reply, reply, state}
end
def handle_call({:find_object, destination, path}, _from, %{bus: bus} = state) do
reply = GenServer.call(bus, {:find_object, destination, path})
{:reply, reply, state}
end
def handle_call({:has_object, destination, path}, _from, %{bus: bus} = state) do
reply = GenServer.call(bus, {:has_object, destination, path})
{:reply, reply, state}
end
def handle_call({:has_interface, destination, path, interface}, _from, %{bus: bus} = state) do
reply = GenServer.call(bus, {:has_interface, destination, path, interface})
{:reply, reply, state}
end
def handle_call(
{:call_method, destination, path, interface, method, {signature, types, body}},
_from,
%{bus: bus} = state
) do
reply =
GenServer.call(bus, {
:call_method,
destination,
path,
interface,
method,
{signature, types, body}
})
{:reply, reply, state}
end
def handle_call(
{:register_object, path, server_pid},
_from,
state
) do
case handle_call({:is_object_registered, path}, nil, state) do
{:reply, false, state} ->
{:reply, {:ok, server_pid}, __register_object(state, path, server_pid)}
{:reply, true, state} ->
{:reply, {:error, "Object path already registered to a server"}, state}
end
end
def handle_call(
{:unregister_object, path},
_from,
%{registered_objects: objects} = state
) do
case handle_call({:is_object_registered, path}, nil, state) do
{:reply, true, state} ->
{:reply, {:ok, Map.get(objects, path)}, __unregister_object(state, path)}
{:reply, false, state} ->
{:reply, {:error, "Object path not registered"}, state}
end
end
def handle_call({:is_object_registered, path}, _, %{registered_objects: objects} = state) do
case Map.get(objects, path, nil) do
nil ->
{:reply, false, state}
pid ->
if Process.alive?(pid) do
{:reply, true, state}
else
{:reply, false, __unregister_object(state, path)}
end
end
end
def handle_call({:replace_interface, path, interface}, _from, %{root: root} = state) do
case Tree.replace_interface_at(root, path, interface) do
{:ok, root} -> {:reply, :ok, Map.put(state, :root, root)}
_ -> {:reply, :error, state}
end
end
def handle_call(request, from, state) do
IO.inspect(from, label: "[CALL] Message from")
IO.inspect(request, label: "[CALL] Message request")
{:noreply, state}
end
# handle_cast
@impl true
def handle_cast(
{:send_signal, path, interface, signal},
%{bus: bus} = state
) do
GenServer.cast(bus, {
:send_signal,
path,
interface,
signal
})
{:noreply, state}
end
def handle_cast(
{:send_signal, path, interface, signal, {signature, types, args}},
%{bus: bus} = state
) do
GenServer.cast(bus, {
:send_signal,
path,
interface,
signal,
{signature, types, args}
})
{:noreply, state}
end
def handle_cast(request, state) do
IO.inspect(request, label: "[CAST] Request")
{:noreply, state}
end
# handle_info
@impl true
def handle_info({:dbus_method_call, msg, conn} = instr, state) do
path = Message.get_field(:path, msg)
case __get_registered_object(state, path) do
{:ok, handle} ->
Process.send_after(handle, instr, 1, [])
_ ->
state = handle_dbus_method_call(msg, conn, state)
{:noreply, state}
end
end
def handle_info(message, state) do
IO.inspect(message, label: "----[INFO]-----")
{:noreply, state}
end
def handle_dbus_method_call(msg, conn, state) do
path = Message.get_field(:path, msg)
interface = Message.get_field(:interface, msg)
member = Message.get_field(:member, msg)
signature =
Message.find_field(:signature, msg)
|> case do
:undefined -> ""
s -> s
end
body =
case msg do
{:dbus_message, _, :undefined} -> nil
{:dbus_message, _, body} -> body
end
method = {path, interface, member, signature, body}
reply =
case exec_dbus_method_call(method, state) do
{:ok, types, values} ->
Message.return(msg, types, values)
{:error, name, message} ->
Message.error(msg, name, message)
end
case reply do
{:error, _, _} ->
:ok
_ ->
try do
:dbus_connection.cast(conn, reply)
rescue
_ -> nil
end
end
state
end
@spec exec_dbus_method_call(
{path :: String.t(), interface_name :: String.t(), method_name :: String.t(),
signature :: String.t(), body :: any},
state :: map()
) ::
Spec.dbus_reply()
def exec_dbus_method_call({path, interface_name, method_name, signature, args}, %{
root: root,
router: router
}) do
with {:object, {:ok, object}} <- {:object, Tree.find_path([root], path)},
{:interface, {:ok, interface}} <-
{:interface, Tree.find_interface(object, interface_name)},
{:method, {:ok, method}} <-
{:method, Tree.find_method(interface, method_name, signature)} do
case Tree.get_method_callback(method) do
{:ok, callback} ->
call_method_callback(
callback,
method_name,
args,
%{
node: object,
path: path,
interface: interface_name,
method: method_name,
signature: signature,
router: router
}
)
nil ->
route_method(router, path, interface_name, method_name, signature, args, %{
node: object,
router: router
})
_ ->
{:error, "org.freedesktop.DBus.Error.UnknownMethod",
"Method not found on given interface"}
end
else
{:object, _} ->
{:error, "org.freedesktop.DBus.Error.UnknownObject",
"No such object (#{path}) in the service"}
{:interface, _} ->
{:error, "org.freedesktop.DBus.Error.UnknownInterface",
"Interface (#{interface_name}) not found at given path"}
{:method, _} ->
{:error, "org.freedesktop.DBus.Error.UnknownMethod",
"Method (#{method_name}) not found on given interface"}
end
end
# @spec register_service(pid(), String.t()) :: {:ok, {pid(), pid()}} | :ignore | {:error, any}
# def register_service(service_pid, service_name) do
# with {:ok, bus} <- ExDBus.Bus.start_link(:session),
# :ok <- ExDBus.Bus.connect(bus, service_pid),
# :ok <- ExDBus.Bus.register_name(bus, service_name) do
# {:ok, {service_pid, bus}}
# end
# end
defp register_name(bus, service_name) do
ExDBus.Bus.register_name(bus, service_name)
end
defp connect_bus(service_pid) do
with {:ok, bus} <- ExDBus.Bus.start_link(:session),
:ok <- ExDBus.Bus.connect(bus, service_pid) do
{:ok, bus}
end
end
# defp route_method(nil, _path, _interface, _method, _args, _context) do
# {:error, "org.freedesktop.DBus.Error.UnknownMethod", "Method not found on given interface"}
# end
defp route_method(router, path, interface, method, signature, args, context) do
try do
ExDBus.Router.Protocol.method(router, path, interface, method, signature, args, context)
rescue
_e ->
{:error, "org.freedesktop.DBus.Error.UnknownMethod",
"Method not found on given interface"}
else
:skip ->
{:error, "org.freedesktop.DBus.Error.UnknownMethod",
"Method not found on given interface"}
result ->
result
end
end
def call_method_callback(callback, _method_name, args, context) when is_function(callback) do
try do
callback.(args, context)
rescue
e -> {:error, "org.freedesktop.DBus.Error.Failed", e.message}
else
return -> return
end
end
def call_method_callback({:call, pid, remote_method}, method_name, args, context) do
GenServer.call(pid, {remote_method, method_name, args, context})
end
end
| 26.835317 | 97 | 0.60525 |
730b2fcc9a1c98bcab81c5160d546b2e2b1928ec | 4,048 | ex | Elixir | lib/quantum/normalizer.ex | elixir-twister/quantum-elixir | c13cd11fbb1791e9161413810373914df0dbf11f | [
"Apache-2.0"
] | null | null | null | lib/quantum/normalizer.ex | elixir-twister/quantum-elixir | c13cd11fbb1791e9161413810373914df0dbf11f | [
"Apache-2.0"
] | null | null | null | lib/quantum/normalizer.ex | elixir-twister/quantum-elixir | c13cd11fbb1791e9161413810373914df0dbf11f | [
"Apache-2.0"
] | null | null | null | defmodule Quantum.Normalizer do
@moduledoc false
def normalize(j) do
nj = normalize_job(j)
{nj.name, nj}
end
def default_nodes do
Application.get_env(:quantum, :default_nodes, [node()])
end
# Creates named Quantum.Job
# Input:
# [
# newsletter: [
# schedule: "* * * * *",
# task: "MyModule.my_method",
# args: [1, 2, 3]
# ]
# ]
# Output:
# %Quantum.Job{
# name: :newsletter,
# schedule: "* * * * *",
# task: {"MyModule", "my_method"},
# args: [1, 2, 3]
# }
defp normalize_job({job_name, %Quantum.Job{} = job}) do
    # Sets defaults for job and normalizes values
job |> Map.merge(job_opts(job_name, Map.to_list(job)))
end
defp normalize_job({job_name, opts}) when opts |> is_list or opts |> is_map do
%Quantum.Job{} |> Map.merge(job_opts(job_name, opts))
end
# Creates unnamed Quantum.Job
# Input:
# "* * * * * MyModule.my_method"
# OR
# "* * * * *": {MyModule, "my_method"}
# OR
# "* * * * *": &MyModule.my_method/0
# Output:
# %Quantum.Job{
# name: :__unnamed__,
# schedule: "* * * * *",
# task: {"MyModule", "my_method"} / &MyModule.my_method/0,
# args: []
# }
defp normalize_job(j) do
opts = case normalize_unnamed_job(j) do
{schedule, task, args} -> %{schedule: schedule, task: task, args: args}
{schedule, task} -> %{schedule: schedule, task: task}
end
normalize_job({nil, opts})
end
# Converts a job {expr, fun} into its canonical format.
# Cron expression is converted to lowercase string and
# day and month names are translated to their indexes.
defp normalize_unnamed_job({e, fun}) do
schedule = normalize_schedule(e)
case normalize_task(fun) do
{mod, fun, args} -> {schedule, {mod, fun}, args}
fun -> {schedule, fun}
end
end
# Converts a string representation of schedule+job into
# its canonical format.
# Input: "* * * * * MyApp.MyModule.my_method"
# Output: {"* * * * *", {"MyApp.MyModule", "my_method"}}
defp normalize_unnamed_job(e) do
[[_, schedule, task]] =
~r/^(\S+\s+\S+\s+\S+\s+\S+\s+\S+|@\w+)\s+(.*\.\w+)$/
|> Regex.scan(e)
{normalize_schedule(schedule), normalize_task(task)}
end
# Converts string representation of task into its
# canonical format
# Input: "MyApp.MyModule.my_method"
# Output: {"MyApp.MyModule", "my_method"}
defp normalize_task(t) when t |> is_binary do
[[_, mod, fun]] = Regex.scan(~r/^(.*)\.(\w+)$/, t)
{mod, fun}
end
defp normalize_task({mod, fun, args}), do: {mod, fun, args}
defp normalize_task({mod, fun}), do: {mod, fun}
defp normalize_task(fun), do: fun
defp normalize_schedule(e = %Crontab.CronExpression{}), do: e
defp normalize_schedule(e) when e |> is_atom, do: e |> Atom.to_string |> normalize_schedule
defp normalize_schedule("nil"), do: nil
defp normalize_schedule(e) when e |> is_binary, do: e |> String.downcase |> Crontab.CronExpression.Parser.parse!
# Extracts given option from options list of named task
defp extract(name, opts, d \\ nil)
defp extract(name, opts, d) when opts |> is_list, do: extract(name, opts |> Enum.into(%{}), d)
defp extract(:schedule, opts, d), do: opts |> Map.get(:schedule, d) |> normalize_schedule
defp extract(:task, opts, d), do: opts |> Map.get(:task, d) |> normalize_task
defp extract(:nodes, opts, d), do: opts |> Map.get(:nodes, d) || d
defp extract(name, opts, d), do: opts |> Map.get(name, d)
defp atomize(list) when is_list(list), do: Enum.map(list, &atomize/1)
defp atomize(string) when is_binary(string), do: String.to_atom(string)
defp atomize(atom) when is_atom(atom), do: atom
defp job_opts(job_name, opts) do
overlap = Application.get_env(:quantum, :default_overlap, true)
%{
name: job_name,
schedule: extract(:schedule, opts),
task: extract(:task, opts),
args: extract(:args, opts, []),
overlap: extract(:overlap, opts, overlap),
nodes: :nodes |> extract(opts, default_nodes()) |> atomize
}
end
end
| 32.384 | 114 | 0.623024 |
730b4147ffc4aee5c42b81a40bd8b0bcf56e4c33 | 2,409 | ex | Elixir | lib/hedwig_slack/connection.ex | kevinmcnamee/hedwig_slack | 0ca4a18e8c1cf7550b34f052b8f086a1d19ea706 | [
"MIT"
] | 45 | 2016-03-24T00:37:55.000Z | 2020-11-28T01:26:18.000Z | lib/hedwig_slack/connection.ex | kevinmcnamee/hedwig_slack | 0ca4a18e8c1cf7550b34f052b8f086a1d19ea706 | [
"MIT"
] | 16 | 2016-03-24T02:41:49.000Z | 2022-03-17T02:21:48.000Z | lib/hedwig_slack/connection.ex | kevinmcnamee/hedwig_slack | 0ca4a18e8c1cf7550b34f052b8f086a1d19ea706 | [
"MIT"
] | 37 | 2016-03-18T22:11:43.000Z | 2022-03-06T10:51:51.000Z | defmodule HedwigSlack.Connection do
@behaviour :websocket_client
alias HedwigSlack.{Connection, ConnectionSupervisor}
require Logger
@keepalive 30_000
defstruct next_id: 1, owner: nil, ref: nil
### PUBLIC API ###
def start(url) do
{:ok, pid} = Supervisor.start_child(ConnectionSupervisor, [url, self()])
ref = Process.monitor(pid)
{:ok, pid, ref}
end
def start_link(url, owner) do
:websocket_client.start_link(to_charlist(url), __MODULE__, owner)
end
def ws_send(pid, msg) do
send(pid, {:ws_send, msg})
:ok
end
def close(pid, timeout \\ 5000) do
send(pid, :close)
receive do
{:DOWN, _, :process, ^pid, _reason} ->
:ok
after timeout ->
true = Process.exit(pid, :kill)
:ok
end
end
### :websocket_client callbacks ###
def init(owner) do
ref = Process.monitor(owner)
{:reconnect, %Connection{owner: owner, ref: ref}}
end
def onconnect(_req, state) do
{:ok, state, @keepalive}
end
def ondisconnect(reason, state) do
Logger.warn "Disconnected: #{inspect reason}"
{:close, reason, state}
end
def websocket_handle({:text, data}, _ref, state) do
send(self(), {:handle_data, data})
{:ok, state}
end
def websocket_handle({:ping, data}, _req, state) do
{:reply, {:pong, data}, state}
end
def websocket_handle({:pong, _data}, _req, state) do
{:ok, state}
end
def websocket_handle(msg, _req, state) do
Logger.warn "Received unhandled websocket message: #{inspect msg}"
{:ok, state}
end
def websocket_info(:close, _req, state) do
{:close, <<>>, state}
end
def websocket_info({:DOWN, ref, :process, pid, _reason}, _req, %{owner: pid, ref: ref} = state) do
{:close, <<>>, state}
end
def websocket_info({:handle_data, data}, _req, %{owner: owner} = state) do
data = Jason.decode!(data)
Logger.debug "INCOMING > #{inspect data}"
send(owner, data)
{:ok, state}
end
def websocket_info({:ws_send, msg}, _from, %{next_id: id} = state) do
msg = Map.put(msg, :id, id)
Logger.debug "OUTGOING > #{inspect msg}"
msg = Jason.encode!(msg)
{:reply, {:text, msg}, %{state | next_id: id + 1}}
end
def websocket_info(msg, _req, state) do
Logger.warn "Received unhandled message: #{inspect msg}"
{:ok, state}
end
def websocket_terminate(_reason, _req, _state) do
:ok
end
end
| 22.514019 | 100 | 0.631797 |
730b43895eb6c6fbec142ad45ab782efe26cd168 | 417 | ex | Elixir | lib/remote_retro_web/plugs/forbid_non_striders.ex | octosteve/remote_retro | 3385b0db3c2daab934ce12a2f7642a5f10ac5147 | ["MIT"] | 523 | 2017-03-15T15:21:11.000Z | 2022-03-14T03:04:18.000Z | lib/remote_retro_web/plugs/forbid_non_striders.ex | octosteve/remote_retro | 3385b0db3c2daab934ce12a2f7642a5f10ac5147 | ["MIT"] | 524 | 2017-03-16T18:31:09.000Z | 2022-02-26T10:02:06.000Z | lib/remote_retro_web/plugs/forbid_non_striders.ex | octosteve/remote_retro | 3385b0db3c2daab934ce12a2f7642a5f10ac5147 | ["MIT"] | 60 | 2017-05-01T18:02:28.000Z | 2022-03-04T21:04:56.000Z |
defmodule RemoteRetroWeb.Plugs.ForbidNonStriders do
import Plug.Conn
def init(options), do: options
def call(conn, _opts) do
%{current_user: current_user} = conn.assigns
case current_user.email =~ ~r/@stridenyc\.com$/ do
true ->
conn
false ->
conn
|> put_resp_content_type("text/plain")
|> send_resp(403, "403 Forbidden")
|> halt
end
end
end
| 20.85 | 54 | 0.616307 |
730b4c57389c5277d1b11d0480e3c281916d3468 | 1,584 | exs | Elixir | test/errors_test.exs | Badiapp/bolt_sips | ea5e1ae3295700a9f9b0324d26d953845da17050 | ["Apache-2.0"] | null | null | null | test/errors_test.exs | Badiapp/bolt_sips | ea5e1ae3295700a9f9b0324d26d953845da17050 | ["Apache-2.0"] | null | null | null | test/errors_test.exs | Badiapp/bolt_sips | ea5e1ae3295700a9f9b0324d26d953845da17050 | ["Apache-2.0"] | null | null | null |
defmodule ErrorsTest do
@moduledoc """
  Tests covering newly added errors and the messages they raise.
"""
use ExUnit.Case, async: true
@simple_map %{foo: "bar", bolt_sips: true}
@nested_map %{
foo: "bar",
bolt_sips: true,
a_map: %{unu: 1, doi: 2, baz: "foo"},
a_list: [1, 2, 3.14]
}
test "create a node using SET properties and a simple map" do
r =
Bolt.Sips.query!(Bolt.Sips.conn(), "CREATE (report:Report) SET report = {props}", %{
props: @simple_map
})
assert r == %{
stats: %{"labels-added" => 1, "nodes-created" => 1, "properties-set" => 2},
type: "w"
}
end
test "exception when creating a node using SET properties with a nested map" do
assert_raise Bolt.Sips.Exception,
"Property values can only be of primitive types or arrays thereof",
fn ->
Bolt.Sips.query!(
Bolt.Sips.conn(),
"CREATE (report:Report) SET report = {props}",
%{props: @nested_map}
)
end
end
test "exception when creating a node using SET properties with a list" do
assert_raise Bolt.Sips.Exception,
"Expected {props} to be a map, but it was :`List{String(\"foo\"), String(\"bar\")}`",
fn ->
Bolt.Sips.query!(
Bolt.Sips.conn(),
"CREATE (report:Report) SET report = {props}",
%{props: ["foo", "bar"]}
)
end
end
end
| 31.058824 | 102 | 0.500631 |
730b5845f12f21bec0f6a469d4d3defa18f8546b | 5,253 | exs | Elixir | test/mbanking_web/controllers/user_controller_test.exs | kadmohardy/mbanking | 42a45f78956eac95597e72ade10cb0291ed5aa76 | ["MIT"] | null | null | null | test/mbanking_web/controllers/user_controller_test.exs | kadmohardy/mbanking | 42a45f78956eac95597e72ade10cb0291ed5aa76 | ["MIT"] | null | null | null | test/mbanking_web/controllers/user_controller_test.exs | kadmohardy/mbanking | 42a45f78956eac95597e72ade10cb0291ed5aa76 | ["MIT"] | null | null | null |
defmodule MbankingWeb.UserControllerTest do
use MbankingWeb.ConnCase
alias Mbanking.Accounts.Entities.User
alias Mbanking.Accounts.Repositories.AccountRepository
alias Mbanking.UserFixture
require Logger
def fixture(:user) do
{:ok, user} = AccountRepository.create_user(UserFixture.valid_user_pending_api())
user
end
setup %{conn: conn} do
{:ok, conn: put_req_header(conn, "accept", "application/json")}
end
describe "index" do
test "lists all users", %{conn: conn} do
UserFixture.create_referral_user()
UserFixture.create_user()
conn = get(conn, Routes.api_user_path(conn, :index))
assert json_response(conn, 200)["data"] |> Enum.count() == 2
end
end
describe "create user" do
test "renders user when data is valid", %{conn: conn} do
conn = post(conn, Routes.api_user_path(conn, :create), user: UserFixture.valid_user())
response = json_response(conn, 201)["data"]
assert response["message"] =~
"Conta criada com sucesso. A conta possui status completo. Você pode indicar novos usuários. Seu código de indicação é:"
end
test "try register user with already registered email returns error", %{conn: conn} do
UserFixture.create_user()
conn =
post(conn, Routes.api_user_path(conn, :create),
user: %{
birth_date: ~D[2010-04-17],
city: "some city",
country: "some country",
cpf: "05679935074",
email: "email@email",
gender: "teste",
name: "some name",
state: "some state",
status: "completed"
}
)
assert json_response(conn, 422)["errors"] != %{}
end
test "insert user with referral code", %{conn: conn} do
user_referral = UserFixture.create_referral_user()
user_params = %{
birth_date: ~D[2010-04-17],
city: "some city",
country: "some country",
cpf: "11310402019",
email: "email@email",
gender: "male",
name: "some name",
state: "some state",
referral_code: user_referral.referral_code
}
conn = post(conn, Routes.api_user_path(conn, :create), user: user_params)
response = json_response(conn, 201)["data"]
assert response["message"] =~
"Conta criada com sucesso. A conta possui status completo. Você pode indicar novos usuários. Seu código de indicação é:"
end
test "create new pending account user", %{conn: conn} do
conn =
post(conn, Routes.api_user_path(conn, :create), user: UserFixture.valid_user_pending_api())
response = json_response(conn, 201)["data"]
assert response["message"] =~
"Conta criada com sucesso. A conta possui status pendente. Por favor atualize seus dados para finalizar o seu cadastro."
end
test "insert user with accounts already registered with account status pending", %{conn: conn} do
conn =
post(conn, Routes.api_user_path(conn, :create), user: UserFixture.valid_user_pending_api())
conn =
post(conn, Routes.api_user_path(conn, :create),
user: UserFixture.valid_user_complete_api()
)
response = json_response(conn, 201)["data"]
assert response["message"] =~
"Conta criada com sucesso. A conta possui status completo. Você pode indicar novos usuários. Seu código de indicação é:"
end
test "insert user with accounts already registered with account status completed should return error",
%{conn: conn} do
conn = post(conn, Routes.api_user_path(conn, :create), user: UserFixture.valid_user())
conn = post(conn, Routes.api_user_path(conn, :create), user: UserFixture.valid_user())
assert json_response(conn, 400)["error"] != %{}
end
test "renders errors when data is invalid", %{conn: conn} do
conn = post(conn, Routes.api_user_path(conn, :create), user: UserFixture.invalid_user())
assert json_response(conn, 422)["errors"] != %{}
end
end
describe "update user" do
setup [:create_user]
test "update user should return data", %{conn: conn, user: %User{id: id} = user} do
conn = put(conn, "/api/users/#{user.id}", user: UserFixture.valid_user_complete_api())
response = json_response(conn, 201)["data"]
assert response["message"] =~
"Conta criada com sucesso. A conta possui status completo. Você pode indicar novos usuários. Seu código de indicação é:"
end
test "update user with referral code should return data", %{
conn: conn,
      user: %User{} = user
} do
user_referral = UserFixture.create_referral_user()
user_params =
UserFixture.valid_user_complete_api()
|> Map.put_new(:referral_code, user_referral.referral_code)
conn = put(conn, "/api/users/#{user.id}", user: user_params)
response = json_response(conn, 201)["data"]
assert response["message"] =~
"Conta criada com sucesso. A conta possui status completo. Você pode indicar novos usuários. Seu código de indicação é:"
end
end
defp create_user(_) do
user = fixture(:user)
%{user: user}
end
end
| 34.559211 | 135 | 0.642871 |
730b5c84d9da851ecee8b4ecd939942c676bff67 | 623 | exs | Elixir | test/track_user_agents_web/views/error_view_test.exs | SparkPost/elixir-webhook-sample | 22d367b63f4995eb0ddb4b0aaf484790987b8491 | ["Apache-2.0"] | 1 | 2021-12-08T18:15:33.000Z | 2021-12-08T18:15:33.000Z | test/track_user_agents_web/views/error_view_test.exs | SparkPost/elixir-webhook-sample | 22d367b63f4995eb0ddb4b0aaf484790987b8491 | ["Apache-2.0"] | null | null | null | test/track_user_agents_web/views/error_view_test.exs | SparkPost/elixir-webhook-sample | 22d367b63f4995eb0ddb4b0aaf484790987b8491 | ["Apache-2.0"] | null | null | null |
defmodule TrackUserAgentsWeb.ErrorViewTest do
use TrackUserAgentsWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(TrackUserAgentsWeb.ErrorView, "404.html", []) ==
"Page not found"
end
test "render 500.html" do
assert render_to_string(TrackUserAgentsWeb.ErrorView, "500.html", []) ==
"Internal server error"
end
test "render any other" do
assert render_to_string(TrackUserAgentsWeb.ErrorView, "505.html", []) ==
"Internal server error"
end
end
| 28.318182 | 76 | 0.704655 |
730b5da86b9e96c096450d27dde1c3f669c2357b | 7,344 | exs | Elixir | fogex/.credo.exs | FogEx/poc | 0d3a26b482152b4b73a279dfdb2bf69008234c06 | ["MIT"] | 1 | 2021-09-27T06:15:08.000Z | 2021-09-27T06:15:08.000Z | fogex/.credo.exs | FogEx/poc | 0d3a26b482152b4b73a279dfdb2bf69008234c06 | ["MIT"] | null | null | null | fogex/.credo.exs | FogEx/poc | 0d3a26b482152b4b73a279dfdb2bf69008234c06 | ["MIT"] | 1 | 2021-12-21T12:47:59.000Z | 2021-12-21T12:47:59.000Z |
# This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
%{
#
# You can have as many configs as you like in the `configs:` field.
configs: [
%{
#
# Run any config using `mix credo -C <name>`. If no config name is given
# "default" is used.
#
name: "default",
#
# These are the files included in the analysis:
files: %{
#
# You can give explicit globs or simply directories.
# In the latter case `**/*.{ex,exs}` will be used.
#
included: [
"lib/",
"src/",
"test/",
"web/",
"apps/*/lib/",
"apps/*/src/",
"apps/*/test/",
"apps/*/web/"
],
excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"]
},
#
# Load and configure plugins here:
#
plugins: [],
#
# If you create your own checks, you must specify the source files for
# them here, so they can be loaded by Credo before running the analysis.
#
requires: [],
#
# If you want to enforce a style guide and need a more traditional linting
# experience, you can change `strict` to `true` below:
#
strict: true,
#
# To modify the timeout for parsing files, change this value:
#
parse_timeout: 5000,
#
# If you want to use uncolored output by default, you can change `color`
# to `false` below:
#
color: true,
#
# You can customize the parameters of any check by adding a second element
# to the tuple.
#
# To disable a check put `false` as second element:
#
# {Credo.Check.Design.DuplicatedCode, false}
#
checks: [
#
## Consistency Checks
#
{Credo.Check.Consistency.ExceptionNames, []},
{Credo.Check.Consistency.LineEndings, []},
{Credo.Check.Consistency.ParameterPatternMatching, []},
{Credo.Check.Consistency.SpaceAroundOperators, []},
{Credo.Check.Consistency.SpaceInParentheses, []},
{Credo.Check.Consistency.TabsOrSpaces, []},
#
## Design Checks
#
# You can customize the priority of any check
# Priority values are: `low, normal, high, higher`
#
{Credo.Check.Design.AliasUsage,
[priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0]},
# You can also customize the exit_status of each check.
# If you don't want TODO comments to cause `mix credo` to fail, just
# set this value to 0 (zero).
#
{Credo.Check.Design.TagTODO, [exit_status: 2]},
{Credo.Check.Design.TagFIXME, []},
#
## Readability Checks
#
{Credo.Check.Readability.AliasOrder, []},
{Credo.Check.Readability.FunctionNames, []},
{Credo.Check.Readability.LargeNumbers, []},
{Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]},
{Credo.Check.Readability.ModuleAttributeNames, []},
{Credo.Check.Readability.ModuleDoc, false},
{Credo.Check.Readability.ModuleNames, []},
{Credo.Check.Readability.ParenthesesInCondition, []},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
{Credo.Check.Readability.PredicateFunctionNames, []},
{Credo.Check.Readability.PreferImplicitTry, []},
{Credo.Check.Readability.RedundantBlankLines, []},
{Credo.Check.Readability.Semicolons, []},
{Credo.Check.Readability.SpaceAfterCommas, []},
{Credo.Check.Readability.StringSigils, []},
{Credo.Check.Readability.TrailingBlankLine, []},
{Credo.Check.Readability.TrailingWhiteSpace, []},
{Credo.Check.Readability.UnnecessaryAliasExpansion, []},
{Credo.Check.Readability.VariableNames, []},
#
## Refactoring Opportunities
#
{Credo.Check.Refactor.CondStatements, []},
{Credo.Check.Refactor.CyclomaticComplexity, []},
{Credo.Check.Refactor.FunctionArity, []},
{Credo.Check.Refactor.LongQuoteBlocks, []},
# {Credo.Check.Refactor.MapInto, []},
{Credo.Check.Refactor.MatchInCondition, []},
{Credo.Check.Refactor.NegatedConditionsInUnless, []},
{Credo.Check.Refactor.NegatedConditionsWithElse, []},
{Credo.Check.Refactor.Nesting, []},
{Credo.Check.Refactor.UnlessWithElse, []},
{Credo.Check.Refactor.WithClauses, []},
#
## Warnings
#
{Credo.Check.Warning.ApplicationConfigInModuleAttribute, []},
{Credo.Check.Warning.BoolOperationOnSameValues, []},
{Credo.Check.Warning.ExpensiveEmptyEnumCheck, []},
{Credo.Check.Warning.IExPry, []},
{Credo.Check.Warning.IoInspect, []},
# {Credo.Check.Warning.LazyLogging, []},
{Credo.Check.Warning.MixEnv, false},
{Credo.Check.Warning.OperationOnSameValues, []},
{Credo.Check.Warning.OperationWithConstantResult, []},
{Credo.Check.Warning.RaiseInsideRescue, []},
{Credo.Check.Warning.UnusedEnumOperation, []},
{Credo.Check.Warning.UnusedFileOperation, []},
{Credo.Check.Warning.UnusedKeywordOperation, []},
{Credo.Check.Warning.UnusedListOperation, []},
{Credo.Check.Warning.UnusedPathOperation, []},
{Credo.Check.Warning.UnusedRegexOperation, []},
{Credo.Check.Warning.UnusedStringOperation, []},
{Credo.Check.Warning.UnusedTupleOperation, []},
{Credo.Check.Warning.UnsafeExec, []},
#
# Checks scheduled for next check update (opt-in for now, just replace `false` with `[]`)
#
# Controversial and experimental checks (opt-in, just replace `false` with `[]`)
#
{Credo.Check.Consistency.MultiAliasImportRequireUse, false},
{Credo.Check.Consistency.UnusedVariableNames, false},
{Credo.Check.Design.DuplicatedCode, false},
{Credo.Check.Readability.AliasAs, false},
{Credo.Check.Readability.BlockPipe, false},
{Credo.Check.Readability.ImplTrue, false},
{Credo.Check.Readability.MultiAlias, false},
{Credo.Check.Readability.SeparateAliasRequire, false},
{Credo.Check.Readability.SinglePipe, false},
{Credo.Check.Readability.Specs, false},
{Credo.Check.Readability.StrictModuleLayout, []},
{Credo.Check.Readability.WithCustomTaggedTuple, false},
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.AppendSingleItem, false},
{Credo.Check.Refactor.DoubleBooleanNegation, false},
{Credo.Check.Refactor.ModuleDependencies, false},
{Credo.Check.Refactor.NegatedIsNil, false},
{Credo.Check.Refactor.PipeChainStart, false},
{Credo.Check.Refactor.VariableRebinding, false},
{Credo.Check.Warning.LeakyEnvironment, false},
{Credo.Check.Warning.MapGetUnsafePass, false},
{Credo.Check.Warning.UnsafeToAtom, false}
#
# Custom checks can be created using `mix credo.gen.check`.
#
]
}
]
}
| 38.652632 | 97 | 0.612881 |
730b741b1eeaa46d0930b97e25f7f9c9e32ab118 | 1,677 | ex | Elixir | apps/andi/lib/andi_web/live/user_live_view/edit_user_live_view_table.ex | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | ["Apache-2.0"] | 26 | 2019-09-20T23:54:45.000Z | 2020-08-20T14:23:32.000Z | apps/andi/lib/andi_web/live/user_live_view/edit_user_live_view_table.ex | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | ["Apache-2.0"] | 757 | 2019-08-15T18:15:07.000Z | 2020-09-18T20:55:31.000Z | apps/andi/lib/andi_web/live/user_live_view/edit_user_live_view_table.ex | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | ["Apache-2.0"] | 9 | 2019-11-12T16:43:46.000Z | 2020-03-25T16:23:16.000Z |
defmodule AndiWeb.EditUserLiveView.EditUserLiveViewTable do
@moduledoc """
LiveComponent for organization table
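  A minimal rendering sketch (the parent template and assigns below are assumptions,
  not taken from the surrounding codebase):
      <%= live_component @socket, AndiWeb.EditUserLiveView.EditUserLiveViewTable,
            id: "organizations-table",
            organizations: @organizations %>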
"""
use Phoenix.LiveComponent
alias Phoenix.HTML.Link
def render(assigns) do
~L"""
<div id="<%= @id %>" class="organizations-index__table">
<table class="organizations-table">
<thead>
<th class="organizations-table__th organizations-table__cell organizations-table__th--sortable organizations-table__th--unsorted">Organization</th>
<th class="organizations-table__th organizations-table__cell" style="width: 20%">Actions</th>
</thead>
<%= if @organizations == [] do %>
<tr><td class="organizations-table__cell" colspan="100%">No Organizations Found!</td></tr>
<% else %>
<%= for organization <- @organizations do %>
<tr class="organizations-table__tr">
<td class="organizations-table__cell organizations-table__cell--break"><%= Map.get(organization, :orgName, "") %></td>
<td class="organizations-table__cell organizations-table__cell--break" style="width: 10%;">
<%= Link.link("Edit", to: "/organizations/#{Map.get(organization, :id)}", class: "btn") %>
<button phx-click="remove_org" phx-value-org-id="<%= organization.id %>" phx-target="<%= @myself %>" class="btn btn--remove-organization">Remove</button>
</td>
</tr>
<% end %>
<% end %>
</table>
</div>
"""
end
def handle_event("remove_org", %{"org-id" => org_id}, socket) do
send(self(), {:disassociate_org, org_id})
{:noreply, socket}
end
end
| 39.928571 | 169 | 0.607633 |
730b786424e80efa4f9618917e0eaf9c85227034 | 139 | ex | Elixir | lib/jeeves_web/controllers/home_controller.ex | rtvu/jeeves | 254c987426adcb939763429cae9c9653e8d30304 | ["MIT"] | null | null | null | lib/jeeves_web/controllers/home_controller.ex | rtvu/jeeves | 254c987426adcb939763429cae9c9653e8d30304 | ["MIT"] | 1 | 2021-03-09T09:29:43.000Z | 2021-03-09T09:29:43.000Z | lib/jeeves_web/controllers/home_controller.ex | rtvu/jeeves | 254c987426adcb939763429cae9c9653e8d30304 | ["MIT"] | null | null | null |
defmodule JeevesWeb.HomeController do
use JeevesWeb, :controller
def index(conn, _params) do
render(conn, "index.html")
end
end
| 17.375 | 37 | 0.733813 |
730b8311ed25ed2822dab8be9f29babfc9aeea7d | 256 | exs | Elixir | priv/repo/migrations/20161204165454_create_role.exs | albertchan/phoenix_admin | 74a9bbb01d7574db887b19a13f2934b7372f7d20 | ["MIT"] | 1 | 2018-03-07T17:21:58.000Z | 2018-03-07T17:21:58.000Z | priv/repo/migrations/20161204165454_create_role.exs | albertchan/phoenix_admin | 74a9bbb01d7574db887b19a13f2934b7372f7d20 | ["MIT"] | null | null | null | priv/repo/migrations/20161204165454_create_role.exs | albertchan/phoenix_admin | 74a9bbb01d7574db887b19a13f2934b7372f7d20 | ["MIT"] | null | null | null |
defmodule PhoenixAdmin.Repo.Migrations.CreateRole do
use Ecto.Migration
def change do
create table(:roles) do
add :name, :string
add :description, :string
timestamps()
end
create unique_index(:roles, [:name])
end
end
| 17.066667 | 52 | 0.667969 |
730bf4e38d7daa3c444ec62b4a5c7736d2b09393 | 1,688 | exs | Elixir | apps/ewallet_api/test/ewallet_api/v1/views/wallet_view_test.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | ["Apache-2.0"] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet_api/test/ewallet_api/v1/views/wallet_view_test.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | ["Apache-2.0"] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet_api/test/ewallet_api/v1/views/wallet_view_test.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | ["Apache-2.0"] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z |
# Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWalletAPI.V1.WalletViewTest do
use EWalletAPI.ViewCase, :v1
alias EWalletAPI.V1.WalletView
alias EWallet.BalanceFetcher
alias EWallet.Web.Paginator
alias EWallet.Web.V1.WalletSerializer
alias Ecto.Adapters.SQL.Sandbox
alias LocalLedgerDB.Repo, as: LocalLedgerDBRepo
setup do
:ok = Sandbox.checkout(LocalLedgerDBRepo)
end
describe "render/2" do
test "renders wallets.json with the given wallets" do
wallet_1 = insert(:wallet)
wallet_2 = insert(:wallet)
{:ok, wallet_1} = BalanceFetcher.all(%{"wallet" => wallet_1})
{:ok, wallet_2} = BalanceFetcher.all(%{"wallet" => wallet_2})
paginator = %Paginator{
data: [wallet_1, wallet_2],
pagination: %{
per_page: 10,
current_page: 1,
is_first_page: true,
is_last_page: false
}
}
expected = %{
version: @expected_version,
success: true,
data: WalletSerializer.serialize(paginator)
}
assert WalletView.render("wallets.json", %{wallets: paginator}) == expected
end
end
end
| 30.142857 | 81 | 0.687796 |
730c31e71816ec7cc1fc7b3e84ce8f852a06dd9f | 165 | exs | Elixir | test/nerves_grove_test.exs | Manolets/nerves_grove | 7a67a34ab9d897214ce78814aebb56b346edc726 | ["Unlicense"] | 18 | 2016-08-14T19:17:42.000Z | 2018-06-09T08:11:22.000Z | test/nerves_grove_test.exs | Manolets/nerves_grove | 7a67a34ab9d897214ce78814aebb56b346edc726 | ["Unlicense"] | 3 | 2016-10-09T13:57:07.000Z | 2017-04-15T09:19:48.000Z | test/nerves_grove_test.exs | Manolets/nerves_grove | 7a67a34ab9d897214ce78814aebb56b346edc726 | ["Unlicense"] | 4 | 2016-09-11T12:37:30.000Z | 2018-08-30T19:47:24.000Z |
# This is free and unencumbered software released into the public domain.
defmodule Nerves.Grove.Test do
use ExUnit.Case, async: true
doctest Nerves.Grove
end
| 20.625 | 73 | 0.781818 |
730c456a11f75c67949d0e0fc894d4d5ed82a5c8 | 118 | exs | Elixir | test/jorel_mix_test.exs | G-Corp/jorel_mix | d3bea2e19a5b725bbc90532d74613e0fd5441461 | ["Unlicense"] | null | null | null | test/jorel_mix_test.exs | G-Corp/jorel_mix | d3bea2e19a5b725bbc90532d74613e0fd5441461 | ["Unlicense"] | 1 | 2019-02-13T14:28:48.000Z | 2019-02-13T17:02:49.000Z | test/jorel_mix_test.exs | G-Corp/jorel_mix | d3bea2e19a5b725bbc90532d74613e0fd5441461 | ["Unlicense"] | null | null | null |
defmodule JorelMixTest do
use ExUnit.Case
doctest JorelMix
test "the truth" do
assert 1 + 1 == 2
end
end
| 13.111111 | 25 | 0.677966 |
730c4668990b546df8125ccedbc1fd545ba4b98d | 515 | exs | Elixir | test/integration/room_management_integration_test.exs | debonair/mogo-chat | 108c611177363a04d278221aabc03dcd2b66597c | ["MIT"] | 210 | 2015-01-01T04:17:56.000Z | 2022-02-26T23:54:56.000Z | test/integration/room_management_integration_test.exs | debonair/mogo-chat | 108c611177363a04d278221aabc03dcd2b66597c | ["MIT"] | 1 | 2015-05-08T21:44:36.000Z | 2015-05-08T21:44:36.000Z | test/integration/room_management_integration_test.exs | debonair/mogo-chat | 108c611177363a04d278221aabc03dcd2b66597c | ["MIT"] | 40 | 2015-01-06T08:48:40.000Z | 2020-05-26T18:03:07.000Z |
defmodule RoomManagementIntegrationTest do
use MogoChat.TestCase
use Hound.Helpers
import TestUtils
hound_session
truncate_db_after_test
test "admin should be able to view rooms" do
end
test "admin should be able to add rooms" do
end
test "admin should be able to remove rooms" do
end
test "admin should be able to edit rooms" do
end
test "member should *not* be able to manage rooms" do
end
test "admin should be able to go back to chat using navigation" do
end
end
| 15.147059 | 68 | 0.726214 |
730c4c21d4726ab8128ff78c730da4303d31c711 | 6,331 | exs | Elixir | .credo.exs | hrzndhrn/knigge | 72e4f7bd14f6b79fde2abdeaa996958984cab3e5 | ["MIT"] | null | null | null | .credo.exs | hrzndhrn/knigge | 72e4f7bd14f6b79fde2abdeaa996958984cab3e5 | ["MIT"] | null | null | null | .credo.exs | hrzndhrn/knigge | 72e4f7bd14f6b79fde2abdeaa996958984cab3e5 | ["MIT"] | null | null | null |
# This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
%{
#
# You can have as many configs as you like in the `configs:` field.
configs: [
%{
#
# Run any exec using `mix credo -C <name>`. If no exec name is given
# "default" is used.
#
name: "default",
#
# These are the files included in the analysis:
files: %{
#
# You can give explicit globs or simply directories.
# In the latter case `**/*.{ex,exs}` will be used.
#
included: ["lib/", "src/", "test/", "web/"],
excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"]
},
#
# If you create your own checks, you must specify the source files for
# them here, so they can be loaded by Credo before running the analysis.
#
requires: [],
#
# If you want to enforce a style guide and need a more traditional linting
# experience, you can change `strict` to `true` below:
#
strict: false,
#
# If you want to use uncolored output by default, you can change `color`
# to `false` below:
#
color: true,
#
# You can customize the parameters of any check by adding a second element
# to the tuple.
#
# To disable a check put `false` as second element:
#
# {Credo.Check.Design.DuplicatedCode, false}
#
checks: [
#
## Consistency Checks
#
{Credo.Check.Consistency.ExceptionNames, []},
{Credo.Check.Consistency.LineEndings, []},
{Credo.Check.Consistency.ParameterPatternMatching, []},
{Credo.Check.Consistency.SpaceAroundOperators, []},
{Credo.Check.Consistency.SpaceInParentheses, []},
{Credo.Check.Consistency.TabsOrSpaces, []},
#
## Design Checks
#
# You can customize the priority of any check
# Priority values are: `low, normal, high, higher`
#
{Credo.Check.Design.AliasUsage,
[priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0]},
# You can also customize the exit_status of each check.
# If you don't want TODO comments to cause `mix credo` to fail, just
# set this value to 0 (zero).
#
{Credo.Check.Design.TagTODO, [exit_status: 0]},
{Credo.Check.Design.TagFIXME, []},
#
## Readability Checks
#
{Credo.Check.Readability.AliasOrder, []},
{Credo.Check.Readability.FunctionNames, []},
{Credo.Check.Readability.LargeNumbers, []},
{Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]},
{Credo.Check.Readability.ModuleAttributeNames, []},
{Credo.Check.Readability.ModuleDoc, []},
{Credo.Check.Readability.ModuleNames, []},
{Credo.Check.Readability.ParenthesesInCondition, []},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
{Credo.Check.Readability.PredicateFunctionNames, []},
{Credo.Check.Readability.PreferImplicitTry, []},
{Credo.Check.Readability.RedundantBlankLines, []},
{Credo.Check.Readability.Semicolons, []},
{Credo.Check.Readability.SpaceAfterCommas, []},
{Credo.Check.Readability.StringSigils, []},
{Credo.Check.Readability.TrailingBlankLine, []},
{Credo.Check.Readability.TrailingWhiteSpace, []},
# TODO: enable by default in Credo 1.1
{Credo.Check.Readability.UnnecessaryAliasExpansion, false},
{Credo.Check.Readability.VariableNames, []},
#
## Refactoring Opportunities
#
{Credo.Check.Refactor.CondStatements, []},
{Credo.Check.Refactor.CyclomaticComplexity, []},
{Credo.Check.Refactor.FunctionArity, []},
{Credo.Check.Refactor.LongQuoteBlocks, []},
{Credo.Check.Refactor.MapInto, []},
{Credo.Check.Refactor.MatchInCondition, []},
{Credo.Check.Refactor.NegatedConditionsInUnless, []},
{Credo.Check.Refactor.NegatedConditionsWithElse, []},
{Credo.Check.Refactor.Nesting, []},
{Credo.Check.Refactor.PipeChainStart,
[
excluded_argument_types: [:atom, :binary, :fn, :keyword, :number],
excluded_functions: []
]},
{Credo.Check.Refactor.UnlessWithElse, []},
#
## Warnings
#
{Credo.Check.Warning.BoolOperationOnSameValues, []},
{Credo.Check.Warning.ExpensiveEmptyEnumCheck, []},
{Credo.Check.Warning.IExPry, []},
{Credo.Check.Warning.IoInspect, []},
# Lazy logging is done by Logger macro starting at 1.7, so no need to check this
{Credo.Check.Warning.LazyLogging, false},
{Credo.Check.Warning.OperationWithConstantResult, []},
{Credo.Check.Warning.RaiseInsideRescue, []},
{Credo.Check.Warning.UnusedEnumOperation, []},
{Credo.Check.Warning.UnusedFileOperation, []},
{Credo.Check.Warning.UnusedKeywordOperation, []},
{Credo.Check.Warning.UnusedListOperation, []},
{Credo.Check.Warning.UnusedPathOperation, []},
{Credo.Check.Warning.UnusedRegexOperation, []},
{Credo.Check.Warning.UnusedStringOperation, []},
{Credo.Check.Warning.UnusedTupleOperation, []},
#
# Controversial and experimental checks (opt-in, just replace `false` with `[]`)
#
{Credo.Check.Consistency.MultiAliasImportRequireUse, false},
{Credo.Check.Design.DuplicatedCode, false},
{Credo.Check.Readability.MultiAlias, false},
{Credo.Check.Readability.Specs, false},
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.AppendSingleItem, false},
{Credo.Check.Refactor.DoubleBooleanNegation, false},
{Credo.Check.Refactor.ModuleDependencies, false},
{Credo.Check.Refactor.VariableRebinding, false},
{Credo.Check.Warning.MapGetUnsafePass, false},
{Credo.Check.Warning.UnsafeToAtom, false}
#
# Custom checks can be created using `mix credo.gen.check`.
#
]
}
]
}
| 39.322981 | 88 | 0.616016 |
730c80ecbe2b98bdbbeb7f597905053bf7a1a891 | 410 | ex | Elixir | lib/file_summary.ex | akoutmos/ex_todo | f1aaf2ccb4ecdefb3fb48a36ef2cc76a0d9045fa | ["MIT"] | 14 | 2019-06-18T04:56:18.000Z | 2021-03-22T07:59:43.000Z | lib/file_summary.ex | akoutmos/ex_todo | f1aaf2ccb4ecdefb3fb48a36ef2cc76a0d9045fa | ["MIT"] | null | null | null | lib/file_summary.ex | akoutmos/ex_todo | f1aaf2ccb4ecdefb3fb48a36ef2cc76a0d9045fa | ["MIT"] | null | null | null |
defmodule ExTodo.FileSummary do
@moduledoc """
This module defines a struct which is used to encapsulate all the information
for a file that contains code tags
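  A minimal example (the todo entry shape shown here is illustrative; the real
  entries come from the ExTodo scanner):
      iex> ExTodo.FileSummary.build("lib/my_app.ex", [{"TODO", 3, "handle errors"}])
      %ExTodo.FileSummary{file_path: "lib/my_app.ex", todo_entries: [{"TODO", 3, "handle errors"}]}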
"""
alias __MODULE__
defstruct file_path: nil, todo_entries: []
@doc "Build a file summary sruct"
def build(file_path, todo_entries) do
%FileSummary{
file_path: file_path,
todo_entries: todo_entries
}
end
end
| 21.578947 | 79 | 0.714634 |
730c84fc2ef85e0a37adde85e2f13ebad799ed04 | 2,459 | ex | Elixir | lib/travis.ex | kddeisz/travis.ex | 5d54cf61342f1b470d363c10b608a28ce97ee580 | ["MIT"] | null | null | null | lib/travis.ex | kddeisz/travis.ex | 5d54cf61342f1b470d363c10b608a28ce97ee580 | ["MIT"] | null | null | null | lib/travis.ex | kddeisz/travis.ex | 5d54cf61342f1b470d363c10b608a28ce97ee580 | ["MIT"] | null | null | null |
defmodule Travis do
alias Travis.Client
@headers [{"User-Agent", "Travis.ex/0.0.1"}, {"Accept", "application/vnd.travis-ci.2+json"}]
@type response :: {integer, String.t} | :jsx.json_term
  @spec delete(binary, Client.t, list) :: response
def delete(path, client, params \\ []), do: :delete |> request(path, client, params)
@spec get(binary, Client.t, list) :: response
def get(path, client, params \\ []), do: :get |> request(path, client, params)
  @spec patch(binary, Client.t, list) :: response
def patch(path, client, params \\ []), do: :patch |> request(path, client, params)
  @spec post(binary, Client.t, list) :: response
def post(path, client, params \\ []), do: :post |> request(path, client, params)
  @spec put(binary, Client.t, list) :: response
def put(path, client, params \\ []), do: :put |> request(path, client, params)
@spec add_params(binary, list) :: binary
defp add_params(url, params) do
url |> URI.parse |> merge_params(params) |> String.Chars.to_string
end
@spec headers(Client.t) :: [{String.t, String.t}]
defp headers(%Client{token: token}) when is_nil(token), do: @headers
defp headers(%Client{token: token}), do: [{"Authorization", "token #{token}"} | @headers]
@spec merge_params(URI.t, list) :: URI.t
defp merge_params(uri, []), do: uri
defp merge_params(%URI{query: nil} = uri, params) when is_list(params) or is_map(params) do
uri |> Map.put(:query, URI.encode_query(params))
end
defp merge_params(%URI{} = uri, params) when is_list(params) or is_map(params) do
uri |> Map.update!(:query, fn q -> q |> URI.decode_query |> Map.merge(param_map(params)) |> URI.encode_query end)
end
@spec request(:delete | :get | :patch | :post | :put, binary, Client.t, list) :: response
defp request(method, path, client, params) do
uri = client |> url(path) |> add_params(params)
HTTPoison.request!(method, uri, "", client |> headers) |> process
end
@spec param_map(list) :: map
defp param_map(list) when is_list(list) or is_map(list) do
for {key, value} <- list, into: Map.new, do: {"#{key}", value}
end
@spec process(HTTPoison.Response.t) :: response
defp process(%HTTPoison.Response{status_code: 200, body: body}), do: JSX.decode!(body)
defp process(%HTTPoison.Response{status_code: status_code, body: body}), do: {status_code, body}
@spec url(Client.t, binary) :: binary
defp url(_client = %Client{endpoint: endpoint}, path) do
endpoint <> path
end
end
| 40.311475 | 117 | 0.663684 |
730c86d7ee6324133871a438b3d0a3e337ef27de | 1,982 | ex | Elixir | clients/os_login/lib/google_api/os_login/v1alpha/model/ssh_public_key.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/os_login/lib/google_api/os_login/v1alpha/model/ssh_public_key.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/os_login/lib/google_api/os_login/v1alpha/model/ssh_public_key.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.OSLogin.V1alpha.Model.SshPublicKey do
@moduledoc """
The SSH public key information associated with a Google account.
## Attributes
* `expirationTimeUsec` (*type:* `String.t`, *default:* `nil`) - An expiration time in microseconds since epoch.
* `fingerprint` (*type:* `String.t`, *default:* `nil`) - Output only. The SHA-256 fingerprint of the SSH public key.
* `key` (*type:* `String.t`, *default:* `nil`) - Public key text in SSH format, defined by RFC4253 section 6.6.
* `name` (*type:* `String.t`, *default:* `nil`) - Output only. The canonical resource name.
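  For example, a decoded key may look like this (all values below are illustrative):
      %GoogleApi.OSLogin.V1alpha.Model.SshPublicKey{
        key: "ssh-ed25519 AAAAC3... user@host",
        expirationTimeUsec: "1640995200000000",
        fingerprint: "5f3c1a...",
        name: "users/user@example.com/sshPublicKeys/5f3c1a..."
      }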
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:expirationTimeUsec => String.t() | nil,
:fingerprint => String.t() | nil,
:key => String.t() | nil,
:name => String.t() | nil
}
field(:expirationTimeUsec)
field(:fingerprint)
field(:key)
field(:name)
end
defimpl Poison.Decoder, for: GoogleApi.OSLogin.V1alpha.Model.SshPublicKey do
def decode(value, options) do
GoogleApi.OSLogin.V1alpha.Model.SshPublicKey.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.OSLogin.V1alpha.Model.SshPublicKey do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.392857 | 120 | 0.70333 |
730c9178533e90ed05b413d8dbc0f85742208f3e | 1,114 | exs | Elixir | mix.exs | warmwaffles/auth0_ex | d0d7ad895c4db79d366c9c55e289a944c891936c | ["Apache-2.0"] | null | null | null | mix.exs | warmwaffles/auth0_ex | d0d7ad895c4db79d366c9c55e289a944c891936c | ["Apache-2.0"] | null | null | null | mix.exs | warmwaffles/auth0_ex | d0d7ad895c4db79d366c9c55e289a944c891936c | ["Apache-2.0"] | 1 | 2021-12-21T12:47:59.000Z | 2021-12-21T12:47:59.000Z |
defmodule Auth0Ex.Mixfile do
use Mix.Project
def project do
[
app: :auth0_ex,
version: "0.7.0",
elixir: "~> 1.7",
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
preferred_cli_env: [
vcr: :test,
"vcr.delete": :test,
"vcr.check": :test,
"vcr.show": :test
],
deps: deps(),
package: package(),
description: "An elixir client library for Auth0",
source_url: "https://github.com/techgaun/auth0_ex",
docs: [extras: ["README.md"]]
]
end
def application do
[
applications: [:logger, :httpoison, :jason],
mod: {Auth0Ex.Application, []}
]
end
defp deps do
[
{:httpoison, "~> 1.6"},
{:jason, "~> 1.1"},
{:credo, "~> 1.6", only: [:dev, :test]},
{:exvcr, "~> 0.13", only: :test},
{:ex_doc, "~> 0.19", only: [:dev]}
]
end
defp package do
[
maintainers: [
"Samar Acharya"
],
licenses: ["Apache-2.0"],
links: %{"GitHub" => "https://github.com/techgaun/auth0_ex"}
]
end
end
| 21.423077 | 66 | 0.505386 |
730cc63f80faca1d5e3662861684dcce0ab16090 | 1,095 | ex | Elixir | lib/strichliste_elixir_web/channels/user_socket.ex | DEvil0000/strichliste_elixir | 33efe808ced1dd3e3650212a506e8c3322277b2b | ["MIT"] | null | null | null | lib/strichliste_elixir_web/channels/user_socket.ex | DEvil0000/strichliste_elixir | 33efe808ced1dd3e3650212a506e8c3322277b2b | ["MIT"] | null | null | null | lib/strichliste_elixir_web/channels/user_socket.ex | DEvil0000/strichliste_elixir | 33efe808ced1dd3e3650212a506e8c3322277b2b | ["MIT"] | 1 | 2019-05-24T18:18:24.000Z | 2019-05-24T18:18:24.000Z |
defmodule StrichlisteElixirWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", StrichlisteElixirWeb.RoomChannel
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket, _connect_info) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# StrichlisteElixirWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 32.205882 | 92 | 0.705023 |
730cd109a66ab14e21163511c07412b1c13f60a0 | 94 | exs | Elixir | test/steamex/steam_id_test.exs | csgo-pug-system/steamex | 6b01ec48ba0945450dfa41c03efa562aba7bf747 | ["Apache-2.0"] | null | null | null | test/steamex/steam_id_test.exs | csgo-pug-system/steamex | 6b01ec48ba0945450dfa41c03efa562aba7bf747 | ["Apache-2.0"] | null | null | null | test/steamex/steam_id_test.exs | csgo-pug-system/steamex | 6b01ec48ba0945450dfa41c03efa562aba7bf747 | ["Apache-2.0"] | null | null | null |
defmodule Steamex.SteamIDTest do
use ExUnit.Case, async: true
doctest Steamex.SteamID
end
| 18.8 | 32 | 0.797872 |
730cd1e0529d08fb181743e175f17970cca5394f | 623 | exs | Elixir | test/tic_tac_toe_test.exs | mike-works/elixir-fundamentals | 7505a61adcedbe2da5f3c59dc765e2bfb7364c3c | ["BSD-3-Clause"] | 14 | 2018-04-18T12:58:54.000Z | 2020-09-14T16:40:07.000Z | test/tic_tac_toe_test.exs | mike-north/elixir-fundamentals | 2bad545e1eba966956ae929b3fa3c5cbd55db227 | ["BSD-3-Clause"] | 6 | 2018-02-27T18:25:57.000Z | 2021-10-19T02:40:08.000Z | test/tic_tac_toe_test.exs | mike-north/elixir-fundamentals | 2bad545e1eba966956ae929b3fa3c5cbd55db227 | ["BSD-3-Clause"] | 5 | 2018-02-28T16:10:53.000Z | 2019-04-18T11:18:05.000Z |
defmodule TicTacToeTest do
use ExUnit.Case
@moduletag :exercise14
test "Starting a game should return a PID" do
game = TicTacToe.new
assert is_pid(game) === true
end
test "Board is full of nils before game starts" do
game = TicTacToe.new
board_before = game |> TicTacToe.get_board
assert board_before[1][1] === nil
end
test "Making a move should change game state" do
game = TicTacToe.new
    board_before = game |> TicTacToe.get_board
    assert board_before[1][1] === nil
game
|> TicTacToe.make_move("X", {1, 1})
board_after = game |> TicTacToe.get_board
assert board_after[1][1] === "X"
end
end
 | 21.482759 | 52 | 0.669342 |
730cd82a784409677ed729a77cbcdaa4fb702c9c | 6,160 | ex | Elixir | lib/swoosh/adapters/postmark.ex | joshnuss/swoosh | 47e7b46fd7635de90618c1320f6ab90880ec5418 | ["MIT"] | null | null | null | lib/swoosh/adapters/postmark.ex | joshnuss/swoosh | 47e7b46fd7635de90618c1320f6ab90880ec5418 | ["MIT"] | 21 | 2021-03-08T10:04:20.000Z | 2022-03-23T10:20:17.000Z | lib/swoosh/adapters/postmark.ex | joshnuss/swoosh | 47e7b46fd7635de90618c1320f6ab90880ec5418 | ["MIT"] | null | null | null |
defmodule Swoosh.Adapters.Postmark do
@moduledoc ~S"""
An adapter that sends email using the Postmark API.
For reference: [Postmark API docs](http://developer.postmarkapp.com/developer-send-api.html)
## Example
# config/config.exs
config :sample, Sample.Mailer,
adapter: Swoosh.Adapters.Postmark,
api_key: "my-api-key"
# lib/sample/mailer.ex
defmodule Sample.Mailer do
use Swoosh.Mailer, otp_app: :sample
end
## Example of sending emails using templates
This will use Postmark's `withTemplate` endpoint.
import Swoosh.Email
new()
|> from({"T Stark", "[email protected]"})
|> to({"Steve Rogers", "[email protected]"})
|> subject("Hello, Avengers!")
|> put_provider_option(:template_id, "123456")
|> put_provider_option(:template_model, %{name: "Steve", email: "[email protected]"})
You can also use `template_alias` instead of `template_id`, if you use Postmark's
[TemplateAlias](https://postmarkapp.com/developer/api/templates-api#email-with-template)
feature.
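  For example, assuming a template with the alias `"welcome"` exists on your Postmark server:
      import Swoosh.Email
      new()
      |> from({"T Stark", "tony.stark@example.com"})
      |> to({"Steve Rogers", "steve.rogers@example.com"})
      |> subject("Hello, Avengers!")
      |> put_provider_option(:template_alias, "welcome")
      |> put_provider_option(:template_model, %{name: "Steve", email: "steve.rogers@example.com"})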
## Example of sending emails with a tag
  This will add a tag to the email sent through Postmark.
      import Swoosh.Email
      new()
      |> from({"T Stark", "tony.stark@example.com"})
      |> to({"Steve Rogers", "steve.rogers@example.com"})
|> subject("Hello, Avengers!")
|> put_provider_option(:tag, "some tag")
"""
use Swoosh.Adapter, required_config: [:api_key]
alias Swoosh.Email
import Swoosh.Email.Render
@base_url "https://api.postmarkapp.com"
@api_endpoint "/email"
@impl true
def deliver(%Email{} = email, config \\ []) do
headers = prepare_headers(config)
params = email |> prepare_body |> Swoosh.json_library().encode!
url = [base_url(config), api_endpoint(email)]
case Swoosh.ApiClient.post(url, headers, params, email) do
{:ok, 200, _headers, body} ->
{:ok, %{id: Swoosh.json_library().decode!(body)["MessageID"]}}
{:ok, code, _headers, body} when code > 399 ->
case Swoosh.json_library().decode(body) do
{:ok, error} -> {:error, {code, error}}
{:error, _} -> {:error, {code, body}}
end
{:error, reason} ->
{:error, reason}
end
end
defp base_url(config), do: config[:base_url] || @base_url
defp prepare_headers(config) do
[
{"User-Agent", "swoosh/#{Swoosh.version()}"},
{"X-Postmark-Server-Token", config[:api_key]},
{"Content-Type", "application/json"},
{"Accept", "application/json"}
]
end
defp api_endpoint(%{provider_options: %{template_id: _, template_model: _}}),
do: @api_endpoint <> "/withTemplate"
defp api_endpoint(%{provider_options: %{template_alias: _, template_model: _}}),
do: @api_endpoint <> "/withTemplate"
defp api_endpoint(_email), do: @api_endpoint
defp prepare_body(email) do
%{}
|> prepare_from(email)
|> prepare_to(email)
|> prepare_subject(email)
|> prepare_html(email)
|> prepare_text(email)
|> prepare_cc(email)
|> prepare_bcc(email)
|> prepare_attachments(email)
|> prepare_reply_to(email)
|> prepare_template(email)
|> prepare_custom_headers(email)
|> prepare_tag(email)
end
defp prepare_from(body, %{from: from}), do: Map.put(body, "From", render_recipient(from))
defp prepare_to(body, %{to: to}), do: Map.put(body, "To", render_recipient(to))
defp prepare_cc(body, %{cc: []}), do: body
defp prepare_cc(body, %{cc: cc}), do: Map.put(body, "Cc", render_recipient(cc))
defp prepare_bcc(body, %{bcc: []}), do: body
defp prepare_bcc(body, %{bcc: bcc}), do: Map.put(body, "Bcc", render_recipient(bcc))
defp prepare_attachments(body, %{attachments: []}), do: body
defp prepare_attachments(body, %{attachments: attachments}) do
Map.put(body, "Attachments", Enum.map(attachments, &prepare_attachment/1))
end
defp prepare_attachment(attachment) do
attachment_data = %{
"Name" => attachment.filename,
"ContentType" => attachment.content_type,
"Content" => Swoosh.Attachment.get_content(attachment, :base64)
}
case attachment.type do
:attachment ->
attachment_data
:inline ->
Map.put(attachment_data, "ContentID", "cid:#{attachment.filename}")
end
end
defp prepare_reply_to(body, %{reply_to: nil}), do: body
defp prepare_reply_to(body, %{reply_to: {_name, address}}),
do: Map.put(body, "ReplyTo", address)
defp prepare_subject(body, %{subject: ""}), do: body
defp prepare_subject(body, %{subject: subject}), do: Map.put(body, "Subject", subject)
defp prepare_text(body, %{text_body: nil}), do: body
defp prepare_text(body, %{text_body: text_body}), do: Map.put(body, "TextBody", text_body)
defp prepare_html(body, %{html_body: nil}), do: body
defp prepare_html(body, %{html_body: html_body}), do: Map.put(body, "HtmlBody", html_body)
# example custom vars
#
# %{
# "template_id" => 123,
# "template_model" => %{"name": 1, "company": 2}
# }
#
# Or, using template_alias
#
# %{
# "template_alias" => "welcome",
# "template_model" => %{"name": 1, "company": 2}
# }
defp prepare_template(body, %{provider_options: provider_options}),
do: Enum.reduce(provider_options, body, &put_in_body/2)
defp prepare_template(body, _email), do: body
defp put_in_body({:template_model, val}, body_acc), do: Map.put(body_acc, "TemplateModel", val)
defp put_in_body({:template_id, val}, body_acc), do: Map.put(body_acc, "TemplateId", val)
defp put_in_body({:template_alias, val}, body_acc), do: Map.put(body_acc, "TemplateAlias", val)
defp put_in_body(_, body_acc), do: body_acc
defp prepare_custom_headers(body, %{headers: headers}) when map_size(headers) == 0, do: body
defp prepare_custom_headers(body, %{headers: headers}) do
custom_headers = Enum.map(headers, fn {k, v} -> %{"Name" => k, "Value" => v} end)
Map.put(body, "Headers", custom_headers)
end
defp prepare_tag(body, %{provider_options: %{tag: tag_value}}) do
Map.put(body, "Tag", tag_value)
end
defp prepare_tag(body, _), do: body
end
| 31.589744 | 97 | 0.652435 |
730ceefc0561eebe62b180afd65d657ec21e3fff | 1,803 | exs | Elixir | test/monocle_helpers_tests/lookahead/still_inline_code_test.exs | brianbroderick/monocle | eeabecea658468479c04a02352271f6304447736 | ["Apache-2.0"] | 2 | 2018-02-11T01:18:24.000Z | 2020-01-12T17:19:22.000Z | test/monocle_helpers_tests/lookahead/still_inline_code_test.exs | brianbroderick/monocle | eeabecea658468479c04a02352271f6304447736 | ["Apache-2.0"] | null | null | null | test/monocle_helpers_tests/lookahead/still_inline_code_test.exs | brianbroderick/monocle | eeabecea658468479c04a02352271f6304447736 | ["Apache-2.0"] | null | null | null |
defmodule Helpers.InlineCodeTest.StillInlineCodeTest do
use ExUnit.Case
import Monocle.Helpers.LookaheadHelpers, only: [still_inline_code: 2]
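  # Each tuple below is {description, pending inline-code opener, line under test,
  # expected {still-open opener or nil, opening line number}}.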
[
{ "empty line -> not closing for single backquote" , "`" , "" , {"`", 24} },
{ "empty line -> not closing for double backquote" , "``" , "" , {"``", 24} },
{ "single backquote closes single backquote" , "`" , "`" , {nil, 0} },
{ "double backquote closes double backquote" , "``" , " ``" , {nil, 0} },
{ "pair of single backquotes does not close single backquote", "`", "alpha ` beta`", {"`", 42}},
{ "odd number of single backquotes closes single backquote", "`", "` ` `", {nil, 0}},
{ "escapes do not close", "`", "\\`", {"`", 24}},
{ "escaped escapes close", "`", "\\\\`", {nil, 0}},
{ "escapes do not close, same line", "`", "` ` \\`", {"`", 42}},
{ "escaped escapes close, same line", "`", "` ` \\\\`", {nil, 0}},
{ "single backquote in doublequotes reopens double", "`", "`` ` ``", {"``", 42}},
{ "triple backqoute is closed but double is opened", "```", "``` ``", {"``", 42}},
{ "triple backqoute is closed but single is opened", "```", "``` `` ``` ``` `", {"``", 42}},
{ "backquotes before closing do not matter", "``", "` ``", {nil, 0}},
{ "backquotes before closing do not matter (reopening case)", "``", "` `` ```", {"```", 42}},
{ "backquotes before closing and after opening do not matter", "``", "` `` ``` ````", {"```", 42}},
] |> Enum.each( fn { description, opener, line, result } ->
test(description) do
assert still_inline_code(%{line: unquote(line), lnb: 42}, {unquote(opener), 24}) == unquote(result)
end
end)
end
| 66.777778 | 110 | 0.494731 |
730d99f134296642594609b9bf0874710a6551d6 | 6,993 | ex | Elixir | deps/distillery/lib/mix/lib/releases/plugins/plugin.ex | renatoalmeidaoliveira/Merlin | 92a0e318872190cdfa07ced85ee54cc69cad5c14 | ["Apache-2.0"] | null | null | null | deps/distillery/lib/mix/lib/releases/plugins/plugin.ex | renatoalmeidaoliveira/Merlin | 92a0e318872190cdfa07ced85ee54cc69cad5c14 | ["Apache-2.0"] | null | null | null | deps/distillery/lib/mix/lib/releases/plugins/plugin.ex | renatoalmeidaoliveira/Merlin | 92a0e318872190cdfa07ced85ee54cc69cad5c14 | ["Apache-2.0"] | null | null | null |
defmodule Mix.Releases.Plugin do
@moduledoc """
This module provides a simple way to add additional processing to
phases of the release assembly and archival.
## Implementing your own plugin
To create a Distillery plugin, create a new module in which you
`use Mix.Releases.Plugin`. Then write implentations for the following
callbacks:
- `c:before_assembly/2`
- `c:after_assembly/2`
- `c:before_package/2`
- `c:after_package/2`
- `c:after_cleanup/2`
The default implementation is to pass the original `%Release{}`.
You will only need to implement the functions your plugin requires.
When you `use Mix.Releases.Plugin`, the following happens:
- Your module is marked with `@behaviour Mix.Releases.Plugin`.
- The `Mix.Releases.Release` struct is aliased to `%Release{}`.
- The functions `debug/1`, `info/1`, `warn/1`, `notice/1`, and `error/1`
are imported from `Mix.Releases.Logger`. These should be used to present
output to the user.
The first four callbacks (`c:before_assembly/2`, `c:after_assembly/2`,
`c:before_package/2`, and `c:after_package/2`) will each be passed the
`%Release{}` struct and options passed to the plugin. You can return a modified
struct, or `nil`. Any other return value will lead to runtime errors.
`c:after_cleanup/2` is only invoked on `mix release.clean`. It will be passed
the command line arguments. The return value is not used.
## Example
defmodule MyApp.PluginDemo do
use Mix.Releases.Plugin
def before_assembly(%Release{} = release, _opts) do
info "This is executed just prior to assembling the release"
release # or nil
end
def after_assembly(%Release{} = release, _opts) do
info "This is executed just after assembling, and just prior to packaging the release"
release # or nil
end
def before_package(%Release{} = release, _opts) do
info "This is executed just before packaging the release"
release # or nil
end
def after_package(%Release{} = release, _opts) do
info "This is executed just after packaging the release"
release # or nil
end
def after_cleanup(_args, _opts) do
info "This is executed just after running cleanup"
:ok # It doesn't matter what we return here
end
end
"""
alias Mix.Releases.Release
@doc """
Called before assembling the release.
Should return a modified `%Release{}` or `nil`.
"""
@callback before_assembly(Release.t, Keyword.t) :: Release.t | nil
@doc """
Called after assembling the release.
Should return a modified `%Release{}` or `nil`.
"""
@callback after_assembly(Release.t, Keyword.t) :: Release.t | nil
@doc """
Called before packaging the release.
Should return a modified `%Release{}` or `nil`.
When in `dev_mode`, the packaging phase is skipped.
"""
@callback before_package(Release.t, Keyword.t) :: Release.t | nil
@doc """
Called after packaging the release.
Should return a modified `%Release{}` or `nil`.
When in `dev_mode`, the packaging phase is skipped.
"""
@callback after_package(Release.t, Keyword.t) :: Release.t | nil
@doc """
Called when the user invokes the `mix release.clean` task.
The callback will be passed the command line arguments to `mix release.clean`.
It should clean up the files the plugin created. The return value of this
callback is ignored.
"""
@callback after_cleanup([String.t], Keyword.t) :: any
@doc false
defmacro __using__(_opts) do
quote do
@behaviour Mix.Releases.Plugin
alias Mix.Releases.{Logger, Release}
import Mix.Releases.Logger, only: [debug: 1, info: 1, warn: 1, notice: 1, error: 1]
Module.register_attribute __MODULE__, :name, accumulate: false, persist: true
Module.register_attribute __MODULE__, :moduledoc, accumulate: false, persist: true
Module.register_attribute __MODULE__, :shortdoc, accumulate: false, persist: true
def before_assembly(release), do: release
def after_assembly(release), do: release
def before_package(release), do: release
def after_package(release), do: release
def after_cleanup(release, _), do: release
defoverridable [before_assembly: 1, after_assembly: 1,
before_package: 1, after_package: 1, after_cleanup: 2]
end
end
@doc """
Run the `c:before_assembly/2` callback of all plugins of `release`.
"""
@spec before_assembly(Release.t) :: {:ok, Release.t} | {:error, term}
def before_assembly(release), do: call(:before_assembly, release)
@doc """
Run the `c:after_assembly/2` callback of all plugins of `release`.
"""
@spec after_assembly(Release.t) :: {:ok, Release.t} | {:error, term}
def after_assembly(release), do: call(:after_assembly, release)
@doc """
Run the `c:before_package/2` callback of all plugins of `release`.
"""
@spec before_package(Release.t) :: {:ok, Release.t} | {:error, term}
def before_package(release), do: call(:before_package, release)
@doc """
Run the `c:after_package/2` callback of all plugins of `release`.
"""
@spec after_package(Release.t) :: {:ok, Release.t} | {:error, term}
def after_package(release), do: call(:after_package, release)
@doc """
Run the `c:after_cleanup/2` callback of all plugins of `release`.
"""
@spec after_cleanup(Release.t, [String.t]) :: :ok | {:error, term}
  def after_cleanup(release, args), do: run(release.profile.plugins, :after_cleanup, args)
@spec call(atom(), Release.t) :: {:ok, term} | {:error, {:plugin, term}}
defp call(callback, release) do
Enum.map(release.profile.plugins, fn
{_p, _opts} = p -> p
p when is_atom(p) -> {p, []}
end)
|> call(callback, release)
end
defp call([], _, release), do: {:ok, release}
defp call([{plugin, opts}|plugins], callback, release) do
apply_plugin(plugin, callback, release, opts)
rescue
e ->
{:error, {:plugin, e}}
catch
kind, err ->
{:error, {:plugin, {kind, err}}}
else
nil ->
call(plugins, callback, release)
%Release{} = updated ->
call(plugins, callback, updated)
result ->
{:error, {:plugin, {:plugin_failed, :bad_return_value, result}}}
end
# TODO: remove once the /1 plugins are deprecated
defp apply_plugin(plugin, callback, release, opts) do
if function_exported?(plugin, callback, 2) do
apply(plugin, callback, [release, opts])
else
apply(plugin, callback, [release])
end
end
@spec run([atom()], atom, [String.t]) :: :ok | {:error, {:plugin, term}}
defp run([], _, _), do: :ok
defp run([{plugin, opts}|plugins], callback, args) do
apply_plugin(plugin, callback, args, opts)
rescue
e ->
{:error, {:plugin, e}}
catch
kind, err ->
{:error, {:plugin, {kind, err}}}
else
_ ->
run(plugins, callback, args)
end
end
| 32.67757 | 96 | 0.658945 |
730dcfbe8d3202fedc5b220aad42633e75da0543 | 590 | ex | Elixir | lib/langue/formatter/rails/serializer.ex | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | ["BSD-3-Clause"] | 806 | 2018-04-07T20:40:33.000Z | 2022-03-30T01:39:57.000Z | lib/langue/formatter/rails/serializer.ex | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | ["BSD-3-Clause"] | 194 | 2018-04-07T13:49:37.000Z | 2022-03-30T19:58:45.000Z | lib/langue/formatter/rails/serializer.ex | doc-ai/accent | e337e16f3658cc0728364f952c0d9c13710ebb06 | ["BSD-3-Clause"] | 89 | 2018-04-09T13:55:49.000Z | 2022-03-24T07:09:31.000Z |
defmodule Langue.Formatter.Rails.Serializer do
@behaviour Langue.Formatter.Serializer
alias Langue.Utils.NestedSerializerHelper
@white_space_regex ~r/(:|-) \n/
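  # serialize/1 nests the entries under the language slug, encodes the result
  # as YAML via :fast_yaml, strips the space the encoder leaves after a
  # trailing ":" or "-" at the end of a line, and appends a final newline.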
def serialize(%{entries: entries, language: language}) do
render =
%{language.slug => entries}
|> Enum.with_index(-1)
|> Enum.map(&NestedSerializerHelper.map_value(elem(&1, 0), elem(&1, 1)))
|> :fast_yaml.encode()
|> IO.chardata_to_string()
|> String.replace(@white_space_regex, "\\1\n")
|> Kernel.<>("\n")
%Langue.Formatter.SerializerResult{render: render}
end
end
| 28.095238 | 78 | 0.659322 |
730dd40b502e323dd140f6f956baedf6e6e19d34 | 3,399 | ex | Elixir | clients/content/lib/google_api/content/v2/model/orders_custom_batch_request_entry_return_refund_line_item.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_custom_batch_request_entry_return_refund_line_item.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_custom_batch_request_entry_return_refund_line_item.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Model.OrdersCustomBatchRequestEntryReturnRefundLineItem do
@moduledoc """
## Attributes
* `amountPretax` (*type:* `GoogleApi.Content.V2.Model.Price.t`, *default:* `nil`) - The amount that is refunded. If omitted, refundless return is assumed (same as calling returnLineItem method).
* `amountTax` (*type:* `GoogleApi.Content.V2.Model.Price.t`, *default:* `nil`) - Tax amount that corresponds to refund amount in amountPretax. Optional, but if filled, then amountPretax must be set. Calculated automatically if not provided.
* `lineItemId` (*type:* `String.t`, *default:* `nil`) - The ID of the line item to return. Either lineItemId or productId is required.
* `productId` (*type:* `String.t`, *default:* `nil`) - The ID of the product to return. This is the REST ID used in the products service. Either lineItemId or productId is required.
* `quantity` (*type:* `integer()`, *default:* `nil`) - The quantity to return and refund.
* `reason` (*type:* `String.t`, *default:* `nil`) - The reason for the return.
Acceptable values are:
- "`customerDiscretionaryReturn`"
- "`customerInitiatedMerchantCancel`"
- "`deliveredTooLate`"
- "`expiredItem`"
- "`invalidCoupon`"
- "`malformedShippingAddress`"
- "`other`"
- "`productArrivedDamaged`"
- "`productNotAsDescribed`"
- "`qualityNotAsExpected`"
- "`undeliverableShippingAddress`"
- "`unsupportedPoBoxAddress`"
- "`wrongProductShipped`"
* `reasonText` (*type:* `String.t`, *default:* `nil`) - The explanation of the reason.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:amountPretax => GoogleApi.Content.V2.Model.Price.t(),
:amountTax => GoogleApi.Content.V2.Model.Price.t(),
:lineItemId => String.t(),
:productId => String.t(),
:quantity => integer(),
:reason => String.t(),
:reasonText => String.t()
}
field(:amountPretax, as: GoogleApi.Content.V2.Model.Price)
field(:amountTax, as: GoogleApi.Content.V2.Model.Price)
field(:lineItemId)
field(:productId)
field(:quantity)
field(:reason)
field(:reasonText)
end
defimpl Poison.Decoder,
for: GoogleApi.Content.V2.Model.OrdersCustomBatchRequestEntryReturnRefundLineItem do
def decode(value, options) do
GoogleApi.Content.V2.Model.OrdersCustomBatchRequestEntryReturnRefundLineItem.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Content.V2.Model.OrdersCustomBatchRequestEntryReturnRefundLineItem do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.988235 | 244 | 0.692851 |
730dd64298c9a2277fcbebecaf6e5cdb4f31b5d8 | 141 | ex | Elixir | lib/blog_new_web/controllers/page_controller.ex | wagncarv/blog_new | bcfde533df5109cfa68b33362db56ef728090b02 | ["MIT"] | null | null | null | lib/blog_new_web/controllers/page_controller.ex | wagncarv/blog_new | bcfde533df5109cfa68b33362db56ef728090b02 | ["MIT"] | null | null | null | lib/blog_new_web/controllers/page_controller.ex | wagncarv/blog_new | bcfde533df5109cfa68b33362db56ef728090b02 | ["MIT"] | null | null | null |
defmodule BlogNewWeb.PageController do
use BlogNewWeb, :controller
def index(conn, _params) do
render(conn, "index.html")
end
end
| 17.625 | 38 | 0.737589 |
730df51ca5f2eb8fc1a197c597bbc533950e7742 | 4,823 | ex | Elixir | apps/aecore/lib/aecore/sync/chain.ex | aeternity/epoch-elixir | d35613f5541a9bbebe61f90b8503a9b3416fe8b4 | ["0BSD"] | 131 | 2018-03-10T01:35:56.000Z | 2021-12-27T13:44:41.000Z | apps/aecore/lib/aecore/sync/chain.ex | aeternity/elixir-node | d35613f5541a9bbebe61f90b8503a9b3416fe8b4 | ["0BSD"] | 445 | 2018-03-12T09:46:17.000Z | 2018-12-12T09:52:07.000Z | apps/aecore/lib/aecore/sync/chain.ex | aeternity/epoch-elixir | d35613f5541a9bbebe61f90b8503a9b3416fe8b4 | ["0BSD"] | 23 | 2018-03-12T12:01:28.000Z | 2022-03-06T09:22:17.000Z |
defmodule Aecore.Sync.Chain do
@moduledoc """
  Implements the functions for working with the Chain structure of a SyncTask
"""
alias __MODULE__
alias Aecore.Chain.Header
alias Aecore.Sync.{Task, Sync}
@type peer_id :: pid()
@type chain_id :: reference()
@type height :: non_neg_integer()
@type header_hash :: binary()
@typedoc "Holds data for header height and hash"
@type chain :: %{height: height(), hash: header_hash()}
@type t :: %Chain{chain_id: chain_id(), peers: list(peer_id()), chain: list(chain())}
defstruct chain_id: nil,
peers: [],
chain: []
@spec init_chain(peer_id(), Header.t()) :: Chain.t()
def init_chain(peer_id, header) do
init_chain(Kernel.make_ref(), [peer_id], header)
end
@spec init_chain(chain_id() | reference(), list(peer_id()), Header.t()) :: Chain.t()
def init_chain(chain_id, peers, %Header{height: height, prev_hash: prev_hash} = header) do
header_hash = Header.hash(header)
prev_header_data =
if height > 1 do
[%{height: height - 1, hash: prev_hash}]
else
[]
end
%Chain{
chain_id: chain_id,
peers: peers,
chain: [%{height: height, hash: header_hash}] ++ prev_header_data
}
end
@spec merge_chains(Chain.t(), Chain.t()) :: Chain.t()
def merge_chains(%Chain{chain_id: chain_id, peers: peers_1, chain: chain_1}, %Chain{
chain_id: chain_id,
peers: peers_2,
chain: chain_2
}) do
peers =
(peers_1 ++ peers_2)
|> Enum.sort()
|> Enum.uniq()
%Chain{
chain_id: chain_id,
peers: peers,
chain: merge_chain_list_descending(chain_1, chain_2)
}
end
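  # try_match_chains/2 compares two header chains sorted by descending height:
  # it returns :equal when a height present in both chains maps to the same
  # hash, :different when the hashes at that height disagree, and
  # {:first, height} / {:second, height} when the corresponding chain has no
  # entry at that height yet (so more of it has to be fetched).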
@spec try_match_chains(list(chain()), list(chain())) ::
:equal | :different | {:first | :second, height()}
def try_match_chains([%{height: height_1} | chain_1], [
%{height: height_2, hash: header_hash} | _
])
when height_1 > height_2 do
case find_hash_at_height(height_2, chain_1) do
{:ok, ^header_hash} -> :equal
{:ok, _} -> :different
:not_found -> {:first, height_2}
end
end
def try_match_chains([%{height: height_1, hash: header_hash} | _], chain_2) do
case find_hash_at_height(height_1, chain_2) do
{:ok, ^header_hash} -> :equal
{:ok, _} -> :different
:not_found -> {:second, height_1}
end
end
@spec find_hash_at_height(height(), list(chain())) :: {:ok, header_hash()} | :not_found
def find_hash_at_height(height, [%{height: height, hash: header_hash} | _]),
do: {:ok, header_hash}
def find_hash_at_height(_, []), do: :not_found
def find_hash_at_height(height, [%{height: height_1} | _]) when height_1 < height,
do: :not_found
def find_hash_at_height(height, [_ | chain]), do: find_hash_at_height(height, chain)
@doc """
  If there is a task whose chain_id matches that of the given chain,
  merges the data of that task's chain with the given chain.
"""
@spec add_chain_info(Chain.t(), Sync.t()) :: Sync.t()
def add_chain_info(%Chain{chain_id: chain_id} = incoming_chain, sync) do
case Task.get_sync_task(chain_id, sync) do
{:ok, %Task{chain: current_chain} = task} ->
merged_chain = merge_chains(incoming_chain, current_chain)
task_with_merged_chain = %Task{task | chain: merged_chain}
Task.set_sync_task(task_with_merged_chain, sync)
{:error, :not_found} ->
sync
end
end
@doc """
  Gets the next known header hash at a height bigger than the given height; or,
  if no such hash exists, the header hash at the highest known height.
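  For illustration (header hashes shortened for readability):

      chains = [%{height: 5, hash: "e"}, %{height: 3, hash: "c"}, %{height: 1, hash: "a"}]
      next_known_header_hash(chains, 2)
      #=> "c"
      next_known_header_hash(chains, 6)
      #=> "e"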
"""
@spec next_known_header_hash(Chain.t(), height()) :: header_hash()
def next_known_header_hash(chains, height) do
%{hash: header_hash} =
case Enum.take_while(chains, fn %{height: h} -> h > height end) do
[] ->
[chain | _] = chains
chain
chains_1 ->
List.last(chains_1)
end
header_hash
end
  ## Merges two lists of chains that are already sorted in descending order
  ## (based on the height), without keeping duplicates,
  ## where each element is a map with a height and a header hash
defp merge_chain_list_descending(list1, list2) do
merge(list1, list2, [])
end
defp merge([], [], acc), do: Enum.reverse(acc)
defp merge([], [head2 | rest2], acc) do
merge([], rest2, [head2 | acc])
end
defp merge([head1 | rest1], [], acc) do
merge(rest1, [], [head1 | acc])
end
defp merge(
[%{height: height1} = hd1 | rest1] = list1,
[%{height: height2} = hd2 | rest2] = list2,
acc
) do
cond do
height1 > height2 ->
merge(rest1, list2, [hd1 | acc])
height1 < height2 ->
merge(list1, rest2, [hd2 | acc])
true ->
merge(rest1, rest2, [hd1 | acc])
end
end
end
| 29.054217 | 92 | 0.620775 |
730dfa4423c69ff26c0f0178e8ae1cb7744139b3 | 7,283 | ex | Elixir | lib/aws/autoscaling.ex | ttyerl/aws-elixir | 48f6360fccee5dd587fab7a6efb109a399ff9a46 | ["Apache-2.0"] | 223 | 2015-05-29T17:45:35.000Z | 2021-06-29T08:37:14.000Z | lib/aws/autoscaling.ex | ttyerl/aws-elixir | 48f6360fccee5dd587fab7a6efb109a399ff9a46 | ["Apache-2.0"] | 33 | 2015-11-20T20:56:43.000Z | 2021-07-09T20:13:34.000Z | lib/aws/autoscaling.ex | ttyerl/aws-elixir | 48f6360fccee5dd587fab7a6efb109a399ff9a46 | ["Apache-2.0"] | 62 | 2015-06-14T20:53:24.000Z | 2021-12-13T07:20:15.000Z |
# WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/jkakar/aws-codegen for more details.
defmodule AWS.AutoScaling do
@moduledoc """
With Application Auto Scaling, you can automatically scale your AWS
  resources. The experience is similar to that of [Auto
Scaling](https://aws.amazon.com/autoscaling/). You can use Application Auto
Scaling to accomplish the following tasks:
<ul> <li> Define scaling policies to automatically scale your AWS resources
</li> <li> Scale your resources in response to CloudWatch alarms
</li> <li> View the history of your scaling events
</li> </ul> Application Auto Scaling can scale the following AWS resources:
<ul> <li> Amazon ECS services. For more information, see [Service Auto
Scaling](http://docs.aws.amazon.com/AmazonECS/latest/developerguide/service-auto-scaling.html)
in the *Amazon EC2 Container Service Developer Guide*.
</li> <li> Amazon EC2 Spot fleets. For more information, see [Automatic
Scaling for Spot
Fleet](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/fleet-auto-scaling.html)
in the *Amazon EC2 User Guide*.
</li> <li> Amazon EMR clusters. For more information, see [Using Automatic
Scaling in Amazon
EMR](http://docs.aws.amazon.com/ElasticMapReduce/latest/ManagementGuide/emr-automatic-scaling.html)
in the *Amazon EMR Management Guide*.
</li> </ul> For a list of supported regions, see [AWS Regions and
Endpoints: Application Auto
Scaling](http://docs.aws.amazon.com/general/latest/gr/rande.html#as-app_region)
in the *AWS General Reference*.
"""
@doc """
Deletes the specified Application Auto Scaling scaling policy.
Deleting a policy deletes the underlying alarm action, but does not delete
the CloudWatch alarm associated with the scaling policy, even if it no
longer has an associated action.
To create a scaling policy or update an existing one, see
`PutScalingPolicy`.
"""
def delete_scaling_policy(client, input, options \\ []) do
request(client, "DeleteScalingPolicy", input, options)
end
@doc """
Deregisters a scalable target.
Deregistering a scalable target deletes the scaling policies that are
associated with it.
To create a scalable target or update an existing one, see
`RegisterScalableTarget`.
"""
def deregister_scalable_target(client, input, options \\ []) do
request(client, "DeregisterScalableTarget", input, options)
end
@doc """
Provides descriptive information about the scalable targets in the
specified namespace.
You can filter the results using the `ResourceIds` and `ScalableDimension`
parameters.
To create a scalable target or update an existing one, see
`RegisterScalableTarget`. If you are no longer using a scalable target, you
can deregister it using `DeregisterScalableTarget`.
"""
def describe_scalable_targets(client, input, options \\ []) do
request(client, "DescribeScalableTargets", input, options)
end
@doc """
Provides descriptive information about the scaling activities in the
specified namespace from the previous six weeks.
You can filter the results using the `ResourceId` and `ScalableDimension`
parameters.
Scaling activities are triggered by CloudWatch alarms that are associated
with scaling policies. To view the scaling policies for a service
namespace, see `DescribeScalingPolicies`. To create a scaling policy or
update an existing one, see `PutScalingPolicy`.
"""
def describe_scaling_activities(client, input, options \\ []) do
request(client, "DescribeScalingActivities", input, options)
end
@doc """
Provides descriptive information about the scaling policies in the
specified namespace.
You can filter the results using the `ResourceId`, `ScalableDimension`, and
`PolicyNames` parameters.
To create a scaling policy or update an existing one, see
`PutScalingPolicy`. If you are no longer using a scaling policy, you can
delete it using `DeleteScalingPolicy`.
"""
def describe_scaling_policies(client, input, options \\ []) do
request(client, "DescribeScalingPolicies", input, options)
end
@doc """
Creates or updates a policy for an Application Auto Scaling scalable
target.
Each scalable target is identified by a service namespace, resource ID, and
scalable dimension. A scaling policy applies to the scalable target
identified by those three attributes. You cannot create a scaling policy
without first registering a scalable target using `RegisterScalableTarget`.
To update a policy, specify its policy name and the parameters that you
want to change. Any parameters that you don't specify are not changed by
this update request.
You can view the scaling policies for a service namespace using
`DescribeScalingPolicies`. If you are no longer using a scaling policy, you
can delete it using `DeleteScalingPolicy`.
"""
def put_scaling_policy(client, input, options \\ []) do
request(client, "PutScalingPolicy", input, options)
end
@doc """
Registers or updates a scalable target. A scalable target is a resource
that Application Auto Scaling can scale out or scale in. After you have
registered a scalable target, you can use this operation to update the
minimum and maximum values for your scalable dimension.
After you register a scalable target, you can create and apply scaling
policies using `PutScalingPolicy`. You can view the scaling policies for a
service namespace using `DescribeScalableTargets`. If you are no longer
using a scalable target, you can deregister it using
`DeregisterScalableTarget`.
"""
def register_scalable_target(client, input, options \\ []) do
request(client, "RegisterScalableTarget", input, options)
end
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "autoscaling"}
host = get_host("autoscaling", client)
url = get_url(host, client)
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AnyScaleFrontendService.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
| 38.331579 | 101 | 0.724015 |
730e0bc5fa350ba4853d563998f54da42adfe571 | 51,634 | ex | Elixir | lib/elixir/lib/code.ex | bessbd/elixir | 2b8de986238ce0e9593ed5b5d182994a562c1c30 | ["Apache-2.0"] | 1 | 2021-02-22T14:21:02.000Z | 2021-02-22T14:21:02.000Z | lib/elixir/lib/code.ex | bessbd/elixir | 2b8de986238ce0e9593ed5b5d182994a562c1c30 | ["Apache-2.0"] | null | null | null | lib/elixir/lib/code.ex | bessbd/elixir | 2b8de986238ce0e9593ed5b5d182994a562c1c30 | ["Apache-2.0"] | null | null | null |
defmodule Code do
@moduledoc ~S"""
Utilities for managing code compilation, code evaluation, and code loading.
This module complements Erlang's [`:code` module](`:code`)
to add behaviour which is specific to Elixir. Almost all of the functions in this module
have global side effects on the behaviour of Elixir.
## Working with files
This module contains three functions for compiling and evaluating files.
Here is a summary of them and their behaviour:
* `require_file/2` - compiles a file and tracks its name. It does not
compile the file again if it has been previously required.
* `compile_file/2` - compiles a file without tracking its name. Compiles the
file multiple times when invoked multiple times.
* `eval_file/2` - evaluates the file contents without tracking its name. It
returns the result of the last expression in the file, instead of the modules
defined in it. Evaluated files do not trigger the compilation tracers described
in the next section.
In a nutshell, the first must be used when you want to keep track of the files
handled by the system, to avoid the same file from being compiled multiple
times. This is common in scripts.
`compile_file/2` must be used when you are interested in the modules defined in a
file, without tracking. `eval_file/2` should be used when you are interested in
the result of evaluating the file rather than the modules it defines.
## Compilation tracers
Elixir supports compilation tracers, which allows modules to observe constructs
handled by the Elixir compiler when compiling files. A tracer is a module
that implements the `trace/2` function. The function receives the event name
as first argument and `Macro.Env` as second and it must return `:ok`. It is
very important for a tracer to do as little work as possible synchronously
and dispatch the bulk of the work to a separate process. **Slow tracers will
slow down compilation**.
You can configure your list of tracers via `put_compiler_option/2`. The
following events are available to tracers:
* `:start` - (since v1.11.0) invoked whenever the compiler starts to trace
a new lexical context, such as a new file. Keep in mind the compiler runs
in parallel, so multiple files may invoke `:start` and run at the same
time. The value of the `lexical_tracker` of the macro environment, albeit
opaque, can be used to uniquely identify the environment.
* `:stop` - (since v1.11.0) invoked whenever the compiler stops tracing a
new lexical context, such as a new file.
* `{:import, meta, module, opts}` - traced whenever `module` is imported.
`meta` is the import AST metadata and `opts` are the import options.
* `{:imported_function, meta, module, name, arity}` and
`{:imported_macro, meta, module, name, arity}` - traced whenever an
imported function or macro is invoked. `meta` is the call AST metadata,
`module` is the module the import is from, followed by the `name` and `arity`
of the imported function/macro.
* `{:alias, meta, alias, as, opts}` - traced whenever `alias` is aliased
to `as`. `meta` is the alias AST metadata and `opts` are the alias options.
* `{:alias_expansion, meta, as, alias}` traced whenever there is an alias
expansion for a previously defined `alias`, i.e. when the user writes `as`
which is expanded to `alias`. `meta` is the alias expansion AST metadata.
* `{:alias_reference, meta, module}` - traced whenever there is an alias
in the code, i.e. whenever the user writes `MyModule.Foo.Bar` in the code,
regardless if it was expanded or not.
* `{:require, meta, module, opts}` - traced whenever `module` is required.
`meta` is the require AST metadata and `opts` are the require options.
* `{:struct_expansion, meta, module, keys}` - traced whenever `module`'s struct
is expanded. `meta` is the struct AST metadata and `keys` are the keys being
used by expansion
* `{:remote_function, meta, module, name, arity}` and
`{:remote_macro, meta, module, name, arity}` - traced whenever a remote
function or macro is referenced. `meta` is the call AST metadata, `module`
is the invoked module, followed by the `name` and `arity`.
* `{:local_function, meta, name, arity}` and
`{:local_macro, meta, name, arity}` - traced whenever a local
function or macro is referenced. `meta` is the call AST metadata, followed by
the `name` and `arity`.
* `{:compile_env, app, path, return}` - traced whenever `Application.compile_env/3`
or `Application.compile_env!/2` are called. `app` is an atom, `path` is a list
of keys to traverse in the application environment and `return` is either
`{:ok, value}` or `:error`.
The `:tracers` compiler option can be combined with the `:parser_options`
compiler option to enrich the metadata of the traced events above.
New events may be added at any time in the future, therefore it is advised
for the `trace/2` function to have a "catch-all" clause.
Below is an example tracer that prints all remote function invocations:
defmodule MyTracer do
def trace({:remote_function, _meta, module, name, arity}, env) do
IO.puts "#{env.file}:#{env.line} #{inspect(module)}.#{name}/#{arity}"
:ok
end
def trace(_event, _env) do
:ok
end
end
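  Such a tracer can then be enabled via the `:tracers` compiler option, for
  example:

      Code.put_compiler_option(:tracers, [MyTracer])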
"""
@typedoc """
A list with all variable bindings.
The binding keys are usually atoms, but they may be a tuple for variables
defined in a different context.
"""
@type binding :: [{atom() | tuple(), any}]
@boolean_compiler_options [
:docs,
:debug_info,
:ignore_module_conflict,
:relative_paths,
:warnings_as_errors
]
@list_compiler_options [:no_warn_undefined, :tracers, :parser_options]
@available_compiler_options @boolean_compiler_options ++ @list_compiler_options
@doc """
Lists all required files.
## Examples
Code.require_file("../eex/test/eex_test.exs")
List.first(Code.required_files()) =~ "eex_test.exs"
#=> true
"""
@doc since: "1.7.0"
@spec required_files() :: [binary]
def required_files do
:elixir_code_server.call(:required)
end
@deprecated "Use Code.required_files/0 instead"
@doc false
def loaded_files do
required_files()
end
@doc """
Removes files from the required files list.
The modules defined in the file are not removed;
calling this function only removes them from the list,
allowing them to be required again.
## Examples
# Require EEx test code
Code.require_file("../eex/test/eex_test.exs")
# Now unrequire all files
Code.unrequire_files(Code.required_files())
# Note that modules are still available
function_exported?(EExTest.Compiled, :before_compile, 0)
#=> true
"""
@doc since: "1.7.0"
@spec unrequire_files([binary]) :: :ok
def unrequire_files(files) when is_list(files) do
:elixir_code_server.cast({:unrequire_files, files})
end
@deprecated "Use Code.unrequire_files/1 instead"
@doc false
def unload_files(files) do
unrequire_files(files)
end
@doc """
Appends a path to the end of the Erlang VM code path list.
This is the list of directories the Erlang VM uses for
finding module code.
The path is expanded with `Path.expand/1` before being appended.
If this path does not exist, an error is returned.
## Examples
Code.append_path(".")
#=> true
Code.append_path("/does_not_exist")
#=> {:error, :bad_directory}
"""
@spec append_path(Path.t()) :: true | {:error, :bad_directory}
def append_path(path) do
:code.add_pathz(to_charlist(Path.expand(path)))
end
@doc """
Prepends a path to the beginning of the Erlang VM code path list.
This is the list of directories the Erlang VM uses for finding
module code.
The path is expanded with `Path.expand/1` before being prepended.
If this path does not exist, an error is returned.
## Examples
Code.prepend_path(".")
#=> true
Code.prepend_path("/does_not_exist")
#=> {:error, :bad_directory}
"""
@spec prepend_path(Path.t()) :: true | {:error, :bad_directory}
def prepend_path(path) do
:code.add_patha(to_charlist(Path.expand(path)))
end
@doc """
Deletes a path from the Erlang VM code path list. This is the list of
directories the Erlang VM uses for finding module code.
The path is expanded with `Path.expand/1` before being deleted. If the
path does not exist, this function returns `false`.
## Examples
Code.prepend_path(".")
Code.delete_path(".")
#=> true
Code.delete_path("/does_not_exist")
#=> false
"""
@spec delete_path(Path.t()) :: boolean
def delete_path(path) do
:code.del_path(to_charlist(Path.expand(path)))
end
@doc """
Evaluates the contents given by `string`.
The `binding` argument is a list of variable bindings.
The `opts` argument is a keyword list of environment options.
**Warning**: `string` can be any Elixir code and will be executed with
the same privileges as the Erlang VM: this means that such code could
compromise the machine (for example by executing system commands).
Don't use `eval_string/3` with untrusted input (such as strings coming
from the network).
## Options
Options can be:
* `:file` - the file to be considered in the evaluation
* `:line` - the line on which the script starts
Additionally, the following scope values can be configured:
* `:aliases` - a list of tuples with the alias and its target
* `:requires` - a list of modules required
* `:functions` - a list of tuples where the first element is a module
and the second a list of imported function names and arity; the list
of function names and arity must be sorted
* `:macros` - a list of tuples where the first element is a module
and the second a list of imported macro names and arity; the list
of function names and arity must be sorted
Note that setting any of the values above overrides Elixir's default
values. For example, setting `:requires` to `[]` will no longer
automatically require the `Kernel` module. In the same way setting
`:macros` will no longer auto-import `Kernel` macros like `Kernel.if/2`,
`Kernel.SpecialForms.case/2`, and so on.
Returns a tuple of the form `{value, binding}`,
where `value` is the value returned from evaluating `string`.
If an error occurs while evaluating `string` an exception will be raised.
`binding` is a list with all variable bindings
after evaluating `string`. The binding keys are usually atoms, but they
may be a tuple for variables defined in a different context.
## Examples
iex> Code.eval_string("a + b", [a: 1, b: 2], file: __ENV__.file, line: __ENV__.line)
{3, [a: 1, b: 2]}
iex> Code.eval_string("c = a + b", [a: 1, b: 2], __ENV__)
{3, [a: 1, b: 2, c: 3]}
iex> Code.eval_string("a = a + b", [a: 1, b: 2])
{3, [a: 3, b: 2]}
For convenience, you can pass `__ENV__/0` as the `opts` argument and
all imports, requires and aliases defined in the current environment
will be automatically carried over:
iex> Code.eval_string("a + b", [a: 1, b: 2], __ENV__)
{3, [a: 1, b: 2]}
"""
@spec eval_string(List.Chars.t(), binding, Macro.Env.t() | keyword) :: {term, binding}
def eval_string(string, binding \\ [], opts \\ [])
def eval_string(string, binding, %Macro.Env{} = env) do
eval_string_with_error_handling(string, binding, Map.to_list(env))
end
def eval_string(string, binding, opts) when is_list(opts) do
validate_eval_opts(opts)
eval_string_with_error_handling(string, binding, opts)
end
defp eval_string_with_error_handling(string, binding, opts) do
%{line: line, file: file} = env = :elixir.env_for_eval(opts)
forms = :elixir.string_to_quoted!(to_charlist(string), line, 1, file, [])
{value, binding, _env} = :elixir.eval_forms(forms, binding, env)
{value, binding}
end
@doc ~S"""
Formats the given code `string`.
The formatter receives a string representing Elixir code and
returns iodata representing the formatted code according to
pre-defined rules.
## Options
* `:file` - the file which contains the string, used for error
reporting
* `:line` - the line the string starts, used for error reporting
* `:line_length` - the line length to aim for when formatting
the document. Defaults to 98. Note this value is used as
reference but it is not enforced by the formatter as sometimes
user intervention is required. See "Running the formatter"
section
* `:locals_without_parens` - a keyword list of name and arity
pairs that should be kept without parens whenever possible.
The arity may be the atom `:*`, which implies all arities of
that name. The formatter already includes a list of functions
and this option augments this list.
* `:force_do_end_blocks` (since v1.9.0) - when `true`, converts all
inline usages of `do: ...`, `else: ...` and friends into `do/end`
blocks. Defaults to `false`. Note that this option is convergent:
once you set it to `true`, all keywords will be converted. If you
set it to `false` later on, `do/end` blocks won't be converted
back to keywords.
## Design principles
The formatter was designed under three principles.
First, the formatter never changes the semantics of the code by
default. This means the input AST and the output AST are equivalent.
The second principle is to provide as little configuration as possible.
This eases the formatter adoption by removing contention points while
making sure a single style is followed consistently by the community as
a whole.
The formatter does not hard code names. The formatter will not behave
specially because a function is named `defmodule`, `def`, or the like. This
principle mirrors Elixir's goal of being an extensible language where
developers can extend the language with new constructs as if they were
part of the language. When it is absolutely necessary to change behaviour
based on the name, this behaviour should be configurable, such as the
`:locals_without_parens` option.
## Running the formatter
The formatter attempts to fit the most it can on a single line and
introduces line breaks wherever possible when it cannot.
In some cases, this may lead to undesired formatting. Therefore, **some
code generated by the formatter may not be aesthetically pleasing and
may require explicit intervention from the developer**. That's why we
do not recommend to run the formatter blindly in an existing codebase.
Instead you should format and sanity check each formatted file.
Let's see some examples. The code below:
"this is a very long string ... #{inspect(some_value)}"
may be formatted as:
"this is a very long string ... #{
inspect(some_value)
}"
This happens because the only place the formatter can introduce a
new line without changing the code semantics is in the interpolation.
In those scenarios, we recommend developers to directly adjust the
code. Here we can use the binary concatenation operator `<>/2`:
"this is a very long string " <>
"... #{inspect(some_value)}"
The string concatenation makes the code fit on a single line and also
gives more options to the formatter.
A similar example is when the formatter breaks a function definition
over multiple clauses:
def my_function(
%User{name: name, age: age, ...},
arg1,
arg2
) do
...
end
While the code above is completely valid, you may prefer to match on
the struct variables inside the function body in order to keep the
definition on a single line:
def my_function(%User{} = user, arg1, arg2) do
%{name: name, age: age, ...} = user
...
end
In some situations, you can use the fact the formatter does not generate
elegant code as a hint for refactoring. Take this code:
def board?(board_id, %User{} = user, available_permissions, required_permissions) do
Tracker.OrganizationMembers.user_in_organization?(user.id, board.organization_id) and
required_permissions == Enum.to_list(MapSet.intersection(MapSet.new(required_permissions), MapSet.new(available_permissions)))
end
The code above has very long lines and running the formatter is not going
to address this issue. In fact, the formatter may make it more obvious that
you have complex expressions:
def board?(board_id, %User{} = user, available_permissions, required_permissions) do
Tracker.OrganizationMembers.user_in_organization?(user.id, board.organization_id) and
required_permissions ==
Enum.to_list(
MapSet.intersection(
MapSet.new(required_permissions),
MapSet.new(available_permissions)
)
)
end
Take such cases as a suggestion that your code should be refactored:
def board?(board_id, %User{} = user, available_permissions, required_permissions) do
Tracker.OrganizationMembers.user_in_organization?(user.id, board.organization_id) and
matching_permissions?(required_permissions, available_permissions)
end
defp matching_permissions?(required_permissions, available_permissions) do
intersection =
required_permissions
|> MapSet.new()
|> MapSet.intersection(MapSet.new(available_permissions))
|> Enum.to_list()
required_permissions == intersection
end
To sum it up: since the formatter cannot change the semantics of your
code, sometimes it is necessary to tweak or refactor the code to get
optimal formatting. To help better understand how to control the formatter,
we describe in the next sections the cases where the formatter keeps the
user encoding and how to control multiline expressions.
## Keeping user's formatting
The formatter respects the input format in some cases. Those are
listed below:
* Insignificant digits in numbers are kept as is. The formatter
however always inserts underscores for decimal numbers with more
than 5 digits and converts hexadecimal digits to uppercase
* Strings, charlists, atoms and sigils are kept as is. No character
is automatically escaped or unescaped. The choice of delimiter is
also respected from the input
* Newlines inside blocks are kept as in the input except for:
1) expressions that take multiple lines will always have an empty
line before and after and 2) empty lines are always squeezed
together into a single empty line
* The choice between `:do` keyword and `do/end` blocks is left
to the user
* Lists, tuples, bitstrings, maps, structs and function calls will be
broken into multiple lines if they are followed by a newline in the
opening bracket and preceded by a new line in the closing bracket
* Newlines before certain operators (such as the pipeline operators)
and before other operators (such as comparison operators)
The behaviours above are not guaranteed. We may remove or add new
rules in the future. The goal of documenting them is to provide better
understanding on what to expect from the formatter.
### Multi-line lists, maps, tuples, and the like
You can force lists, tuples, bitstrings, maps, structs and function
calls to have one entry per line by adding a newline after the opening
bracket and a new line before the closing bracket lines. For example:
[
foo,
bar
]
If there are no newlines around the brackets, then the formatter will
try to fit everything on a single line, such that the snippet below
[foo,
bar]
will be formatted as
[foo, bar]
You can also force function calls and keywords to be rendered on multiple
lines by having each entry on its own line:
defstruct name: nil,
age: 0
The code above will be kept with one keyword entry per line by the
formatter. To avoid that, just squash everything into a single line.
### Parens and no parens in function calls
Elixir has two syntaxes for function calls. With parens and no parens.
By default, Elixir will add parens to all calls except for:
1. calls that have do/end blocks
2. local calls without parens where the name and arity of the local
call is also listed under `:locals_without_parens` (except for
     calls with arity 0, where the compiler always requires parens)
The choice of parens and no parens also affects indentation. When a
function call with parens doesn't fit on the same line, the formatter
introduces a newline around parens and indents the arguments with two
spaces:
some_call(
arg1,
arg2,
arg3
)
On the other hand, function calls without parens are always indented
by the function call length itself, like this:
some_call arg1,
arg2,
arg3
If the last argument is a data structure, such as maps and lists, and
the beginning of the data structure fits on the same line as the function
call, then no indentation happens, this allows code like this:
Enum.reduce(some_collection, initial_value, fn element, acc ->
# code
end)
some_function_without_parens %{
foo: :bar,
baz: :bat
}
## Code comments
The formatter also handles code comments in a way to guarantee a space
is always added between the beginning of the comment (#) and the next
character.
The formatter also extracts all trailing comments to their previous line.
For example, the code below
hello #world
will be rewritten to
# world
hello
Because code comments are handled apart from the code representation (AST),
there are some situations where code comments are seen as ambiguous by the
code formatter. For example, the comment in the anonymous function below
fn
arg1 ->
body1
# comment
arg2 ->
body2
end
and in this one
fn
arg1 ->
body1
# comment
arg2 ->
body2
end
are considered equivalent (the nesting is discarded alongside most of
user formatting). In such cases, the code formatter will always format to
the latter.
## Newlines
The formatter converts all newlines in code from `\r\n` to `\n`.
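  ## Examples

  Converting the returned iodata to a binary for readability:

      IO.iodata_to_binary(Code.format_string!("foo( 1,2 )"))
      #=> "foo(1, 2)"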
"""
@doc since: "1.6.0"
@spec format_string!(binary, keyword) :: iodata
def format_string!(string, opts \\ []) when is_binary(string) and is_list(opts) do
line_length = Keyword.get(opts, :line_length, 98)
algebra = Code.Formatter.to_algebra!(string, opts)
Inspect.Algebra.format(algebra, line_length)
end
@doc """
Formats a file.
See `format_string!/2` for more information on code formatting and
available options.
"""
@doc since: "1.6.0"
@spec format_file!(binary, keyword) :: iodata
def format_file!(file, opts \\ []) when is_binary(file) and is_list(opts) do
string = File.read!(file)
formatted = format_string!(string, [file: file, line: 1] ++ opts)
[formatted, ?\n]
end
@doc """
Evaluates the quoted contents.
**Warning**: Calling this function inside a macro is considered bad
practice as it will attempt to evaluate runtime values at compile time.
Macro arguments are typically transformed by unquoting them into the
returned quoted expressions (instead of evaluated).
See `eval_string/3` for a description of `binding` and options.
## Examples
iex> contents = quote(do: var!(a) + var!(b))
iex> Code.eval_quoted(contents, [a: 1, b: 2], file: __ENV__.file, line: __ENV__.line)
{3, [a: 1, b: 2]}
For convenience, you can pass `__ENV__/0` as the `opts` argument and
all options will be automatically extracted from the current environment:
iex> contents = quote(do: var!(a) + var!(b))
iex> Code.eval_quoted(contents, [a: 1, b: 2], __ENV__)
{3, [a: 1, b: 2]}
"""
@spec eval_quoted(Macro.t(), binding, Macro.Env.t() | keyword) :: {term, binding}
def eval_quoted(quoted, binding \\ [], opts \\ [])
def eval_quoted(quoted, binding, %Macro.Env{} = env) do
{value, binding, _env} = :elixir.eval_quoted(quoted, binding, Map.to_list(env))
{value, binding}
end
def eval_quoted(quoted, binding, opts) when is_list(opts) do
validate_eval_opts(opts)
{value, binding, _env} = :elixir.eval_quoted(quoted, binding, opts)
{value, binding}
end
defp validate_eval_opts(opts) do
if f = opts[:functions], do: validate_imports(:functions, f)
if m = opts[:macros], do: validate_imports(:macros, m)
if a = opts[:aliases], do: validate_aliases(:aliases, a)
if r = opts[:requires], do: validate_requires(:requires, r)
end
defp validate_requires(kind, requires) do
valid = is_list(requires) and Enum.all?(requires, &is_atom(&1))
unless valid do
raise ArgumentError, "expected :#{kind} option given to eval in the format: [module]"
end
end
defp validate_aliases(kind, aliases) do
valid = is_list(aliases) and Enum.all?(aliases, fn {k, v} -> is_atom(k) and is_atom(v) end)
unless valid do
raise ArgumentError,
"expected :#{kind} option given to eval in the format: [{module, module}]"
end
end
defp validate_imports(kind, imports) do
valid =
is_list(imports) and
Enum.all?(imports, fn {k, v} ->
is_atom(k) and is_list(v) and
Enum.all?(v, fn {name, arity} -> is_atom(name) and is_integer(arity) end)
end)
unless valid do
raise ArgumentError,
"expected :#{kind} option given to eval in the format: [{module, [{name, arity}]}]"
end
end
@doc ~S"""
Converts the given string to its quoted form.
Returns `{:ok, quoted_form}` if it succeeds,
`{:error, {line, error, token}}` otherwise.
## Options
* `:file` - the filename to be reported in case of parsing errors.
Defaults to "nofile".
* `:line` - the starting line of the string being parsed.
Defaults to 1.
* `:column` - (since v1.11.0) the starting column of the string being parsed.
Defaults to 1.
* `:columns` - when `true`, attach a `:column` key to the quoted
metadata. Defaults to `false`.
* `:existing_atoms_only` - when `true`, raises an error
when non-existing atoms are found by the tokenizer.
Defaults to `false`.
* `:token_metadata` (since v1.10.0) - when `true`, includes token-related
metadata in the expression AST, such as metadata for `do` and `end`
tokens, for closing tokens, end of expressions, as well as delimiters
for sigils. See `t:Macro.metadata/0`. Defaults to `false`.
* `:literal_encoder` (since v1.10.0) - how to encode literals in the AST.
It must be a function that receives two arguments, the literal and its
metadata, and it must return `{:ok, ast :: Macro.t}` or
  `{:error, reason :: binary}`. If you return anything other than the literal
  itself as the `term`, then the AST is no longer valid. This option
  may still be useful for textual analysis of the source code.
* `:static_atoms_encoder` - the static atom encoder function, see
"The `:static_atoms_encoder` function" section below. Note this
option overrides the `:existing_atoms_only` behaviour for static
atoms but `:existing_atoms_only` is still used for dynamic atoms,
such as atoms with interpolations.
* `:warn_on_unnecessary_quotes` - when `false`, does not warn
when atoms, keywords or calls have unnecessary quotes on
them. Defaults to `true`.
## `Macro.to_string/2`
The opposite of converting a string to its quoted form is
`Macro.to_string/2`, which converts a quoted form to a string/binary
representation.
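  For example:

      {:ok, ast} = Code.string_to_quoted("1 + 2")
      #=> {:ok, {:+, [line: 1], [1, 2]}}

      Macro.to_string(ast)
      #=> "1 + 2"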
## The `:static_atoms_encoder` function
When `static_atoms_encoder: &my_encoder/2` is passed as an argument,
`my_encoder/2` is called every time the tokenizer needs to create a
"static" atom. Static atoms are atoms in the AST that function as
aliases, remote calls, local calls, variable names, regular atoms
and keyword lists.
The encoder function will receive the atom name (as a binary) and a
keyword list with the current file, line and column. It must return
`{:ok, token :: term} | {:error, reason :: binary}`.
The encoder function is supposed to create an atom from the given
string. To produce a valid AST, it is required to return `{:ok, term}`,
where `term` is an atom. It is possible to return something other than an atom,
however, in that case the AST is no longer "valid" in that it cannot
be used to compile or evaluate Elixir code. A use case for this is
if you want to use the Elixir parser in a user-facing situation, but
you don't want to exhaust the atom table.
The atom encoder is not called for *all* atoms that are present in
the AST. It won't be invoked for the following atoms:
* operators (`:+`, `:-`, and so on)
* syntax keywords (`fn`, `do`, `else`, and so on)
* atoms containing interpolation (`:"#{1 + 1} is two"`), as these
atoms are constructed at runtime.
"""
@spec string_to_quoted(List.Chars.t(), keyword) ::
{:ok, Macro.t()} | {:error, {location :: keyword, term, term}}
def string_to_quoted(string, opts \\ []) when is_list(opts) do
file = Keyword.get(opts, :file, "nofile")
line = Keyword.get(opts, :line, 1)
column = Keyword.get(opts, :column, 1)
case :elixir.string_to_tokens(to_charlist(string), line, column, file, opts) do
{:ok, tokens} ->
:elixir.tokens_to_quoted(tokens, file, opts)
{:error, _error_msg} = error ->
error
end
end
@doc """
Converts the given string to its quoted form.
  It returns the AST if it succeeds,
  and raises an exception otherwise. The exception is a `TokenMissingError`
in case a token is missing (usually because the expression is incomplete),
`SyntaxError` otherwise.
Check `string_to_quoted/2` for options information.
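  ## Examples

      Code.string_to_quoted!("1 + 2")
      #=> {:+, [line: 1], [1, 2]}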
"""
@spec string_to_quoted!(List.Chars.t(), keyword) :: Macro.t()
def string_to_quoted!(string, opts \\ []) when is_list(opts) do
file = Keyword.get(opts, :file, "nofile")
line = Keyword.get(opts, :line, 1)
column = Keyword.get(opts, :column, 1)
:elixir.string_to_quoted!(to_charlist(string), line, column, file, opts)
end
@doc """
Evals the given file.
Accepts `relative_to` as an argument to tell where the file is located.
While `require_file/2` and `compile_file/2` return the loaded modules and their
bytecode, `eval_file/2` simply evaluates the file contents and returns the
evaluation result and its binding (exactly the same return value as `eval_string/3`).
"""
@spec eval_file(binary, nil | binary) :: {term, binding}
def eval_file(file, relative_to \\ nil) when is_binary(file) do
file = find_file(file, relative_to)
eval_string(File.read!(file), [], file: file, line: 1)
end
@deprecated "Use Code.require_file/2 or Code.compile_file/2 instead"
@doc false
def load_file(file, relative_to \\ nil) when is_binary(file) do
file = find_file(file, relative_to)
:elixir_code_server.call({:acquire, file})
loaded = :elixir_compiler.file(file, fn _, _ -> :ok end)
:elixir_code_server.cast({:required, file})
verify_loaded(loaded)
end
@doc """
Requires the given `file`.
Accepts `relative_to` as an argument to tell where the file is located.
If the file was already required, `require_file/2` doesn't do anything and
returns `nil`.
Note that if `require_file/2` is invoked by different processes concurrently,
the first process to invoke `require_file/2` acquires a lock and the remaining
ones will block until the file is available. This means that if `require_file/2`
is called more than once with a given file, that file will be compiled only once.
The first process to call `require_file/2` will get the list of loaded modules,
others will get `nil`.
See `compile_file/2` if you would like to compile a file without tracking its
filenames. Finally, if you would like to get the result of evaluating a file rather
than the modules defined in it, see `eval_file/2`.
## Examples
If the file has not been required, it returns the list of modules:
modules = Code.require_file("eex_test.exs", "../eex/test")
List.first(modules)
#=> {EExTest.Compiled, <<70, 79, 82, 49, ...>>}
If the file has been required, it returns `nil`:
Code.require_file("eex_test.exs", "../eex/test")
#=> nil
"""
@spec require_file(binary, nil | binary) :: [{module, binary}] | nil
def require_file(file, relative_to \\ nil) when is_binary(file) do
file = find_file(file, relative_to)
case :elixir_code_server.call({:acquire, file}) do
:required ->
nil
:proceed ->
loaded = :elixir_compiler.file(file, fn _, _ -> :ok end)
:elixir_code_server.cast({:required, file})
verify_loaded(loaded)
end
end
@doc """
Gets all compilation options from the code server.
To get individual options, see `get_compiler_option/1`.
For a description of all options, see `put_compiler_option/2`.
## Examples
Code.compiler_options()
#=> %{debug_info: true, docs: true, ...}
"""
@spec compiler_options :: map
def compiler_options do
for key <- @available_compiler_options, into: %{} do
{key, :elixir_config.get(key)}
end
end
@doc """
  Stores all given compilation options.
  To store individual options, see `put_compiler_option/2`.
  For a description of all options, see `put_compiler_option/2`.
  Returns a map with the previous values of the given options.
  ## Examples
      Code.compiler_options(warnings_as_errors: true)
      #=> %{warnings_as_errors: false}
"""
@spec compiler_options(Enumerable.t()) :: %{optional(atom) => boolean}
def compiler_options(opts) do
for {key, value} <- opts, into: %{} do
previous = get_compiler_option(key)
put_compiler_option(key, value)
{key, previous}
end
end
@doc """
Returns the value of a given compiler option.
For a description of all options, see `put_compiler_option/2`.
## Examples
Code.get_compiler_option(:debug_info)
#=> true
"""
@doc since: "1.10.0"
@spec get_compiler_option(atom) :: term
def get_compiler_option(key) when key in @available_compiler_options do
:elixir_config.get(key)
end
@doc """
Returns a list with all available compiler options.
For a description of all options, see `put_compiler_option/2`.
## Examples
Code.available_compiler_options()
#=> [:docs, :debug_info, ...]
"""
@spec available_compiler_options() :: [atom]
def available_compiler_options do
@available_compiler_options
end
@doc """
Stores a compilation option.
These options are global since they are stored by Elixir's code server.
Available options are:
* `:docs` - when `true`, retain documentation in the compiled module.
Defaults to `true`.
* `:debug_info` - when `true`, retain debug information in the compiled
module. This allows a developer to reconstruct the original source
code. Defaults to `true`.
* `:ignore_module_conflict` - when `true`, override modules that were
already defined without raising errors. Defaults to `false`.
* `:relative_paths` - when `true`, use relative paths in quoted nodes,
warnings and errors generated by the compiler. Note disabling this option
won't affect runtime warnings and errors. Defaults to `true`.
* `:warnings_as_errors` - causes compilation to fail when warnings are
generated. Defaults to `false`.
* `:no_warn_undefined` (since v1.10.0) - list of modules and `{Mod, fun, arity}`
tuples that will not emit warnings that the module or function does not exist
at compilation time. Pass atom `:all` to skip warning for all undefined
functions. This can be useful when doing dynamic compilation. Defaults to `[]`.
* `:tracers` (since v1.10.0) - a list of tracers (modules) to be used during
compilation. See the module docs for more information. Defaults to `[]`.
* `:parser_options` (since v1.10.0) - a keyword list of options to be given
to the parser when compiling files. It accepts the same options as
`string_to_quoted/2` (except by the options that change the AST itself).
This can be used in combination with the tracer to retrieve localized
information about events happening during compilation. Defaults to `[]`.
It always returns `:ok`. Raises an error for invalid options.
## Examples
Code.put_compiler_option(:debug_info, true)
#=> :ok
"""
@doc since: "1.10.0"
@spec put_compiler_option(atom, term) :: :ok
def put_compiler_option(key, value) when key in @boolean_compiler_options do
if not is_boolean(value) do
raise "compiler option #{inspect(key)} should be a boolean, got: #{inspect(value)}"
end
:elixir_config.put(key, value)
:ok
end
def put_compiler_option(:no_warn_undefined, value) do
if value != :all and not is_list(value) do
raise "compiler option :no_warn_undefined should be a list or the atom :all, " <>
"got: #{inspect(value)}"
end
:elixir_config.put(:no_warn_undefined, value)
:ok
end
def put_compiler_option(key, value) when key in @list_compiler_options do
if not is_list(value) do
raise "compiler option #{inspect(key)} should be a list, got: #{inspect(value)}"
end
if key == :parser_options and not Keyword.keyword?(value) do
raise "compiler option #{inspect(key)} should be a keyword list, " <>
"got: #{inspect(value)}"
end
if key == :tracers and not Enum.all?(value, &is_atom/1) do
raise "compiler option #{inspect(key)} should be a list of modules, " <>
"got: #{inspect(value)}"
end
:elixir_config.put(key, value)
:ok
end
def put_compiler_option(key, _value) do
raise "unknown compiler option: #{inspect(key)}"
end
@doc """
Purge compiler modules.
The compiler utilizes temporary modules to compile code. For example,
`elixir_compiler_1`, `elixir_compiler_2`, and so on. In case the compiled code
stores references to anonymous functions or similar, the Elixir compiler
may be unable to reclaim those modules, keeping an unnecessary amount of
code in memory and eventually leading to modules such as `elixir_compiler_12345`.
This function purges all modules currently kept by the compiler, allowing
old compiler module names to be reused. If there are any processes running
any code from such modules, they will be terminated too.
It returns `{:ok, number_of_modules_purged}`.
"""
@doc since: "1.7.0"
@spec purge_compiler_modules() :: {:ok, non_neg_integer()}
def purge_compiler_modules() do
:elixir_code_server.call(:purge_compiler_modules)
end
@doc """
Compiles the given string.
Returns a list of tuples where the first element is the module name
and the second one is its bytecode (as a binary). A `file` can be
given as second argument which will be used for reporting warnings
and errors.
  **Warning**: `string` can be any Elixir code and will be executed with
  the same privileges as the Erlang VM: this means that such code could
compromise the machine (for example by executing system commands).
Don't use `compile_string/2` with untrusted input (such as strings coming
from the network).
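  ## Examples

  The module name below is illustrative and the bytecode is abbreviated:

      Code.compile_string("defmodule MyStringModule do end")
      #=> [{MyStringModule, <<70, 79, 82, 49, ...>>}]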
"""
@spec compile_string(List.Chars.t(), binary) :: [{module, binary}]
def compile_string(string, file \\ "nofile") when is_binary(file) do
loaded = :elixir_compiler.string(to_charlist(string), file, fn _, _ -> :ok end)
Enum.map(loaded, fn {module, _map, binary} -> {module, binary} end)
end
@doc """
Compiles the quoted expression.
Returns a list of tuples where the first element is the module name and
the second one is its bytecode (as a binary). A `file` can be
given as second argument which will be used for reporting warnings
and errors.
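  ## Examples

  A minimal sketch (the module name is illustrative):

      contents = quote(do: defmodule(MyQuotedModule, do: :ok))
      Code.compile_quoted(contents, "my_quoted.ex")
      #=> [{MyQuotedModule, <<70, 79, 82, 49, ...>>}]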
"""
@spec compile_quoted(Macro.t(), binary) :: [{module, binary}]
def compile_quoted(quoted, file \\ "nofile") when is_binary(file) do
loaded = :elixir_compiler.quoted(quoted, file, fn _, _ -> :ok end)
Enum.map(loaded, fn {module, _map, binary} -> {module, binary} end)
end
@doc """
Compiles the given file.
Accepts `relative_to` as an argument to tell where the file is located.
Returns a list of tuples where the first element is the module name and
the second one is its bytecode (as a binary). Opposite to `require_file/2`,
it does not track the filename of the compiled file.
  If you would like to get the result of evaluating a file rather than the
modules defined in it, see `eval_file/2`.
For compiling many files concurrently, see `Kernel.ParallelCompiler.compile/2`.
"""
@doc since: "1.7.0"
@spec compile_file(binary, nil | binary) :: [{module, binary}]
def compile_file(file, relative_to \\ nil) when is_binary(file) do
loaded = :elixir_compiler.file(find_file(file, relative_to), fn _, _ -> :ok end)
verify_loaded(loaded)
end
@doc """
Ensures the given module is loaded.
If the module is already loaded, this works as no-op. If the module
was not yet loaded, it tries to load it.
If it succeeds in loading the module, it returns `{:module, module}`.
If not, returns `{:error, reason}` with the error reason.
## Code loading on the Erlang VM
Erlang has two modes to load code: interactive and embedded.
By default, the Erlang VM runs in interactive mode, where modules
are loaded as needed. In embedded mode the opposite happens, as all
modules need to be loaded upfront or explicitly.
Therefore, this function is used to check if a module is loaded
before using it and allows one to react accordingly. For example, the `URI`
module uses this function to check if a specific parser exists for a given
URI scheme.
## `ensure_compiled/1`
Elixir also contains an `ensure_compiled/1` function that is a
superset of `ensure_loaded/1`.
Since Elixir's compilation happens in parallel, in some situations
you may need to use a module that was not yet compiled, therefore
it can't even be loaded.
When invoked, `ensure_compiled/1` halts the compilation of the caller
until the module given to `ensure_compiled/1` becomes available or
all files for the current project have been compiled. If compilation
finishes and the module is not available, an error tuple is returned.
`ensure_compiled/1` does not apply to dependencies, as dependencies
must be compiled upfront.
In most cases, `ensure_loaded/1` is enough. `ensure_compiled/1`
must be used in rare cases, usually involving macros that need to
invoke a module for callback information.
## Examples
iex> Code.ensure_loaded(Atom)
{:module, Atom}
iex> Code.ensure_loaded(DoesNotExist)
{:error, :nofile}
"""
@spec ensure_loaded(module) ::
{:module, module} | {:error, :embedded | :badfile | :nofile | :on_load_failure}
def ensure_loaded(module) when is_atom(module) do
:code.ensure_loaded(module)
end
@doc """
Ensures the given module is loaded.
Similar to `ensure_loaded/1`, but returns `true` if the module
is already loaded or was successfully loaded. Returns `false`
otherwise.
## Examples
iex> Code.ensure_loaded?(Atom)
true
"""
@spec ensure_loaded?(module) :: boolean
def ensure_loaded?(module) when is_atom(module) do
match?({:module, ^module}, ensure_loaded(module))
end
@doc """
Ensures the given module is compiled and loaded.
  If the module is already loaded, it works as a no-op. If the module was
not compiled yet, `ensure_compiled/1` halts the compilation of the caller
until the module given to `ensure_compiled/1` becomes available or
all files for the current project have been compiled. If compilation
finishes and the module is not available, an error tuple is returned.
Given this function halts compilation, use it carefully. In particular,
avoid using it to guess which modules are in the system. Overuse of this
function can also lead to deadlocks, where two modules check at the same time
  if the other is compiled. In such cases a specific `:unavailable` error code
  is returned, because we cannot successfully verify whether the module is
  available or not.
If it succeeds in loading the module, it returns `{:module, module}`.
If not, returns `{:error, reason}` with the error reason.
If the module being checked is currently in a compiler deadlock,
this function returns `{:error, :unavailable}`. Unavailable doesn't
necessarily mean the module doesn't exist, just that it is not currently
  available, but it may (or may not) become available in the future.
Check `ensure_loaded/1` for more information on module loading
and when to use `ensure_loaded/1` or `ensure_compiled/1`.
"""
@spec ensure_compiled(module) ::
{:module, module}
| {:error, :embedded | :badfile | :nofile | :on_load_failure | :unavailable}
def ensure_compiled(module) when is_atom(module) do
case :code.ensure_loaded(module) do
{:error, :nofile} = error ->
if can_await_module_compilation?() do
case Kernel.ErrorHandler.ensure_compiled(module, :module, :soft) do
:found -> {:module, module}
:deadlock -> {:error, :unavailable}
:not_found -> {:error, :nofile}
end
else
error
end
other ->
other
end
end
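  # Illustrative usage (an added sketch; `OptionalDependency` is a hypothetical
  # module):
  #
  #     case Code.ensure_compiled(OptionalDependency) do
  #       {:module, mod} -> {:ok, mod}
  #       {:error, _reason} -> {:error, :dependency_unavailable}
  #     end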
@doc """
  Returns `true` if the current process can await module compilation.
When compiling Elixir code via `Kernel.ParallelCompiler`, which is
used by Mix and `elixirc`, calling a module that has not yet been
compiled will block the caller until the module becomes available.
Executing Elixir scripts, such as passing a filename to `elixir`,
does not await.
"""
@doc since: "1.11.0"
@spec can_await_module_compilation? :: boolean
def can_await_module_compilation? do
:erlang.process_info(self(), :error_handler) == {:error_handler, Kernel.ErrorHandler}
end
@doc false
@deprecated "Use Code.ensure_compiled/1 instead (see the proper disclaimers in its docs)"
def ensure_compiled?(module) when is_atom(module) do
match?({:module, ^module}, ensure_compiled(module))
end
@doc ~S"""
Returns the docs for the given module or path to `.beam` file.
When given a module name, it finds its BEAM code and reads the docs from it.
When given a path to a `.beam` file, it will load the docs directly from that
file.
It returns the term stored in the documentation chunk in the format defined by
[EEP 48](https://erlang.org/eep/eeps/eep-0048.html) or `{:error, reason}` if
the chunk is not available.
## Examples
# Module documentation of an existing module
iex> {:docs_v1, _, :elixir, _, %{"en" => module_doc}, _, _} = Code.fetch_docs(Atom)
iex> module_doc |> String.split("\n") |> Enum.at(0)
"Atoms are constants whose values are their own name."
# A module that doesn't exist
iex> Code.fetch_docs(ModuleNotGood)
{:error, :module_not_found}
"""
@doc since: "1.7.0"
@spec fetch_docs(module | String.t()) ::
{:docs_v1, annotation, beam_language, format, module_doc :: doc_content, metadata,
docs :: [doc_element]}
| {:error, :module_not_found | :chunk_not_found | {:invalid_chunk, binary}}
when annotation: :erl_anno.anno(),
beam_language: :elixir | :erlang | atom(),
doc_content: %{optional(binary) => binary} | :none | :hidden,
doc_element:
{{kind :: atom, function_name :: atom, arity}, annotation, signature, doc_content,
metadata},
format: binary,
signature: [binary],
metadata: map
def fetch_docs(module_or_path)
def fetch_docs(module) when is_atom(module) do
case :code.get_object_code(module) do
{_module, bin, beam_path} ->
case fetch_docs_from_beam(bin) do
{:error, :chunk_not_found} ->
app_root = Path.expand(Path.join(["..", ".."]), beam_path)
path = Path.join([app_root, "doc", "chunks", "#{module}.chunk"])
fetch_docs_from_chunk(path)
other ->
other
end
:error ->
case :code.which(module) do
:preloaded ->
# The erts directory is not necessarily included in releases
# unless it is listed as an extra application.
case :code.lib_dir(:erts) do
path when is_list(path) ->
path = Path.join([path, "doc", "chunks", "#{module}.chunk"])
fetch_docs_from_chunk(path)
{:error, _} ->
{:error, :chunk_not_found}
end
_ ->
{:error, :module_not_found}
end
end
end
def fetch_docs(path) when is_binary(path) do
fetch_docs_from_beam(String.to_charlist(path))
end
@docs_chunk 'Docs'
defp fetch_docs_from_beam(bin_or_path) do
case :beam_lib.chunks(bin_or_path, [@docs_chunk]) do
{:ok, {_module, [{@docs_chunk, bin}]}} ->
load_docs_chunk(bin)
{:error, :beam_lib, {:missing_chunk, _, @docs_chunk}} ->
{:error, :chunk_not_found}
{:error, :beam_lib, {:file_error, _, :enoent}} ->
{:error, :module_not_found}
end
end
defp fetch_docs_from_chunk(path) do
case File.read(path) do
{:ok, bin} ->
load_docs_chunk(bin)
{:error, _} ->
{:error, :chunk_not_found}
end
end
defp load_docs_chunk(bin) do
:erlang.binary_to_term(bin)
rescue
_ ->
{:error, {:invalid_chunk, bin}}
end
@doc ~S"""
Deprecated function to retrieve old documentation format.
Elixir v1.7 adopts [EEP 48](https://erlang.org/eep/eeps/eep-0048.html)
which is a new documentation format meant to be shared across all
BEAM languages. The old format, used by `Code.get_docs/2`, is no
longer available, and therefore this function always returns `nil`.
Use `Code.fetch_docs/1` instead.
"""
@deprecated "Code.get_docs/2 always returns nil as its outdated documentation is no longer stored on BEAM files. Use Code.fetch_docs/1 instead"
@spec get_docs(module, :moduledoc | :docs | :callback_docs | :type_docs | :all) :: nil
def get_docs(_module, _kind) do
nil
end
## Helpers
# Finds the file given the relative_to path.
#
# If the file is found, returns its path in binary, fails otherwise.
defp find_file(file, relative_to) do
file =
if relative_to do
Path.expand(file, relative_to)
else
Path.expand(file)
end
if File.regular?(file) do
file
else
raise Code.LoadError, file: file
end
end
defp verify_loaded(loaded) do
maps_binaries = Enum.map(loaded, fn {_module, map, binary} -> {map, binary} end)
Module.ParallelChecker.verify(maps_binaries, [])
Enum.map(loaded, fn {module, _map, binary} -> {module, binary} end)
end
end
| 35.197001 | 145 | 0.686621 |
730e0bc8ea3634e00698a1644ba4b5015c603db2 | 1,125 | exs | Elixir | config/config.exs | lorenzosinisi/extop | 64d6408709224a6daf2db6b233d47d78c37ba0bb | [
"Apache-2.0"
] | 2 | 2020-01-14T05:10:41.000Z | 2020-03-02T10:43:38.000Z | config/config.exs | lorenzosinisi/extop | 64d6408709224a6daf2db6b233d47d78c37ba0bb | [
"Apache-2.0"
] | 1 | 2019-08-24T08:59:40.000Z | 2019-08-24T08:59:40.000Z | config/config.exs | lorenzosinisi/extop | 64d6408709224a6daf2db6b233d47d78c37ba0bb | [
"Apache-2.0"
] | 1 | 2019-08-23T20:53:38.000Z | 2019-08-23T20:53:38.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# third-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :extop, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:extop, :key)
#
# You can also configure a third-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env()}.exs"
| 36.290323 | 73 | 0.750222 |
730e22162a4f3272dc1792e9485d60ffb5b2e930 | 5,025 | exs | Elixir | test/distance_test.exs | carsdotcom/google_maps | 64c10e5936d957d9249b86df5b25398bb5eda004 | [
"MIT"
] | null | null | null | test/distance_test.exs | carsdotcom/google_maps | 64c10e5936d957d9249b86df5b25398bb5eda004 | [
"MIT"
] | null | null | null | test/distance_test.exs | carsdotcom/google_maps | 64c10e5936d957d9249b86df5b25398bb5eda004 | [
"MIT"
] | 2 | 2020-06-20T18:15:37.000Z | 2021-03-30T20:22:02.000Z | defmodule DistanceTest do
use ExUnit.Case, async: true
alias GoogleMaps, as: Maps
test "distance between two addresses" do
origin = "Cột mốc Quốc Gia, Đất Mũi, Ca Mau, Vietnam"
destination = "Cột cờ Lũng Cú, Đường lên Cột Cờ, Lũng Cú, Ha Giang, Vietnam"
{:ok, result} = Maps.distance(origin, destination)
assert_num_destination_addresses result, 1
assert_num_rows result, 1
%{"rows" => [row]} = result
assert_num_elements_for_row row, 1
%{"elements" => [element]} = row
%{
"distance" => %{"text" => distance_text, "value" => distance_value},
"duration" => %{"text" => duration_text, "value" => duration_value},
"status" => status
} = element
assert "OK" == status
assert is_binary(distance_text)
assert is_integer(distance_value)
    assert is_binary(duration_text)
    assert is_integer(duration_value)
end
test "distance between lat/lng tupples" do
origin = {8.6069305,104.7196242}
destination = {23.363697,105.3140251}
{:ok, result} = Maps.distance(origin, destination)
assert_num_destination_addresses result, 1
assert_num_rows result, 1
%{"rows" => [row]} = result
assert_num_elements_for_row row, 1
%{"elements" => [element]} = row
%{
"distance" => %{"text" => distance_text, "value" => distance_value},
"duration" => %{"text" => duration_text, "value" => duration_value},
"status" => status
} = element
assert "OK" == status
assert is_binary(distance_text)
assert is_integer(distance_value)
    assert is_binary(duration_text)
    assert is_integer(duration_value)
end
test "distance between one origin and two destinations using lat/lng tupples" do
origin = {8.6069305,104.7196242}
destinations = [{23.363697,105.3140251}, {22.593417, 104.617724}]
{:ok, result} = Maps.distance(origin, destinations)
assert_num_destination_addresses result, 2
assert_num_rows result, 1
%{"rows" => [row]} = result
assert_num_elements_for_row row, 2
%{"elements" => elements} = row
[first_element | [last_element]] = elements
%{
"distance" => %{"text" => first_distance_text, "value" => first_distance_value},
"duration" => %{"text" => first_duration_text, "value" => first_duration_value},
"status" => first_status
} = first_element
assert "OK" == first_status
assert is_binary(first_distance_text)
assert is_integer(first_distance_value)
    assert is_binary(first_duration_text)
    assert is_integer(first_duration_value)
%{
"distance" => %{"text" => second_distance_text, "value" => second_distance_value},
"duration" => %{"text" => second_duration_text, "value" => second_duration_value},
"status" => second_status
} = last_element
assert "OK" == second_status
assert is_binary(second_distance_text)
assert is_integer(second_distance_value)
    assert is_binary(second_duration_text)
    assert is_integer(second_duration_value)
end
test "distance between two origins and one destination using lat/lng tupples" do
destination = {8.6069305,104.7196242}
origins = [{23.363697,105.3140251}, {22.593417, 104.617724}]
{:ok, result} = Maps.distance(origins, destination)
assert_num_destination_addresses result, 1
assert_num_rows result, 2
%{"rows" => [first_row | [last_row]]} = result
assert_num_elements_for_row first_row, 1
assert_num_elements_for_row last_row, 1
%{"elements" => [first_row_element]} = first_row
%{"elements" => [last_row_element]} = last_row
%{
"distance" => %{"text" => first_row_distance_text, "value" => first_row_distance_value},
"duration" => %{"text" => first_row_duration_text, "value" => first_row_duration_value},
"status" => first_row_status
} = first_row_element
assert "OK" == first_row_status
assert is_binary(first_row_distance_text)
assert is_integer(first_row_distance_value)
assert is_binary(first_row_duration_text)
assert is_integer(first_row_duration_value)
%{
"distance" => %{"text" => last_row_distance_text, "value" => last_row_distance_value},
"duration" => %{"text" => last_row_duration_text, "value" => last_row_duration_value},
"status" => last_row_status
} = last_row_element
assert "OK" == last_row_status
assert is_binary(last_row_distance_text)
assert is_integer(last_row_distance_value)
assert is_binary(last_row_duration_text)
assert is_integer(last_row_duration_value)
end
defp assert_num_destination_addresses(%{"destination_addresses" => addresses}, expected_count) when is_list(addresses) do
assert Enum.count(addresses) == expected_count
end
defp assert_num_rows(%{"rows" => rows}, expected_count) when is_list(rows) do
assert Enum.count(rows) == expected_count
end
defp assert_num_elements_for_row(%{"elements" => elements}, expected_count) when is_list(elements) do
assert Enum.count(elements) == expected_count
end
end
| 35.387324 | 123 | 0.692139 |
730e51d23e597faf7edb9d74a4829b648e85f724 | 6,082 | ex | Elixir | clients/genomics/lib/google_api/genomics/v1/api/referencesets.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/genomics/lib/google_api/genomics/v1/api/referencesets.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/genomics/lib/google_api/genomics/v1/api/referencesets.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Genomics.V1.Api.Referencesets do
@moduledoc """
API calls for all endpoints tagged `Referencesets`.
"""
alias GoogleApi.Genomics.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@doc """
Gets a reference set. Implements [GlobalAllianceApi.getReferenceSet](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/referencemethods.avdl#L83).
## Parameters
- connection (GoogleApi.Genomics.V1.Connection): Connection to server
- reference_set_id (String.t): The ID of the reference set.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
## Returns
{:ok, %GoogleApi.Genomics.V1.Model.ReferenceSet{}} on success
{:error, info} on failure
"""
@spec genomics_referencesets_get(Tesla.Env.client(), String.t(), keyword()) ::
{:ok, GoogleApi.Genomics.V1.Model.ReferenceSet.t()} | {:error, Tesla.Env.t()}
def genomics_referencesets_get(connection, reference_set_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:upload_protocol => :query,
:uploadType => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/referencesets/{referenceSetId}", %{
"referenceSetId" => URI.encode_www_form(reference_set_id)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Genomics.V1.Model.ReferenceSet{}])
end
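  # Illustrative call (an added sketch; the access token and reference set ID are
  # placeholders, and building the connection via `Connection.new/1` is an
  # assumption about the generated client):
  #
  #     conn = GoogleApi.Genomics.V1.Connection.new("oauth2-access-token")
  #
  #     {:ok, %GoogleApi.Genomics.V1.Model.ReferenceSet{} = reference_set} =
  #       GoogleApi.Genomics.V1.Api.Referencesets.genomics_referencesets_get(
  #         conn,
  #         "reference-set-id"
  #       )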
@doc """
Searches for reference sets which match the given criteria. Implements [GlobalAllianceApi.searchReferenceSets](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/referencemethods.avdl#L71)
## Parameters
- connection (GoogleApi.Genomics.V1.Connection): Connection to server
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (SearchReferenceSetsRequest):
## Returns
{:ok, %GoogleApi.Genomics.V1.Model.SearchReferenceSetsResponse{}} on success
{:error, info} on failure
"""
@spec genomics_referencesets_search(Tesla.Env.client(), keyword()) ::
{:ok, GoogleApi.Genomics.V1.Model.SearchReferenceSetsResponse.t()}
| {:error, Tesla.Env.t()}
def genomics_referencesets_search(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:upload_protocol => :query,
:uploadType => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/referencesets/search")
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.Genomics.V1.Model.SearchReferenceSetsResponse{}]
)
end
end
| 43.755396 | 209 | 0.688096 |
730e572756a70e70bfbc6fe95e370ea92d62d0e9 | 6,440 | ex | Elixir | lib/ecto/embedded.ex | dgvncsz0f/ecto | bae06fe650328cc1060c09fe889a2de9a10edb1b | [
"Apache-2.0"
] | null | null | null | lib/ecto/embedded.ex | dgvncsz0f/ecto | bae06fe650328cc1060c09fe889a2de9a10edb1b | [
"Apache-2.0"
] | null | null | null | lib/ecto/embedded.ex | dgvncsz0f/ecto | bae06fe650328cc1060c09fe889a2de9a10edb1b | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Embedded do
@moduledoc false
alias __MODULE__
alias Ecto.Changeset
@type t :: %Embedded{cardinality: :one | :many,
on_replace: :raise | :mark_as_invalid | :delete,
field: atom,
owner: atom,
on_cast: nil | fun,
related: atom,
unique: boolean}
@behaviour Ecto.Changeset.Relation
@on_replace_opts [:raise, :mark_as_invalid, :delete]
@embeds_one_on_replace_opts @on_replace_opts ++ [:update]
defstruct [:cardinality, :field, :owner, :related, :on_cast, on_replace: :raise, unique: true]
@doc """
Builds the embedded struct.
## Options
* `:cardinality` - tells if there is one embedded schema or many
* `:related` - name of the embedded schema
* `:on_replace` - the action taken on embeds when the embed is replaced
"""
def struct(module, name, opts) do
opts = Keyword.put_new(opts, :on_replace, :raise)
cardinality = Keyword.fetch!(opts, :cardinality)
on_replace_opts = if cardinality == :one, do: @embeds_one_on_replace_opts, else: @on_replace_opts
unless opts[:on_replace] in on_replace_opts do
raise ArgumentError, "invalid `:on_replace` option for #{inspect name}. " <>
"The only valid options are: " <>
Enum.map_join(@on_replace_opts, ", ", &"`#{inspect &1}`")
end
struct(%Embedded{field: name, owner: module}, opts)
end
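  # Illustrative mapping (an added sketch; `MySchema` and `Profile` are
  # hypothetical modules): a schema declaring
  # `embeds_one :profile, Profile, on_replace: :update` ends up building roughly:
  #
  #     Ecto.Embedded.struct(MySchema, :profile,
  #       cardinality: :one,
  #       related: Profile,
  #       on_replace: :update
  #     )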
@doc """
Callback invoked by repository to prepare embeds.
It replaces the changesets for embeds inside changes
by actual structs so it can be dumped by adapters and
loaded into the schema struct afterwards.
"""
def prepare(changeset, embeds, adapter, repo_action) do
%{changes: changes, types: types, repo: repo} = changeset
prepare(Map.take(changes, embeds), types, adapter, repo, repo_action)
end
defp prepare(embeds, _types, _adapter, _repo, _repo_action) when embeds == %{} do
embeds
end
defp prepare(embeds, types, adapter, repo, repo_action) do
Enum.reduce embeds, embeds, fn {name, changeset_or_changesets}, acc ->
{:embed, embed} = Map.get(types, name)
Map.put(acc, name, prepare_each(embed, changeset_or_changesets, adapter, repo, repo_action))
end
end
defp prepare_each(%{cardinality: :one}, nil, _adapter, _repo, _repo_action) do
nil
end
defp prepare_each(%{cardinality: :one} = embed, changeset, adapter, repo, repo_action) do
action = check_action!(changeset.action, repo_action, embed)
changeset = run_prepare(changeset, repo)
to_struct(changeset, action, embed, adapter)
end
defp prepare_each(%{cardinality: :many} = embed, changesets, adapter, repo, repo_action) do
for changeset <- changesets,
action = check_action!(changeset.action, repo_action, embed),
changeset = run_prepare(changeset, repo),
prepared = to_struct(changeset, action, embed, adapter),
do: prepared
end
defp to_struct(%Changeset{valid?: false}, _action,
%{related: schema}, _adapter) do
raise ArgumentError, "changeset for embedded #{inspect schema} is invalid, " <>
"but the parent changeset was not marked as invalid"
end
defp to_struct(%Changeset{data: %{__struct__: actual}}, _action,
%{related: expected}, _adapter) when actual != expected do
raise ArgumentError, "expected changeset for embedded schema `#{inspect expected}`, " <>
"got: #{inspect actual}"
end
defp to_struct(%Changeset{changes: changes, data: schema}, :update,
_embed, _adapter) when changes == %{} do
schema
end
defp to_struct(%Changeset{}, :delete, _embed, _adapter) do
nil
end
defp to_struct(%Changeset{} = changeset, action, %{related: schema}, adapter) do
%{data: struct, changes: changes} = changeset
embeds = prepare(changeset, schema.__schema__(:embeds), adapter, action)
changes
|> Map.merge(embeds)
|> autogenerate_id(struct, action, schema, adapter)
|> autogenerate(action, schema)
|> apply_embeds(struct)
end
defp run_prepare(changeset, repo) do
changeset = %{changeset | repo: repo}
Enum.reduce(Enum.reverse(changeset.prepare), changeset, fn fun, acc ->
case fun.(acc) do
%Ecto.Changeset{} = acc -> acc
other ->
raise "expected function #{inspect fun} given to Ecto.Changeset.prepare_changes/2 " <>
"to return an Ecto.Changeset, got: `#{inspect other}`"
end
end)
end
defp apply_embeds(changes, struct) do
struct(struct, changes)
end
defp check_action!(:replace, action, %{on_replace: :delete} = embed),
do: check_action!(:delete, action, embed)
defp check_action!(:update, :insert, %{related: schema}),
do: raise(ArgumentError, "got action :update in changeset for embedded #{inspect schema} while inserting")
defp check_action!(:delete, :insert, %{related: schema}),
do: raise(ArgumentError, "got action :delete in changeset for embedded #{inspect schema} while inserting")
defp check_action!(action, _, _), do: action
defp autogenerate_id(changes, _struct, :insert, schema, adapter) do
case schema.__schema__(:autogenerate_id) do
{key, _source, :binary_id} ->
Map.put_new_lazy(changes, key, fn -> adapter.autogenerate(:embed_id) end)
{_key, :id} ->
raise ArgumentError, "embedded schema `#{inspect schema}` cannot autogenerate `:id` primary keys, " <>
"those are typically used for auto-incrementing constraints. " <>
"Maybe you meant to use `:binary_id` instead?"
nil ->
changes
end
end
defp autogenerate_id(changes, struct, :update, _schema, _adapter) do
for {_, nil} <- Ecto.primary_key(struct) do
raise Ecto.NoPrimaryKeyValueError, struct: struct
end
changes
end
defp autogenerate(changes, action, schema) do
Enum.reduce schema.__schema__(action_to_auto(action)), changes, fn
{k, {mod, fun, args}}, acc ->
case Map.fetch(acc, k) do
{:ok, _} -> acc
:error -> Map.put(acc, k, apply(mod, fun, args))
end
end
end
defp action_to_auto(:insert), do: :autogenerate
defp action_to_auto(:update), do: :autoupdate
@impl true
def build(%Embedded{related: related}) do
related.__struct__
end
end
| 35.977654 | 110 | 0.652174 |
730e6e2165d88a2225331ae8cda4684a08557325 | 620 | exs | Elixir | exercises/pascals-triangle/example.exs | darktef/elixir-exercism | bcaae351486b1405f0a01cd33b4d39555546298e | [
"MIT"
] | 1 | 2021-08-16T20:24:14.000Z | 2021-08-16T20:24:14.000Z | exercises/pascals-triangle/example.exs | Triangle-Elixir/xelixir | 08d23bf47f57799f286567cb26f635291de2fde5 | [
"MIT"
] | null | null | null | exercises/pascals-triangle/example.exs | Triangle-Elixir/xelixir | 08d23bf47f57799f286567cb26f635291de2fde5 | [
"MIT"
] | null | null | null | defmodule PascalsTriangle do
@doc """
Calculates the rows of a pascal triangle
with the given height
"""
@spec rows(integer) :: [[integer]]
def rows(num) do
do_rows(num - 1, [[1]])
end
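  # Illustrative result (an added sketch):
  #
  #     PascalsTriangle.rows(4)
  #     #=> [[1], [1, 1], [1, 2, 1], [1, 3, 3, 1]]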
defp do_rows(0, rows), do: Enum.reverse rows
defp do_rows(n, rows = [h | _]) do
do_rows(n - 1, [next_row(h) | rows])
end
defp next_row(list) do
list
|> each_cons
|> Enum.map(fn [a, b] -> a + b end)
|> add_ends
end
defp each_cons(list) do
list
|> Enum.flat_map(&[&1, &1])
|> Enum.slice(1..-2)
|> Enum.chunk(2)
end
defp add_ends(list), do: [1] ++ list ++ [1]
end
| 19.375 | 46 | 0.570968 |
730e90d279eb2eff3767d7ce953df53ffd4dfbfb | 218 | exs | Elixir | chapter-6/math.exs | CuriousCurmudgeon/programming-elixir-exercises | ba112f498f7cb7fefaa1359c928ff1b385d1add1 | [
"MIT"
] | null | null | null | chapter-6/math.exs | CuriousCurmudgeon/programming-elixir-exercises | ba112f498f7cb7fefaa1359c928ff1b385d1add1 | [
"MIT"
] | null | null | null | chapter-6/math.exs | CuriousCurmudgeon/programming-elixir-exercises | ba112f498f7cb7fefaa1359c928ff1b385d1add1 | [
"MIT"
] | null | null | null | defmodule Math do
# Exercise: ModulesAndFunctions-4
def sum(0), do: 0
def sum(n), do: n + sum(n-1)
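  # Illustrative (an added sketch): Math.sum(4) expands to 4 + 3 + 2 + 1 + 0 = 10.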
# Exercise: ModulesAndFunctions-5
def gcd(x, 0), do: x
def gcd(x, y), do: gcd(y, rem(x,y))
end | 24.222222 | 39 | 0.591743 |
730e91b8173cd50c8f63c12a387a948f29f790d3 | 2,476 | exs | Elixir | test/behaviour/with_already_existing_implementation_test.exs | samuel-uniris/knigge | fd7c6e735a1840211a02733c628167a1831d9c92 | [
"MIT"
] | 83 | 2019-07-26T14:51:19.000Z | 2022-03-27T08:05:15.000Z | test/behaviour/with_already_existing_implementation_test.exs | samuel-uniris/knigge | fd7c6e735a1840211a02733c628167a1831d9c92 | [
"MIT"
] | 21 | 2019-08-06T08:35:44.000Z | 2021-10-17T19:55:46.000Z | test/behaviour/with_already_existing_implementation_test.exs | samuel-uniris/knigge | fd7c6e735a1840211a02733c628167a1831d9c92 | [
"MIT"
] | 10 | 2019-07-31T09:56:26.000Z | 2022-01-03T12:03:33.000Z | defmodule Behaviour.WithAlreadyExistingImplementationTest do
use ExUnit.Case, async: false
import ExUnit.CaptureIO
import Knigge.Test.SaltedModule
require Mox
defmacrop define_facade(knigge_options \\ []) do
behaviour = salt_atom(Behaviour)
implementation = salt_atom(Implementation)
options = Keyword.put(knigge_options, :implementation, implementation)
quote bind_quoted: [behaviour: behaviour, implementation: implementation], unquote: true do
warnings =
capture_io(:stderr, fn ->
defmodule behaviour do
use Knigge, unquote(options)
@callback my_function_with_default() :: :ok
@callback my_other_function() :: :ok
def my_function_with_default, do: :ok
end
end)
Mox.defmock(implementation, for: behaviour)
%{
facade: behaviour,
behaviour: behaviour,
implementation: implementation,
warnings: warnings
}
end
end
test "does not generate a compilation warning for a clause never matching" do
%{warnings: warnings} = define_facade()
refute warnings =~
~r"this clause cannot match because a previous clause at line \d+ always matches"
end
test "prints a Knigge warning for an already existing clause because Knigge doesn't know what to do about it" do
%{facade: facade, warnings: warnings} = define_facade()
assert_lines(
warnings,
"""
Knigge encountered definition `#{facade}.my_function_with_default/0` which matches callback `my_function_with_default/0`. It will not delegate this callback!
If this is your intention you can tell Knigge to ignore this callback:
use Knigge, do_not_delegate: [my_function_with_default: 0]
"""
)
end
test "does not print a Knigge warning for an already existing clause when `do_not_delegate` is provided for the definition" do
%{warnings: warnings} = define_facade(do_not_delegate: [my_function_with_default: 0])
assert warnings == ""
end
test "does not print a Knigge warning for an already existing clause when `warn` is `false`" do
%{warnings: warnings} = define_facade(warn: false)
assert warnings == ""
end
defp assert_lines(received, expected) do
[
String.split(received, "\n"),
String.split(expected, "\n")
]
|> Enum.zip()
|> Enum.each(fn {received, expected} ->
assert received =~ expected
end)
end
end
| 30.567901 | 163 | 0.678918 |
730eb25fa1dfe9c1b978be37a9600a5302640b31 | 469 | exs | Elixir | priv/repo/migrations/20170702160756_add_permissions_to_keys.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 691 | 2017-03-08T09:15:45.000Z | 2022-03-23T22:04:47.000Z | priv/repo/migrations/20170702160756_add_permissions_to_keys.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 491 | 2017-03-07T12:58:42.000Z | 2022-03-29T23:32:54.000Z | priv/repo/migrations/20170702160756_add_permissions_to_keys.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 200 | 2017-03-12T23:03:39.000Z | 2022-03-05T17:55:52.000Z | defmodule Hexpm.Repo.Migrations.AddPermissionsToKeys do
use Ecto.Migration
def up() do
alter table(:keys) do
add(
:permissions,
{:array, :jsonb},
null: false,
default: fragment("ARRAY[json_build_object('id', uuid_generate_v4(), 'domain', 'api')]")
)
end
execute("ALTER TABLE keys ALTER permissions DROP DEFAULT")
end
def down() do
alter table(:keys) do
remove(:permissions)
end
end
end
| 20.391304 | 96 | 0.620469 |
730ecb77984448209af3b670689ac5d37c5f2175 | 424 | exs | Elixir | priv/repo/migrations/20210316052639_create_legal_entytis.exs | gissandrogama/contracts_api | 13bcd292637d0e2bc4d2a6c05f5b3266e8bf28e1 | [
"MIT"
] | null | null | null | priv/repo/migrations/20210316052639_create_legal_entytis.exs | gissandrogama/contracts_api | 13bcd292637d0e2bc4d2a6c05f5b3266e8bf28e1 | [
"MIT"
] | 2 | 2021-03-16T06:43:04.000Z | 2021-03-16T06:54:55.000Z | priv/repo/migrations/20210316052639_create_legal_entytis.exs | gissandrogama/contracts_api | 13bcd292637d0e2bc4d2a6c05f5b3266e8bf28e1 | [
"MIT"
] | null | null | null | defmodule ContractsApi.Repo.Migrations.CreateLegalEntytis do
use Ecto.Migration
def change do
create table(:legal_entytis, primary_key: false) do
add :id, :binary_id, primary_key: true
add :name, :string
add :cnpj, :string
add :contract_id, references(:contracts, on_delete: :nothing, type: :binary_id)
timestamps()
end
create index(:legal_entytis, [:contract_id])
end
end
| 24.941176 | 85 | 0.695755 |
730ee972602cd526f2c3f2e8a16364c19f97ce2f | 2,269 | exs | Elixir | config/prod.exs | Harmful-Alchemist/FunRetro | 6b53c16adb2c233e5338799732a5a5c2fe10acaf | [
"MIT"
] | null | null | null | config/prod.exs | Harmful-Alchemist/FunRetro | 6b53c16adb2c233e5338799732a5a5c2fe10acaf | [
"MIT"
] | null | null | null | config/prod.exs | Harmful-Alchemist/FunRetro | 6b53c16adb2c233e5338799732a5a5c2fe10acaf | [
"MIT"
] | null | null | null | use Mix.Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :fun_retro, FunRetroWeb.Endpoint,
force_ssl: [rewrite_on: [:x_forwarded_proto]],
url: [host: "funretro.fun", port: 443],
https: [
port: 443,
cipher_suite: :strong,
keyfile: System.get_env("SSL_KEY_PATH"),
certfile: System.get_env("SSL_CERT_PATH"),
transport_options: [socket_opts: [:inet6]]
],
cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :fun_retro, FunRetroWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH"),
# transport_options: [socket_opts: [:inet6]]
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :fun_retro, FunRetroWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# Finally import the config/prod.secret.exs which loads secrets
# and configuration from environment variables.
import_config "prod.secret.exs"
| 35.453125 | 66 | 0.711327 |
730eec4534d4e39b3f9c397103a55e3b21c794c0 | 614 | ex | Elixir | lib/probe/function.ex | scohen/instruments | 6162bd3280795b544d052fce871acd9975aee542 | [
"MIT"
] | 167 | 2017-11-09T08:25:21.000Z | 2020-03-05T12:26:20.000Z | lib/probe/function.ex | Seanpm2001-Discord/instruments | 4e8a34655d5b469567be69ccf3cf2dcdeba798f6 | [
"MIT"
] | 7 | 2020-05-13T15:16:08.000Z | 2022-03-03T01:39:56.000Z | lib/probe/function.ex | Seanpm2001-Discord/instruments | 4e8a34655d5b469567be69ccf3cf2dcdeba798f6 | [
"MIT"
] | 16 | 2017-11-10T10:35:37.000Z | 2020-02-25T09:54:53.000Z | defmodule Instruments.Probe.Function do
@moduledoc false
@behaviour Instruments.Probe
def probe_init(_name, _probe_type, options) do
probe_fn = Keyword.fetch!(options, :function)
{:ok, {probe_fn, nil}}
end
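  # Illustrative wiring (an added sketch; the probe name and sampled function are
  # hypothetical):
  #
  #     {:ok, state} =
  #       Instruments.Probe.Function.probe_init(:vm_memory, :gauge,
  #         function: fn -> :erlang.memory(:total) end
  #       )
  #
  #     {:ok, state} = Instruments.Probe.Function.probe_sample(state)
  #     {:ok, _total_memory} = Instruments.Probe.Function.probe_get_value(state)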
def probe_get_value({_, last_value}) do
{:ok, last_value}
end
def probe_reset({probe_fn, _}) do
{:ok, {probe_fn, nil}}
end
def probe_sample({probe_fn, _}) do
probe_value =
case probe_fn.() do
{:ok, result} -> result
other -> other
end
{:ok, {probe_fn, probe_value}}
end
def probe_handle_message(_, state), do: {:ok, state}
end
| 19.806452 | 54 | 0.640065 |
730efbea1a3b747dc4a91aa6b2b0a951483d6d4a | 4,437 | exs | Elixir | test/adaptable_costs_evaluator/formulas_test.exs | patrotom/adaptable-costs-evaluator | c97e65af1e021d7c6acf6564f4671c60321346e3 | [
"MIT"
] | null | null | null | test/adaptable_costs_evaluator/formulas_test.exs | patrotom/adaptable-costs-evaluator | c97e65af1e021d7c6acf6564f4671c60321346e3 | [
"MIT"
] | 4 | 2021-12-07T12:26:50.000Z | 2021-12-30T14:17:25.000Z | test/adaptable_costs_evaluator/formulas_test.exs | patrotom/adaptable-costs-evaluator | c97e65af1e021d7c6acf6564f4671c60321346e3 | [
"MIT"
] | null | null | null | defmodule AdaptableCostsEvaluator.FormulasTest do
use AdaptableCostsEvaluator.DataCase
use AdaptableCostsEvaluator.Fixtures.{
UserFixture,
ComputationFixture,
FormulaFixture,
InputFixture,
OutputFixture,
FieldSchemaFixture,
EvaluatorFixture
}
alias AdaptableCostsEvaluator.Formulas
alias AdaptableCostsEvaluator.Formulas.Formula
setup do
user = user_fixture()
computation = computation_fixture(user)
formula = formula_fixture(%{computation_id: computation.id})
%{formula: formula, computation: computation}
end
describe "common formulas functions" do
test "list_formulas/1 returns all desired formulas", %{
formula: formula,
computation: computation
} do
assert Formulas.list_formulas(computation) == [formula]
end
test "get_formula!/2 returns the formula with given id", %{
formula: formula,
computation: computation
} do
assert Formulas.get_formula!(formula.id) == formula
assert Formulas.get_formula!(formula.id, computation) == formula
end
test "create_formula/1 with valid data creates a formula", %{
formula: _,
computation: computation
} do
attrs =
%{@valid_formula_attrs | label: "custom"}
|> Map.put(:computation_id, computation.id)
assert {:ok, %Formula{} = formula} = Formulas.create_formula(attrs)
assert formula.definition == attrs[:definition]
assert formula.label == attrs[:label]
assert formula.name == attrs[:name]
end
test "create_formula/1 with invalid data returns error changeset" do
assert {:error, %Ecto.Changeset{}} = Formulas.create_formula(@invalid_formula_attrs)
end
test "update_formula/2 with valid data updates the formula", %{
formula: formula,
computation: _
} do
assert {:ok, %Formula{} = formula} = Formulas.update_formula(formula, @update_formula_attrs)
assert formula.definition == @update_formula_attrs[:definition]
assert formula.label == @update_formula_attrs[:label]
assert formula.name == @update_formula_attrs[:name]
end
test "update_formula/2 with invalid data returns error changeset", %{
formula: formula,
computation: computation
} do
assert {:error, %Ecto.Changeset{}} =
Formulas.update_formula(formula, @invalid_formula_attrs)
assert formula == Formulas.get_formula!(formula.id, computation)
end
test "delete_formula/1 deletes the formula", %{formula: formula, computation: computation} do
assert {:ok, %Formula{}} = Formulas.delete_formula(formula)
assert_raise Ecto.NoResultsError, fn -> Formulas.get_formula!(formula.id, computation) end
end
test "change_formula/1 returns a formula changeset", %{formula: formula, computation: _} do
assert %Ecto.Changeset{} = Formulas.change_formula(formula)
end
end
describe "evaluate_formula/1" do
setup %{formula: formula, computation: computation} do
evaluator = evaluator_fixture()
attrs = %{definition: "10 + 5 * input1", evaluator_id: evaluator.id}
{:ok, formula} = Formulas.update_formula(formula, attrs)
field_schema = field_schema_fixture(%{definition: %{"type" => "integer"}})
attrs = %{
computation_id: computation.id,
last_value: 5,
label: "input1",
field_schema_id: field_schema.id
}
input_fixture(attrs)
attrs = %{
computation_id: computation.id,
label: "output1",
field_schema_id: field_schema.id,
formula_id: formula.id
}
output = output_fixture(attrs)
%{formula: formula, output: output}
end
test "returns affected outputs and the result with the valid data", context do
{:ok, result} = Formulas.evaluate_formula(context[:formula])
assert result[:outputs] == [Repo.reload(context[:output])]
assert result[:result] == 35
end
test "returns error with the invalid data", context do
{:ok, formula} = Formulas.update_formula(context[:formula], %{definition: "+"})
assert {:error, _} = Formulas.evaluate_formula(formula)
end
test "returns error when evaluator is missing", context do
{:ok, formula} = Formulas.update_formula(context[:formula], %{evaluator_id: nil})
assert Formulas.evaluate_formula(formula) == {:error, "evaluator not specified"}
end
end
end
| 32.152174 | 98 | 0.678612 |
730f02e76d37b1d6532fcb88810064094cbd4a1b | 6,972 | ex | Elixir | lib/dwarlixir/controllers/human_old.ex | Trevoke/dwarlixir | e0a7ae39d3687d8e649fba67ce3dee413f00307e | [
"MIT"
] | 54 | 2017-03-09T20:43:11.000Z | 2022-03-29T16:59:20.000Z | lib/dwarlixir/controllers/human_old.ex | Trevoke/dwarlixir | e0a7ae39d3687d8e649fba67ce3dee413f00307e | [
"MIT"
] | 17 | 2017-03-12T18:16:21.000Z | 2017-05-30T03:34:57.000Z | lib/dwarlixir/controllers/human_old.ex | Trevoke/dwarlixir | e0a7ae39d3687d8e649fba67ce3dee413f00307e | [
"MIT"
] | 2 | 2017-04-23T23:47:41.000Z | 2017-11-19T13:38:25.000Z | defmodule Dwarlixir.Controllers.HumanOld do
alias Dwarlixir.Controllers
alias Dwarlixir.World
defstruct [
:socket, :id, :location_id, exits: [], messages: []
]
use GenServer
def start_link(args \\ %__MODULE__{}) do
GenServer.start_link(__MODULE__, args, name: via_tuple(args.id))
end
def init(args) do
Registry.update_value(Registry.HumanControllers, args.id, fn(_x) -> args.id end)
Registry.register(Registry.Controllers, "human", args.id)
Registry.register(Registry.Tick, :subject_to_time, self())
{:ok, args}
end
def via_tuple(id) do
{:via, Registry, {Registry.HumanControllers, id}}
end
def log_in(user_id, _password, socket) do
user_id = String.trim user_id
case Controllers.Human.start_link(%__MODULE__{id: user_id, socket: socket}) do
{:ok, _pid} -> {:ok, user_id}
{:error, {:already_started, _pid}} -> {:error, :username_taken}
end
end
  def join_room(user_id, loc_id) do
GenServer.cast(via_tuple(user_id), {:join_room, loc_id})
end
def terminate(reason, _state) do
#Registry.unregister(Registry.HumanControllers, state.id)
reason
end
defp polish_event(string, :arrive, from), do: string <> " arrived from #{from}.\n"
defp polish_event(string, :depart, to), do: string <> " is leaving going #{to}.\n"
defp polish_event(string, :death, nil), do: string <> " died.\n"
def handle(user_id, {:input, input}) do
GenServer.cast(via_tuple(user_id), {:input, String.trim(input)})
end
def handle(user_id, message) do
GenServer.cast(via_tuple(user_id), message)
end
# messages => [{:arrive, mob_id, loc}, {:depart}]
# => %{:arrive => [{}], :depart => [{}]}
# => %{:arrive => ["John McKoala", "Oliver McKoala"]}
# => [["John McKoala, OliverMcKoala arrive."]]
# => "Foo\nbar"
def handle_cast(:tick, state) do
events = state.messages
|> Enum.reduce(%{}, fn(msg, acc) ->
Map.update(acc, {elem(msg, 0), elem(msg, 2)}, [msg], fn(v) -> [msg | v] end)
end)
|> Enum.sort
|> Enum.map(fn({{event_name, event_property}, instances}) ->
Enum.map(instances, fn(instance) -> elem(instance, 1).name end)
|> Enum.join(", ")
|> polish_event(event_name, event_property)
end)
|> Enum.join
write_line(state.socket, events)
# handle arrive messages - write_line(state.socket, "#{info.name} arrived from #{from_loc}.\n")
# handle depart messages - write_line(state.socket, "#{info.name} is leaving towards #{to}.\n")
# handle death messages - write_line(state.socket, "#{info.name} has just died.")
{:noreply, %__MODULE__{state | messages: []}}
end
def handle_cast({:arrive, _info, _from_loc} = message, state) do
{:noreply, %__MODULE__{state | messages: [message | state.messages]}}
end
def handle_cast({:depart, _info, _to} = message, state) do
{:noreply, %__MODULE__{state | messages: [message | state.messages]}}
end
def handle_cast({:death, _info} = message, state) do
{:noreply, %__MODULE__{state | messages: [Tuple.append(message, nil) | state.messages]}}
end
def handle_cast({:join_room, loc_id}, state) do
{:ok, exits} = World.Location.arrive(loc_id,
{
{__MODULE__, state.id},
public_info(state),
"seemingly nowhere"})
{
:noreply,
%__MODULE__{state |
location_id: loc_id,
exits: exits}}
end
def handle_cast({:input, "help"}, state) do
table = TableRex.quick_render!([
["look", "see what is in the room"],
["wall <message>", "talk to all other users"],
["<exit number>", "move"],
["who", "see who is logged in"],
["help", "read this again"],
["quit", "log out"],
["spawn_more", "spawn more mobs"]
], ["Command", "Description"])
write_line(state.socket, Bunt.ANSI.format [
:bright,
:blue,
"""
Welcome, #{state.id}! Here are the available commands.
#{table}
"""
]
)
{:noreply, state}
end
def handle_cast({:input, "who"}, state) do
users =
Registry.Controllers
|> Registry.match("human", :_)
|> Enum.map(&([elem(&1, 1)]))
output = TableRex.quick_render!(users, ["Users logged in"]) <> "\n"
write_line(state.socket, output)
{:noreply, state}
end
def handle_cast({:input, "spawn_more"}, state) do
write_line(state.socket, "Spawning 40 more mobs.\n")
Mobs.create_mobs(40)
{:noreply, state}
end
def handle_cast({:input, "quit"}, state) do
write_line(state.socket, "Goodbye.\n")
World.Location.depart(
state.location_id,
{
{__MODULE__, state.id},
state,
"the real world"
}
)
:gen_tcp.close(state.socket)
{:stop, :normal, state}
end
def handle_cast({:input, "look"}, state) do
things_seen = World.Location.look(state.location_id)
text = """
#{things_seen.description}
#{Bunt.ANSI.format [:green, read_exits(things_seen.exits)]}
#{read_entities(things_seen.living_things)}
#{read_entities(things_seen.items)}
"""
|> String.trim()
state.socket
|> write_line(text <> "\n")
{:noreply, state}
end
def handle_cast({:input, "wall " <> message}, state) do
Registry.Controllers
|> Registry.match("human", :_)
|> Enum.map(&(elem(&1, 0)))
|> Enum.each(fn(x) -> GenServer.cast(x, {:receive_wall, state.id, message}) end)
{:noreply, state}
end
def handle_cast({:receive_wall, from_user, message}, state) do
write_line(state.socket, Bunt.ANSI.format [:bright, :yellow, "#{from_user} says: #{message}\n"])
{:noreply, state}
end
def handle_cast({:input, input}, state) do
cond do
pathway = Enum.find(state.exits, &(&1.name == input)) ->
with info <- public_info(state),
:ok <- World.Location.depart(state.location_id, {{__MODULE__, state.id}, info, pathway.from_id}),
{:ok, exits} <- World.Location.arrive(pathway.from_id, {{__MODULE__, state.id}, info, state.location_id}) do
GenServer.cast(self(), {:input, "look"})
{:noreply, %__MODULE__{state | location_id: pathway.from_id, exits: exits}}
end
true ->
write_line(state.socket, "Sorry, I don't understand that.")
{:noreply, state}
end
end
defp read_entities(entities) do
entities
|> Enum.group_by(&(&1))
|> Enum.map(fn({k, v}) -> {k, Enum.count(v)} end)
|> Enum.sort(fn({_n1, c1}, {_n2, c2}) -> c1 > c2 end)
|> Enum.map(fn
{name, 1} -> name
{name, count} -> "#{count} #{name}"
end)
|> Enum.join(", ")
end
defp read_exits(exits) do
exit_text =
exits
|> Enum.map(fn(x) -> x.name end)
|> Enum.join(", ")
"Exits: #{exit_text}."
end
defp write_line(socket, line) do
:gen_tcp.send(socket, line)
end
defp public_info(state) do
%{
gender: :male,
name: state.id
}
end
end
| 29.922747 | 121 | 0.606139 |
730f100a560ea2087a5fb5b970244a19a5879521 | 2,526 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p4beta1_gcs_destination.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p4beta1_gcs_destination.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p4beta1_gcs_destination.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1GcsDestination do
@moduledoc """
The Google Cloud Storage location where the output will be written to.
## Attributes
* `uri` (*type:* `String.t`, *default:* `nil`) - Google Cloud Storage URI prefix where the results will be stored. Results will be in JSON format and preceded by its corresponding input URI prefix. This field can either represent a gcs file prefix or gcs directory. In either case, the uri should be unique because in order to get all of the output files, you will need to do a wildcard gcs search on the uri prefix you provide. Examples: * File Prefix: gs://bucket-name/here/filenameprefix The output files will be created in gs://bucket-name/here/ and the names of the output files will begin with "filenameprefix". * Directory Prefix: gs://bucket-name/some/location/ The output files will be created in gs://bucket-name/some/location/ and the names of the output files could be anything because there was no filename prefix specified. If multiple outputs, each response is still AnnotateFileResponse, each of which contains some subset of the full list of AnnotateImageResponse. Multiple outputs can happen if, for example, the output JSON is too large and overflows into multiple sharded files.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:uri => String.t()
}
field(:uri)
end
defimpl Poison.Decoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1GcsDestination do
def decode(value, options) do
GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1GcsDestination.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1GcsDestination do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 53.744681 | 1,102 | 0.770784 |
730f4693f2f504f34f990064041f5ae3b3200039 | 14,874 | exs | Elixir | apps/site/test/site_web/views/helpers_test.exs | mbta/crispy-spoon | 7ef28a1a6adc73899b007e334b9220f7a48a60fa | [
"MIT"
] | null | null | null | apps/site/test/site_web/views/helpers_test.exs | mbta/crispy-spoon | 7ef28a1a6adc73899b007e334b9220f7a48a60fa | [
"MIT"
] | null | null | null | apps/site/test/site_web/views/helpers_test.exs | mbta/crispy-spoon | 7ef28a1a6adc73899b007e334b9220f7a48a60fa | [
"MIT"
] | null | null | null | defmodule SiteWeb.ViewHelpersTest do
@moduledoc false
use SiteWeb.ConnCase, async: true
import SiteWeb.ViewHelpers
import Phoenix.HTML.Tag, only: [tag: 2, content_tag: 3]
import Phoenix.HTML, only: [safe_to_string: 1, html_escape: 1]
alias Routes.{Repo, Route}
describe "break_text_at_slash/1" do
test "doesn't change text without slashes" do
s = "this text doesn't contain a slash"
assert s == break_text_at_slash(s)
end
test "adds zero width spaces after slashes" do
s = "abc/123/xyz"
result = break_text_at_slash(s)
assert String.length(result) == 13
assert result == "abc/123/xyz"
end
end
describe "tel_link/1" do
test "renders formattable numbers as a link" do
assert tel_link("617-222-3200") ==
content_tag(:a, "617-222-3200",
href: "tel:+1-617-222-3200",
class: "no-wrap",
aria: [label: "6 1 7. 2 2 2. 3 2 0 0"]
)
end
test "non-formattable numbers don't get processed and don't become links" do
assert tel_link("0118 999 881 999 119 7253") ==
content_tag(:span, "0118 999 881 999 119 7253", [])
end
end
describe "hidden_query_params/2" do
test "creates a hidden tag for each query parameter", %{conn: conn} do
actual = hidden_query_params(%{conn | query_params: %{"one" => "value", "two" => "other"}})
expected = [
tag(:input, type: "hidden", name: "one", value: "value"),
tag(:input, type: "hidden", name: "two", value: "other")
]
assert expected == actual
end
test "can handle nested params", %{conn: conn} do
query_params = %{"location" => %{"address" => "value"}}
actual = hidden_query_params(%{conn | query_params: query_params})
expected = [
tag(:input, type: "hidden", name: "location[address]", value: "value")
]
assert actual == expected
end
test "can handle lists of params", %{conn: conn} do
query_params = %{"address" => ["one", "two"]}
actual = hidden_query_params(%{conn | query_params: query_params})
expected = [
tag(:input, type: "hidden", name: "address[]", value: "one"),
tag(:input, type: "hidden", name: "address[]", value: "two")
]
assert actual == expected
end
end
describe "stop_link/1" do
test "given a stop, returns a link to that stop" do
link =
%Stops.Stop{id: "place-sstat", name: "South Station"}
|> stop_link
|> safe_to_string
assert link == ~s(<a href="/stops/place-sstat">South Station</a>)
end
test "given a stop ID, returns a link to that stop" do
link =
"place-sstat"
|> stop_link
|> safe_to_string
assert link == ~s(<a href="/stops/place-sstat">South Station</a>)
end
end
describe "external_link/1" do
test "Protocol is added when one is not included" do
assert external_link("http://www.google.com") == "http://www.google.com"
assert external_link("www.google.com") == "http://www.google.com"
assert external_link("https://google.com") == "https://google.com"
end
end
describe "route_to_class/1" do
test "converts routes to css classes based on type" do
assert route_to_class(%Routes.Route{type: 0}) == "subway"
assert route_to_class(%Routes.Route{type: 1}) == "subway"
assert route_to_class(%Routes.Route{type: 2}) == "commuter-rail"
assert route_to_class(%Routes.Route{type: 3}) == "bus"
assert route_to_class(%Routes.Route{type: 4}) == "ferry"
end
test "no route generates no class" do
assert route_to_class(nil) == ""
end
end
describe "mode_summaries/2" do
test "commuter rail summaries only include commuter_rail mode" do
summaries = mode_summaries(:commuter_rail, {:zone, "7"})
assert Enum.all?(summaries, fn summary -> :commuter_rail in summary.modes end)
end
test "Bus summaries return bus single trip information with subway passes" do
[first | rest] = mode_summaries(:bus)
assert first.modes == [:bus]
assert first.duration == :single_trip
assert Enum.all?(rest, fn summary -> summary.duration in [:week, :month] end)
end
test "Bus_subway summaries return both bus and subway information" do
summaries = mode_summaries(:bus_subway)
mode_present = fn summary, mode -> mode in summary.modes end
assert Enum.any?(summaries, &mode_present.(&1, :bus)) &&
Enum.any?(summaries, &mode_present.(&1, :subway))
end
test "Ferry summaries with nil fare name return range of fares including passes" do
fares =
:ferry
|> mode_summaries(nil)
|> Enum.map(fn %Fares.Summary{fares: [{text, prices}]} ->
IO.iodata_to_binary([text, " ", prices])
end)
assert fares == ["All ferry routes $3.70 – $9.75", "All ferry routes $80.00 – $329.00"]
end
test "Ferry summmaries with a fare name return a single fare" do
fares =
:ferry
|> mode_summaries(:ferry_inner_harbor)
|> Enum.map(fn %Fares.Summary{fares: [{text, prices}]} ->
IO.iodata_to_binary([text, " ", prices])
end)
assert fares == [
"mTicket App, Paper Ferry Ticket, or Cash $3.70",
"CharlieTicket $90.00",
"mTicket App $80.00"
]
end
end
describe "mode_name/1" do
test "returns correct name for custom routes" do
assert mode_name("909") == "Logan Express"
assert mode_name("983") == "Massport Shuttle"
assert mode_name("Massport-1") == "Massport Shuttle"
end
end
describe "mode_atom/1" do
test "Mode atoms do not contain spaces" do
assert mode_atom("Commuter Rail") == :commuter_rail
assert mode_atom("Red Line") == :red_line
assert mode_atom("Ferry") == :ferry
end
end
describe "format_full_date/1" do
test "formats a date" do
assert format_full_date(~D[2017-03-31]) == "March 31, 2017"
end
end
describe "cms_static_page_path/2" do
test "returns the given path as-is", %{conn: conn} do
assert cms_static_page_path(conn, "/cms/path") == "/cms/path"
end
test "external links should not be processed", %{conn: conn} do
path =
conn
|> Map.put(:query_params, %{"preview" => nil, "vid" => "latest"})
|> cms_static_page_path("https://www.google.com")
assert path == "https://www.google.com"
end
test "returns the given path as-is, even with preview params (chained-preview disabled)", %{
conn: conn
} do
path =
conn
|> Map.put(:query_params, %{"preview" => nil, "vid" => "112", "nid" => "6"})
|> cms_static_page_path("/cms/path")
assert path == "/cms/path"
end
end
describe "fare_group/1" do
test "return correct fare group for all modes" do
assert fare_group(:bus) == "bus_subway"
assert fare_group(:subway) == "bus_subway"
assert fare_group(:commuter_rail) == "commuter_rail"
assert fare_group(:ferry) == "ferry"
end
test "return correct fare group when route type given (as integer)" do
assert fare_group(0) == "bus_subway"
assert fare_group(1) == "bus_subway"
assert fare_group(2) == "commuter_rail"
assert fare_group(3) == "bus_subway"
assert fare_group(4) == "ferry"
end
end
describe "to_camelcase/1" do
test "turns a phrase with spaces into camelcased format" do
assert to_camelcase("Capitalized With Spaces") == "capitalizedWithSpaces"
assert to_camelcase("Capitalized") == "capitalized"
assert to_camelcase("Sentence case") == "sentenceCase"
assert to_camelcase("no words capitalized") == "noWordsCapitalized"
assert to_camelcase("with_underscores") == "withUnderscores"
end
end
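  # One implementation that passes the assertions above (a sketch, not necessarily
  # the project's own code):
  #
  #     def to_camelcase(phrase) do
  #       [first | rest] =
  #         phrase
  #         |> String.replace("_", " ")
  #         |> String.split(" ")
  #
  #       Enum.join([String.downcase(first) | Enum.map(rest, &String.capitalize/1)])
  #     end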
describe "struct_name_to_string/1" do
test "turns a module name atom into an underscored string" do
assert struct_name_to_string(CMS.Partial.Paragraph.CustomHTML) == "custom_html"
end
test "turns a module struct into an underscored string" do
assert struct_name_to_string(%CMS.Partial.Paragraph.CustomHTML{}) == "custom_html"
end
end
describe "fa/2" do
test "creates the HTML for a FontAwesome icon" do
expected = ~s(<i aria-hidden="true" class="notranslate fa fa-arrow-right "></i>)
result = fa("arrow-right")
assert result |> safe_to_string() == expected
end
test "when optional attributes are included" do
expected =
~s(<i aria-hidden="true" class="notranslate fa fa-arrow-right foo" title="title"></i>)
result = fa("arrow-right", class: "foo", title: "title")
assert result |> safe_to_string() == expected
end
end
describe "direction_with_headsign/3" do
test "returns the direction name and headsign when included" do
actual = safe_to_string(html_escape(direction_with_headsign(%Route{}, 0, "headsign")))
assert actual =~ "Outbound"
assert actual =~ "arrow-right"
assert actual =~ ~s(<span class="sr-only">to</span>)
assert actual =~ "headsign"
end
test "uses route's direction_destination if the headsign is empty" do
route = Repo.get("1")
actual = route |> direction_with_headsign(0, "") |> html_escape() |> safe_to_string()
assert actual =~ "Outbound"
assert actual =~ "arrow-right"
assert actual =~ "Harvard"
actual = route |> direction_with_headsign(0, []) |> html_escape() |> safe_to_string()
assert actual =~ "Outbound"
assert actual =~ "arrow-right"
assert actual =~ "Harvard"
end
test "returns an empty value for the direction_names and direction_destinations if not included" do
actual =
safe_to_string(
html_escape(
direction_with_headsign(
%Routes.Route{
custom_route?: false,
description: :rail_replacement_bus,
direction_destinations: %{0 => nil, 1 => nil},
direction_names: %{0 => "", 1 => ""},
id: "Shuttle-NewtonHighlandsRiverside",
long_name: "Green Line D Shuttle",
name: "Green Line D Shuttle",
type: 3
},
0,
"Riverside (Shuttle)"
)
)
)
assert actual =~ "arrow-right"
assert actual =~ "Riverside (Shuttle)"
end
end
describe "pretty_date/2" do
test "it is today when the date given is todays date" do
assert pretty_date(Util.service_date()) == "today"
end
test "it abbreviates the month when the date is not today" do
date = ~D[2017-01-01]
assert pretty_date(date) == "Jan 1"
end
test "it applies custom formatting if provided" do
date = ~D[2017-01-01]
assert pretty_date(date, "{Mfull} {D}, {YYYY}") == "January 1, 2017"
end
end
describe "svg/1" do
test "wraps svg in span with icon class" do
svg_name =
:site
|> Application.app_dir("priv/static/**/*.svg")
|> Path.wildcard()
|> List.first()
|> Path.basename()
rendered =
svg_name
|> svg()
|> safe_to_string()
assert [{"span", _, _}] = Floki.find(rendered, ".c-svg__#{Path.rootname(svg_name)}")
end
test "throw exception for unknown SVG" do
assert_raise ArgumentError, fn ->
svg("???")
end
end
end
test "mode_icon/2" do
for type <- [
:subway,
:commuter_rail,
:bus,
:logan_express,
:massport_shuttle,
:ferry,
:trolley
],
size <- [:default, :small] do
assert [{"span", [{"class", class}], _}] =
type
|> mode_icon(size)
|> safe_to_string()
|> Floki.parse_fragment()
|> elem(1)
case type do
:commuter_rail -> assert class == "notranslate c-svg__icon-mode-commuter-rail-#{size}"
:logan_express -> assert class == "notranslate c-svg__icon-mode-bus-#{size}"
:massport_shuttle -> assert class == "notranslate c-svg__icon-mode-bus-#{size}"
other -> assert class == "notranslate c-svg__icon-mode-#{other}-#{size}"
end
end
assert [{"span", [{"class", "notranslate c-svg__icon-the-ride-default"}], _}] =
:the_ride
|> mode_icon(:default)
|> safe_to_string()
|> Floki.parse_fragment()
|> elem(1)
end
test "bw_circle_icon/2" do
for type <- [0, 1, 2, 3, 4],
size <- [:default] do
assert [{"span", [{"class", class}], _}] =
type
|> bw_circle_icon(size)
|> safe_to_string()
|> Floki.parse_fragment()
|> elem(1)
if type == 0 do
assert class == "notranslate c-svg__icon-trolley-circle-bw-#{size}"
else
type =
type
|> Route.type_atom()
|> Atom.to_string()
|> String.replace("_", "-")
assert class == "notranslate c-svg__icon-#{type}-circle-bw-#{size}"
end
end
end
describe "line_icon/2" do
test "for subway routes" do
for id <- ["Red", "Orange", "Blue"] do
icon =
%Routes.Route{id: id, type: 1}
|> line_icon(:default)
|> safe_to_string()
assert icon =~ "c-svg__icon-#{String.downcase(id)}-line-default"
end
end
test "for green line" do
for branch <- ["B", "C", "D", "E"] do
icon =
%Routes.Route{id: "Green-" <> branch, type: 0}
|> line_icon(:default)
|> safe_to_string()
assert icon =~ "c-svg__icon-green-line-#{String.downcase(branch)}-default"
end
icon =
%Routes.Route{id: "Green", type: 0}
|> line_icon(:default)
|> safe_to_string()
assert icon =~ "c-svg__icon-green-line-default"
end
test "for mattapan" do
icon =
%Routes.Route{id: "Mattapan", type: 0}
|> line_icon(:default)
|> safe_to_string()
assert icon =~ "c-svg__icon-mattapan-line-default"
end
end
describe "bus_icon_pill/1" do
test "for silver line" do
icon =
%Routes.Route{
id: "742",
long_name: "Design Center - South Station",
name: "SL2",
type: 3
}
|> bus_icon_pill
|> safe_to_string
assert icon =~ "u-bg--silver-line"
end
test "for buses" do
icon =
%Routes.Route{
id: "221",
type: 3,
name: "221"
}
|> bus_icon_pill
|> safe_to_string
assert icon =~ "u-bg--bus"
end
end
end
| 30.604938 | 103 | 0.584712 |
730f544f2deaa84f263cfcdf5c802001e8359b08 | 163 | ex | Elixir | lib/ex_polygon/historic_forex.ex | aai/ex_polygon | 3bd6d8d0f1fbe4cd3fa939751c5ff00813eeeba3 | [
"MIT"
] | null | null | null | lib/ex_polygon/historic_forex.ex | aai/ex_polygon | 3bd6d8d0f1fbe4cd3fa939751c5ff00813eeeba3 | [
"MIT"
] | null | null | null | lib/ex_polygon/historic_forex.ex | aai/ex_polygon | 3bd6d8d0f1fbe4cd3fa939751c5ff00813eeeba3 | [
"MIT"
] | null | null | null | defmodule ExPolygon.HistoricForex do
@type t :: %ExPolygon.HistoricForex{}
defstruct ~w(
day
map
msLatency
status
pair
ticks
)a
end
| 12.538462 | 39 | 0.638037 |
730f77316cca246fca3cad45aab7fc612d278c76 | 52 | ex | Elixir | lib/alchemy.ex | shutangyu/alchemy | 07d7f99d5e206819ffb9001ef8248c76a2201fa5 | [
"MIT"
] | 53 | 2016-02-17T13:09:59.000Z | 2021-08-09T11:43:29.000Z | lib/alchemy.ex | shutangyu/alchemy | 07d7f99d5e206819ffb9001ef8248c76a2201fa5 | [
"MIT"
] | 18 | 2016-02-19T16:55:46.000Z | 2021-11-01T10:11:26.000Z | lib/alchemy.ex | shutangyu/alchemy | 07d7f99d5e206819ffb9001ef8248c76a2201fa5 | [
"MIT"
] | 1 | 2016-12-07T17:10:59.000Z | 2016-12-07T17:10:59.000Z | defmodule Alchemy do
@moduledoc ~S"""
"""
end
| 7.428571 | 20 | 0.596154 |
730fcf5d596c8c0cc022b9e82989ce5c27e9a760 | 2,015 | exs | Elixir | apps/database/priv/repo/abc_seeds.exs | gregredhead/belethor | 255925396b18ba4a6950f386abf8a9e17a7e5e7c | [
"Apache-2.0"
] | 3 | 2018-07-20T22:14:36.000Z | 2018-12-21T19:54:48.000Z | apps/database/priv/repo/abc_seeds.exs | gregredhead/belethor | 255925396b18ba4a6950f386abf8a9e17a7e5e7c | [
"Apache-2.0"
] | 36 | 2018-09-15T21:46:54.000Z | 2020-03-28T16:10:18.000Z | apps/database/priv/repo/abc_seeds.exs | gregredhead/belethor | 255925396b18ba4a6950f386abf8a9e17a7e5e7c | [
"Apache-2.0"
] | 2 | 2018-07-22T08:47:07.000Z | 2021-12-11T01:39:19.000Z | # Script for populating the database. You can run it as:
#
#     mix run priv/repo/abc_seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
#     Database.Schema.Repo.insert!(%Database.Schema.Mod{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
alias Database.Schema.Repo
alias Database.Schema.Mod
alias Database.Schema.ModTag
alias Database.Schema.Modfile
alias Ecto.Changeset
mod_a =
%Mod{name: "Mod A", desc: "aaaaaaaaaaaaaaa", pic: "/favicon.ico"}
|> Repo.insert!()
|> Repo.preload([:tags, :oldrim, :sse])
mod_b =
%Mod{name: "Mod B", desc: "bbbbbbbbbbbbbbb", pic: "/favicon.ico"}
|> Repo.insert!()
|> Repo.preload([:tags, :oldrim, :sse])
mod_c =
%Mod{name: "Mod C", desc: "ccccccccccccccc", pic: "/favicon.ico"}
|> Repo.insert!()
|> Repo.preload([:tags, :oldrim, :sse])
tag_a = %ModTag{name: "Tag A"} |> Repo.insert!() |> Repo.preload(:mods)
tag_b = %ModTag{name: "Tag B"} |> Repo.insert!() |> Repo.preload(:mods)
tag_c = %ModTag{name: "Tag C"} |> Repo.insert!() |> Repo.preload(:mods)
file_a_sse =
%Modfile{console_compat: false, steam: "steam url: sse aaaaaaaaaa"}
|> Repo.insert!()
file_a_oldrim =
%Modfile{console_compat: false, nexus: "nexus url: oldrim aaaaaaaa"}
|> Repo.insert!()
file_b_sse =
%Modfile{console_compat: true, bethesda: "beths url: bbbbbbbb"}
|> Repo.insert!()
mod_a =
mod_a
|> Changeset.change()
|> Changeset.put_assoc(:sse, file_a_sse)
|> Changeset.put_assoc(:oldrim, file_a_oldrim)
|> Repo.update!()
mod_b =
mod_b
|> Changeset.change()
|> Changeset.put_assoc(:sse, file_b_sse)
|> Repo.update!()
mod_a
|> Changeset.change()
|> Changeset.put_assoc(:tags, [tag_a, tag_c])
|> Repo.update!()
mod_b
|> Changeset.change()
|> Changeset.put_assoc(:tags, [tag_a, tag_b, tag_c])
|> Repo.update!()
mod_c
|> Changeset.change()
|> Changeset.put_assoc(:tags, [tag_b])
|> Repo.update!()
| 25.1875 | 71 | 0.658561 |
730fd6ef03b8292c021d6b6da2c2d0bca378ace2 | 6,462 | ex | Elixir | lib/terminus.ex | libitx/terminus | f394aea4ced49aec216203b19c33f53578eac7ec | [
"Apache-2.0"
] | 16 | 2020-04-15T14:45:15.000Z | 2022-02-28T03:28:22.000Z | lib/terminus.ex | libitx/terminus | f394aea4ced49aec216203b19c33f53578eac7ec | [
"Apache-2.0"
] | 1 | 2020-07-28T21:39:41.000Z | 2020-07-29T13:05:45.000Z | lib/terminus.ex | libitx/terminus | f394aea4ced49aec216203b19c33f53578eac7ec | [
"Apache-2.0"
] | 3 | 2021-01-10T22:39:43.000Z | 2022-03-15T07:39:22.000Z | defmodule Terminus do
@moduledoc """

Terminus allows you to crawl and subscribe to Bitcoin transaction events and
download binary data from transactions, using a combination of
[Bitbus](https://bitbus.network) and [Bitsocket](https://bitsocket.network),
and [BitFS](https://bitfs.network).
Terminus provides a single unified interface for querying Planaria corp APIs
in a highly performant manner. Each request is a `GenStage` process, enabling
you to create powerful concurrent data flows. Terminus may well be the most
powerful way of querying Bitcoin in the Universe!
## APIs
Terminus can be used to interface with the following Planaria Corp APIs.
* [`Bitbus`](`Terminus.Bitbus`) - crawl filtered subsets of **confirmed** Bitcoin transactions in blocks.
* [`Bitsocket`](`Terminus.Bitsocket`) - subscribe to a live, filterable stream of realtime transaction events.
* [`BitFS`](`Terminus.BitFS`) - fetch raw binary data chunks (over 512kb) indexed from all Bitcoin transactions.
### Authentication
Both Bitbus and Bitsocket require a token to authenticate requests. *(The Bitsocket
`listen` API currently doesn't require a token but that is likely to change).*
Currently tokens are free with no usage limits. *(Also likely to change)*
**[Get your Planaria Token](https://token.planaria.network).**
Where a token is given as a tuple pair in the format, `{app, key}`, Terminus
  will fetch the token at runtime using `Application.get_env(app, key)`.
iex> Terminus.Omni.find(txid, token: {:my_app, :planaria_token})
{:ok, %{...}}
### Query language
Both Bitbus and Bitsocket use the same MongoDB-like query language, known as
[Bitquery](https://bitquery.planaria.network). Terminus fully supports both
the TXO (Transaction Object) and BOB (Bitcoin OP_RETURN Bytecode) schemas, and
allows the optional use of shorthand queries (just the `q` value).
iex> Terminus.Bitbus.fetch!(%{
...> find: %{ "out.s2" => "1LtyME6b5AnMopQrBPLk4FGN8UBuhxKqrn" },
...> sort: %{ "blk.i": -1 },
...> project: %{ "tx.h": 1 },
...> limit: 5
...> }, token: token)
[
%{"tx" => %{"h" => "fca7bdd7658613418c54872212811cf4c5b4f8ee16864eaf70cb1393fb0df6ca"}},
%{"tx" => %{"h" => "79ae3ca23d1067b9ab45aba7e8ff4de1943e383e9a33e562d5ffd8489f388c93"}},
%{"tx" => %{"h" => "5526989417f28da5e0c99b58863db58c1faf8862ac9325dc415ad4b11605c1b1"}},
%{"tx" => %{"h" => "0bac587681360f961dbccba4c49a5c8f1b6f0bef61fe8501a28dcfe981a920b5"}},
%{"tx" => %{"h" => "fa13a8f0f5688f761b2f34949bb35fa5d6fd14cb3d49c2c1617363b6984df162"}}
]
## Using Terminus
Terminus can be used as a simple API client, or a turbo-charged, concurrent
multi-stream Bitcoin scraper on steroids. You decide.
The following modules are the primary ways of using Terminus.
* `Terminus.Bitbus` - functions for crawling and query confirmed Bitcoin transactions.
* `Terminus.Bitsocket` - query mempool transactions and listen to realtime transaction events.
* `Terminus.BitFS` - fetch binary data blobs embedded in Bitcoin transactions.
* `Terminus.Omni` - conveniently fetch confirmed and mempool transactions together.
* `Terminus.Planaria` - run Bitcoin scraper processes under your application's supervision tree.
### Streams
Most Terminus functions return a streaming `t:Enumerable.t/0` allowing you to
compose data processing pipelines and operations.
iex> Terminus.Bitbus.crawl!(query, token: token)
...> |> Stream.map(&Terminus.BitFS.scan_tx/1)
...> |> Stream.each(&save_to_db/1)
...> |> Stream.run
:ok
### Omni
  Sometimes it's necessary to query both confirmed and unconfirmed transactions
  simultaneously. This is where `Terminus.Omni` comes in, effectively replicating
  the functionality of legacy Planaria APIs and returning results from
Bitbus and Bitsocket in one call.
iex> Terminus.Omni.fetch(query, token: token)
{:ok, %{
c: [...], # collection of confirmed tx
u: [...] # collection of mempool tx
}}
You can also easily find a single transaction by its [`txid`](`t:Terminus.txid`)
irrespective of whether it is confirmed or not.
iex> Terminus.Omni.find(txid, token: token)
{:ok, %{
"tx" => %{"h" => "fca7bdd7658613418c54872212811cf4c5b4f8ee16864eaf70cb1393fb0df6ca"},
...
}}
### Planaria
Using `Terminus.Planaria` inside a module allows you to simply recreate
[Planaria](https://neon.planaria.network)-like state machine functionality.
Planarias can be started under your app's supervision tree, allowing multiple
Planaria processes to run concurrently.
defmodule TwetchScraper do
@query %{
"find" => %{
"out.s2": "19HxigV4QyBv3tHpQVcUEQyq1pzZVdoAut",
"out.s25": "twetch"
}
}
use Terminus.Planaria, token: {:my_app, :planaria_token},
from: 600000,
query: @query
def handle_data(:block, txns) do
# Handle confirmed transactions
end
def handle_data(:mempool, txns) do
# Handle unconfirmed transactions
end
end
### Concurrency
Under the hood, each Terminus request is a `GenStage` producer process, and
the bare [`pid`](`t:pid/0`) can be returned. This allows you to take full
advantage of Elixir's concurrency, by either using with your own `GenStage`
consumers or using a tool like `Flow` to create powerful concurrent pipelines.
# One stream of transactions will be distributed across eight concurrent
# processes for mapping and saving the data.
iex> {:ok, pid} = Terminus.Bitbus.crawl(query, token: token, stage: true)
iex> Flow.from_stages([pid], stages: 8)
...> |> Flow.map(&Terminus.BitFS.scan_tx/1)
...> |> Flow.map(&save_to_db/1)
...> |> Flow.run
:ok
"""
@typedoc "Bitcoin data query language."
@type bitquery :: map | String.t
@typedoc "BitFS URI scheme."
@type bitfs_uri :: String.t
@typedoc "On-data callback function."
@type callback :: function | nil
@typedoc "Hex-encoded transaction ID."
  @type txid :: String.t
end
| 39.644172 | 164 | 0.682761 |
730fe41c653bccd40fff59ebb491d2a6a65334b7 | 1,075 | ex | Elixir | lib/chat_api_web/controllers/widget_settings_controller.ex | rlanga/papercups | 358ca46c344908585cd0214a0de96e5676120c68 | [
"MIT"
] | null | null | null | lib/chat_api_web/controllers/widget_settings_controller.ex | rlanga/papercups | 358ca46c344908585cd0214a0de96e5676120c68 | [
"MIT"
] | null | null | null | lib/chat_api_web/controllers/widget_settings_controller.ex | rlanga/papercups | 358ca46c344908585cd0214a0de96e5676120c68 | [
"MIT"
] | null | null | null | defmodule ChatApiWeb.WidgetSettingsController do
use ChatApiWeb, :controller
alias ChatApi.WidgetSettings
alias ChatApi.WidgetSettings.WidgetSetting
action_fallback ChatApiWeb.FallbackController
def show(conn, %{"account_id" => account_id}) do
widget_settings = WidgetSettings.get_settings_by_account(account_id)
render(conn, "show.json", widget_settings: widget_settings)
end
def create_or_update(conn, %{"widget_settings" => widget_settings_params}) do
with %{account_id: account_id} <- conn.assigns.current_user do
widget_settings_params = Map.merge(widget_settings_params, %{"account_id" => account_id})
{:ok, widget_settings} = WidgetSettings.create_or_update(account_id, widget_settings_params)
render(conn, "update.json", widget_settings: widget_settings)
end
end
def delete(conn, %{"id" => id}) do
widget_setting = WidgetSettings.get_widget_setting!(id)
with {:ok, %WidgetSetting{}} <- WidgetSettings.delete_widget_setting(widget_setting) do
send_resp(conn, :no_content, "")
end
end
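  # These actions are normally exposed through the Phoenix router; a hypothetical
  # wiring (paths and pipeline names are assumptions, not taken from this project):
  #
  #     scope "/api", ChatApiWeb do
  #       pipe_through :api
  #
  #       get "/widget_settings", WidgetSettingsController, :show
  #       put "/widget_settings", WidgetSettingsController, :create_or_update
  #       delete "/widget_settings/:id", WidgetSettingsController, :delete
  #     end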
end
| 33.59375 | 98 | 0.752558 |
731081df37162c9da77854b938ba05549f144e11 | 1,098 | ex | Elixir | test/support/conn_case.ex | jdesilvio/pin-bin-api | 3fa1b5ecc21a99e08a11d9bbaf21d558cf5fdbb7 | [
"Apache-2.0"
] | null | null | null | test/support/conn_case.ex | jdesilvio/pin-bin-api | 3fa1b5ecc21a99e08a11d9bbaf21d558cf5fdbb7 | [
"Apache-2.0"
] | 5 | 2018-11-22T01:01:29.000Z | 2019-03-15T01:02:43.000Z | test/support/conn_case.ex | jdesilvio/pin_bin | 3fa1b5ecc21a99e08a11d9bbaf21d558cf5fdbb7 | [
"Apache-2.0"
] | null | null | null | defmodule PinBinWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
alias Ecto.Adapters.SQL.Sandbox
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
alias PinBin.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import PinBinWeb.Router.Helpers
# The default endpoint for testing
@endpoint PinBinWeb.Endpoint
end
end
setup tags do
:ok = Sandbox.checkout(PinBin.Repo)
unless tags[:async] do
Sandbox.mode(PinBin.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
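  # A test module built on this case might look like the following (controller and
  # route are hypothetical, shown only to illustrate how the case is used):
  #
  #     defmodule PinBinWeb.ExampleControllerTest do
  #       use PinBinWeb.ConnCase
  #
  #       test "GET /", %{conn: conn} do
  #         conn = get(conn, "/")
  #         assert response(conn, 200)
  #       end
  #     end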
end
| 23.361702 | 56 | 0.703097 |
7310aaaa2fb67117dbad43ecd52c8551a9fe9049 | 2,334 | ex | Elixir | lib/changelog/schema/topic.ex | gustavoarmoa/changelog.com | e898a9979a237ae66962714821ed8633a4966f37 | [
"MIT"
] | 2,599 | 2016-10-25T15:02:53.000Z | 2022-03-26T02:34:42.000Z | lib/changelog/schema/topic.ex | codexn/changelog.com | 25ce501ee62eef76731c38d590667e8132096ba8 | [
"MIT"
] | 253 | 2016-10-25T20:29:24.000Z | 2022-03-29T21:52:36.000Z | lib/changelog/schema/topic.ex | codexn/changelog.com | 25ce501ee62eef76731c38d590667e8132096ba8 | [
"MIT"
] | 298 | 2016-10-25T15:18:31.000Z | 2022-01-18T21:25:52.000Z | defmodule Changelog.Topic do
use Changelog.Schema
alias Changelog.{EpisodeTopic, Files, NewsItemTopic, PostTopic, Regexp}
schema "topics" do
field :name, :string
field :slug, :string
field :description, :string
field :website, :string
field :twitter_handle, :string
field :icon, Files.Icon.Type
has_many :episode_topics, EpisodeTopic, on_delete: :delete_all
has_many :episodes, through: [:episode_topics, :episode]
has_many :news_item_topics, NewsItemTopic, on_delete: :delete_all
has_many :news_items, through: [:news_item_topics, :news_item]
has_many :post_topics, PostTopic, on_delete: :delete_all
has_many :posts, through: [:post_topics, :post]
timestamps()
end
def with_news_items(query \\ __MODULE__) do
from(q in query,
distinct: true,
left_join: i in assoc(q, :news_item_topics),
where: not is_nil(i.id)
)
end
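  # Composable with further Ecto queries; for example (illustrative usage, assuming
  # the application's usual `Changelog.Repo`):
  #
  #     Changelog.Topic.with_news_items()
  #     |> Changelog.Repo.all()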
def file_changeset(topic, attrs \\ %{}),
do: cast_attachments(topic, attrs, [:icon], allow_urls: true)
def insert_changeset(topic, attrs \\ %{}) do
topic
|> cast(attrs, ~w(name slug description twitter_handle website)a)
|> validate_required([:name, :slug])
|> validate_format(:slug, Regexp.slug(), message: Regexp.slug_message())
|> validate_format(:website, Regexp.http(), message: Regexp.http_message())
|> unique_constraint(:slug)
|> unique_constraint(:twitter_handle)
end
def update_changeset(topic, attrs \\ %{}) do
topic
|> insert_changeset(attrs)
|> file_changeset(attrs)
end
def preload_news_items(query = %Ecto.Query{}) do
query
|> Ecto.Query.preload(news_item_topics: ^NewsItemTopic.by_position())
|> Ecto.Query.preload(:news_items)
end
def preload_news_items(topic) do
topic
|> Repo.preload(news_item_topics: {NewsItemTopic.by_position(), :news_item})
|> Repo.preload(:news_items)
end
def episode_count(topic),
do: Repo.count(from(q in EpisodeTopic, where: q.topic_id == ^topic.id))
def news_count(topic),
do:
Repo.count(
from(q in NewsItemTopic,
where: q.topic_id == ^topic.id,
join: i in assoc(q, :news_item),
where: i.status == ^:published
)
)
def post_count(topic), do: Repo.count(from(q in PostTopic, where: q.topic_id == ^topic.id))
end
| 29.544304 | 93 | 0.673522 |
7310b5cea020120c849724e4906bba0c1bd978cc | 636 | ex | Elixir | lib/pulsar_ex/producer_supervisor.ex | blueshift-labs/pulsar_ex | 8fd5b6b7fa98b172645bce56dd3a46395935e2e0 | [
"MIT"
] | 3 | 2021-04-16T01:54:20.000Z | 2022-01-09T23:26:56.000Z | lib/pulsar_ex/producer_supervisor.ex | blueshift-labs/pulsar_ex | 8fd5b6b7fa98b172645bce56dd3a46395935e2e0 | [
"MIT"
] | null | null | null | lib/pulsar_ex/producer_supervisor.ex | blueshift-labs/pulsar_ex | 8fd5b6b7fa98b172645bce56dd3a46395935e2e0 | [
"MIT"
] | null | null | null | defmodule PulsarEx.ProducerSupervisor do
use Supervisor
alias PulsarEx.{ProducerRegistry, Producers, ProducerManager}
def start_link(_) do
Supervisor.start_link(__MODULE__, :init, name: __MODULE__)
end
@impl true
def init(:init) do
producer_opts = Application.get_env(:pulsar_ex, :producer_opts, [])
auto_start = Keyword.get(producer_opts, :auto_start, true)
children = [
{Registry, keys: :unique, name: ProducerRegistry},
{DynamicSupervisor, strategy: :one_for_one, name: Producers},
{ProducerManager, auto_start}
]
Supervisor.init(children, strategy: :one_for_all)
end
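  # `auto_start` is read from application config; a config sketch (the value shown
  # is an example, not a documented default):
  #
  #     config :pulsar_ex, :producer_opts, auto_start: false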
end
| 26.5 | 71 | 0.720126 |
7310e46352eedb3f1541b0c5b876f487fa76ed99 | 6,646 | ex | Elixir | lib/spotify/models/audio_features.ex | chippers/spotify_web_api | 221a197dbac4971f87e9917d02cb335e6a42b726 | [
"MIT"
] | null | null | null | lib/spotify/models/audio_features.ex | chippers/spotify_web_api | 221a197dbac4971f87e9917d02cb335e6a42b726 | [
"MIT"
] | null | null | null | lib/spotify/models/audio_features.ex | chippers/spotify_web_api | 221a197dbac4971f87e9917d02cb335e6a42b726 | [
"MIT"
] | null | null | null | defmodule Spotify.AudioFeatures do
@moduledoc """
A complete audio features object.
[Spotify Docs](https://beta.developer.spotify.com/documentation/web-api/reference/object-model/#audio-features-object)
"""
@behaviour Spotify.ObjectModel
@typedoc """
A float measurement of Acousticness.
A confidence measure from 0.0 to 1.0 of whether the track is acoustic.
1.0 represents high confidence the track is acoustic.
"""
@type acousticness :: float
@typedoc """
An HTTP URL to access the full audio analysis of this track.
An access token is required to access this data.
"""
@type analysis_url :: String.t
@typedoc """
How danceable a track is.
Danceability describes how suitable a track is for dancing based on a
combination of musical elements including tempo, rhythm stability, beat
strength, and overall regularity. A value of 0.0 is least danceable and
1.0 is most danceable.
"""
@type danceability :: float
@typedoc """
The duration of the track in milliseconds.
"""
@type duration_ms :: integer
@typedoc """
Measurement of intensity and activity.
Energy is a measure from 0.0 to 1.0 and represents a perceptual measure of
intensity and activity. Typically, energetic tracks feel fast, loud, and noisy.
For example, death metal has high energy, while a Bach prelude scores low on the scale.
Perceptual features contributing to this attribute include dynamic range,
perceived loudness, timbre, onset rate, and general entropy.
"""
@type energy :: float
@typedoc """
The Spotify ID for the track.
"""
@type id :: String.t
@typedoc """
Measurement of the likelihood the track is instrumental.
Predicts whether a track contains no vocals. “Ooh” and “aah” sounds are treated as
instrumental in this context. Rap or spoken word tracks are clearly “vocal”.
The closer the instrumentalness value is to 1.0, the greater likelihood the track
contains no vocal content. Values above 0.5 are intended to represent instrumental
tracks, but confidence is higher as the value approaches 1.0.
"""
@type instrumentalness :: float
@typedoc """
The key the track is in.
Integers map to pitches using standard Pitch
Class notation. E.g. 0 = C, 1 = C♯/D♭, 2 = D, and so on.
"""
@type key :: integer
@typedoc """
  Measurement of the likelihood the track is live.
Detects the presence of an audience in the recording. Higher liveness values
represent an increased probability that the track was performed live. A value
above 0.8 provides strong likelihood that the track is live.
"""
@type liveness :: float
@typedoc """
Relative Loudness of a track compared to other Spotify tracks.
The overall loudness of a track in decibels (dB). Loudness values are averaged
across the entire track and are useful for comparing relative loudness of tracks.
Loudness is the quality of a sound that is the primary psychological correlate of
  physical strength (amplitude). Values typically range between -60 and 0 dB.
"""
@type loudness :: float
@typedoc """
The modality of the track.
Mode indicates the modality (major or minor) of a track, the type of scale from
which its melodic content is derived. Major is represented by 1 and minor is 0.
"""
@type mode :: integer
@typedoc """
  The detected presence of speech in a track.
Speechiness detects the presence of spoken words in a track. The more exclusively
speech-like the recording (e.g. talk show, audio book, poetry), the closer to 1.0
the attribute value. Values above 0.66 describe tracks that are probably made entirely
of spoken words. Values between 0.33 and 0.66 describe tracks that may contain both
music and speech, either in sections or layered, including such cases as rap music.
Values below 0.33 most likely represent music and other non-speech-like tracks.
"""
@type speechiness :: float
@typedoc """
The overall estimated tempo of a track in beats per minute (BPM).
In musical terminology, tempo is the speed or pace of a given
piece and derives directly from the average beat duration.
"""
@type tempo :: float
@typedoc """
An estimated overall time signature of a track.
The time signature (meter) is a notational convention to specify how many beats are in each bar (or measure).
"""
@type time_signature :: integer
@typedoc """
A link to the Web API endpoint providing full details of the track.
"""
@type track_href :: String.t
@typedoc """
The object type: `audio_features`
"""
@type type :: String.t
@typedoc """
The Spotify URI for the track.
"""
@type uri :: String.t
@typedoc """
The positiveness of a track.
A measure from 0.0 to 1.0 describing the musical positiveness conveyed by a track.
Tracks with high valence sound more positive (e.g. happy, cheerful, euphoric),
while tracks with low valence sound more negative (e.g. sad, depressed, angry).
"""
@type valence :: float
defstruct [
:acousticness,
:analysis_url,
:danceability,
:duration_ms,
:energy,
:id,
:instrumentalness,
:key,
:liveness,
:loudness,
:mode,
:speechiness,
:tempo,
:time_signature,
:track_href,
:type,
:uri,
    :valence
]
@typedoc """
The full Audio Features object.
Contains all the values listed in the
[Spotify Docs](https://beta.developer.spotify.com/documentation/web-api/reference/object-model/#audio-features-object)
"""
@type t :: %__MODULE__{
acousticness: __MODULE__.acousticness | nil,
analysis_url: __MODULE__.analysis_url | nil,
danceability: __MODULE__.danceability | nil,
duration_ms: __MODULE__.duration_ms | nil,
energy: __MODULE__.energy | nil,
id: __MODULE__.id | nil,
instrumentalness: __MODULE__.instrumentalness | nil,
key: __MODULE__.key | nil,
liveness: __MODULE__.liveness | nil,
loudness: __MODULE__.loudness | nil,
mode: __MODULE__.mode | nil,
speechiness: __MODULE__.speechiness | nil,
tempo: __MODULE__.tempo | nil,
time_signature: __MODULE__.time_signature | nil,
track_href: __MODULE__.track_href | nil,
type: __MODULE__.type | nil,
uri: __MODULE__.uri | nil,
          valence: __MODULE__.valence | nil
}
def as do
%__MODULE__{}
end
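  # Illustrative only (the values are invented): a populated struct can be pattern
  # matched to pull out individual attributes.
  #
  #     %Spotify.AudioFeatures{tempo: tempo, key: key, mode: mode} = features
  #     # tempo: 120.03, key: 5, mode: 1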
end
| 32.419512 | 122 | 0.676648 |
7310e5be2abd7ea46da422b93460e3c3511443d1 | 4,438 | ex | Elixir | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/backend_rule.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/backend_rule.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/backend_rule.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceConsumerManagement.V1.Model.BackendRule do
@moduledoc """
A backend rule provides configuration for an individual API element.
## Attributes
* `address` (*type:* `String.t`, *default:* `nil`) - The address of the API backend. The scheme is used to determine the backend protocol and security. The following schemes are accepted: SCHEME PROTOCOL SECURITY http:// HTTP None https:// HTTP TLS grpc:// gRPC None grpcs:// gRPC TLS It is recommended to explicitly include a scheme. Leaving out the scheme may cause constrasting behaviors across platforms. If the port is unspecified, the default is: - 80 for schemes without TLS - 443 for schemes with TLS For HTTP backends, use protocol to specify the protocol version.
* `deadline` (*type:* `float()`, *default:* `nil`) - The number of seconds to wait for a response from a request. The default varies based on the request protocol and deployment environment.
* `disableAuth` (*type:* `boolean()`, *default:* `nil`) - When disable_auth is true, a JWT ID token won't be generated and the original "Authorization" HTTP header will be preserved. If the header is used to carry the original token and is expected by the backend, this field must be set to true to preserve the header.
* `jwtAudience` (*type:* `String.t`, *default:* `nil`) - The JWT audience is used when generating a JWT ID token for the backend. This ID token will be added in the HTTP "authorization" header, and sent to the backend.
* `minDeadline` (*type:* `float()`, *default:* `nil`) - Minimum deadline in seconds needed for this method. Calls having deadline value lower than this will be rejected.
* `operationDeadline` (*type:* `float()`, *default:* `nil`) - The number of seconds to wait for the completion of a long running operation. The default is no deadline.
* `pathTranslation` (*type:* `String.t`, *default:* `nil`) -
* `protocol` (*type:* `String.t`, *default:* `nil`) - The protocol used for sending a request to the backend. The supported values are "http/1.1" and "h2". The default value is inferred from the scheme in the address field: SCHEME PROTOCOL http:// http/1.1 https:// http/1.1 grpc:// h2 grpcs:// h2 For secure HTTP backends (https://) that support HTTP/2, set this field to "h2" for improved performance. Configuring this field to non-default values is only supported for secure HTTP backends. This field will be ignored for all other backends. See https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids for more details on the supported values.
* `selector` (*type:* `String.t`, *default:* `nil`) - Selects the methods to which this rule applies. Refer to selector for syntax details.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:address => String.t(),
:deadline => float(),
:disableAuth => boolean(),
:jwtAudience => String.t(),
:minDeadline => float(),
:operationDeadline => float(),
:pathTranslation => String.t(),
:protocol => String.t(),
:selector => String.t()
}
field(:address)
field(:deadline)
field(:disableAuth)
field(:jwtAudience)
field(:minDeadline)
field(:operationDeadline)
field(:pathTranslation)
field(:protocol)
field(:selector)
end
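# A hand-rolled decode of a JSON backend rule into this struct, for illustration only
# (the JSON values are invented; within the generated client this normally happens
# behind the API call functions):
#
#     json = ~s({"address": "https://backend.example.com", "deadline": 30.0, "selector": "my.api.Method"})
#     Poison.decode!(json, as: %GoogleApi.ServiceConsumerManagement.V1.Model.BackendRule{})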
defimpl Poison.Decoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.BackendRule do
def decode(value, options) do
GoogleApi.ServiceConsumerManagement.V1.Model.BackendRule.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.BackendRule do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 62.507042 | 700 | 0.723073 |
731125418abe38b69c02a1722ddcbbe7e34ed282 | 353 | exs | Elixir | test/absinthe/integration/execution/aliases/all_caps_alias_test.exs | hauptbenutzer/absinthe | f1d76e5c249ca337187971b0c96b1870f53716ed | [
"MIT"
] | null | null | null | test/absinthe/integration/execution/aliases/all_caps_alias_test.exs | hauptbenutzer/absinthe | f1d76e5c249ca337187971b0c96b1870f53716ed | [
"MIT"
] | null | null | null | test/absinthe/integration/execution/aliases/all_caps_alias_test.exs | hauptbenutzer/absinthe | f1d76e5c249ca337187971b0c96b1870f53716ed | [
"MIT"
] | 1 | 2018-11-16T02:34:40.000Z | 2018-11-16T02:34:40.000Z | defmodule Elixir.Absinthe.Integration.Execution.Aliases.AllCapsAliasTest do
use ExUnit.Case, async: true
@query """
query {
thing(id: "foo") {
FOO: name
}
}
"""
test "scenario #1" do
assert {:ok, %{data: %{"thing" => %{"FOO" => "Foo"}}}} ==
Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])
end
end
| 20.764706 | 75 | 0.580737 |
7311375a0cf6e0b15a7f2229d0045c3724425b26 | 3,726 | ex | Elixir | lib/socializer/demo_manager.ex | adam-h/socializer | 22bbb34a6e707af9cf608c5eeb6189d1f067a95b | [
"MIT"
] | 311 | 2019-04-21T22:15:08.000Z | 2022-01-23T14:07:03.000Z | lib/socializer/demo_manager.ex | adam-h/socializer | 22bbb34a6e707af9cf608c5eeb6189d1f067a95b | [
"MIT"
] | 9 | 2020-09-07T09:38:58.000Z | 2022-02-26T18:07:44.000Z | lib/socializer/demo_manager.ex | adam-h/socializer | 22bbb34a6e707af9cf608c5eeb6189d1f067a95b | [
"MIT"
] | 49 | 2019-04-22T01:29:50.000Z | 2022-03-23T04:34:35.000Z | defmodule Socializer.DemoManager do
import Ecto.Query
alias Socializer.{
DemoManager,
Repo,
Comment,
Conversation,
ConversationUser,
Message,
Post,
User
}
def reset_and_seed_database!(force \\ false) do
if force || System.get_env("CLEAR_DB_WEEKLY") do
DemoManager.reset_database!()
DemoManager.seed!()
end
end
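  # For example, to force a reset from an IEx session regardless of the env flag
  # (illustrative):
  #
  #     Socializer.DemoManager.reset_and_seed_database!(true)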
def reset_database! do
# Deletion order is to avoid violating foreign-key
# constraints.
# Note: does not delete users.
Repo.delete_all(Comment)
Repo.delete_all(Post)
Repo.delete_all(Message)
Repo.delete_all(ConversationUser)
Repo.delete_all(Conversation)
end
def seed! do
users =
case Repo.aggregate(User, :count, :id) > 0 do
true ->
Repo.all(
from u in User,
where:
u.email in [
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]"
]
)
false ->
[
Repo.insert!(%User{
name: "Joe Smith",
email: "[email protected]",
password: random_password()
}),
Repo.insert!(%User{
name: "Jane Doe",
email: "[email protected]",
password: random_password()
}),
Repo.insert!(%User{
name: "Jeremy Peters",
email: "[email protected]",
password: random_password()
}),
Repo.insert!(%User{
name: "Jack Hawk",
email: "[email protected]",
            password: random_password()
})
]
end
posts =
[
"You have your way. I have my way. As for the right way, the correct way, and the only way, it does not exist.",
"A concept is a brick. It can be used to build a courthouse of reason. Or it can be thrown through the window.",
"The struggle itself toward the heights is enough to fill a man’s heart. One must imagine Sisyphus happy."
]
|> Enum.map(fn body ->
Repo.insert!(%Post{
body: body,
user_id: Enum.random(users).id
})
end)
[
"We live in a world where there is more and more information, and less and less meaning.",
"Consider the cattle, grazing as they pass you by. They do not know what is meant by yesterday or today, they leap about, eat, rest, digest, leap about again, and so from morn till night and from day to day, fettered to the moment and its pleasure or displeasure, and thus neither melancholy nor bored.",
"Each day is a little life: every waking and rising a little birth, every fresh morning a little youth, every going to rest and sleep a little death.",
"It has always seemed to me that my existence consisted purely and exclusively of nothing but the most outrageous nonsense.",
"Hello babies. Welcome to Earth. It's hot in the summer and cold in the winter. It's round and wet and crowded. On the outside, babies, you've got a hundred years here. There's only one rule that I know of, babies - God damn it, you've got to be kind.",
"The place to improve the world is first in one's own heart and head and hands, and then work outward from there."
]
|> Enum.map(fn body ->
Repo.insert!(%Comment{
body: body,
post_id: Enum.random(posts).id,
user_id: Enum.random(users).id
})
end)
end
defp random_password do
:crypto.strong_rand_bytes(32)
|> Base.encode64()
|> binary_part(0, 32)
end
end
| 34.183486 | 310 | 0.589372 |
731144fbadef1ad5edec9f90265769b226d65f4a | 1,413 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/playlist_content_details.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/you_tube/lib/google_api/you_tube/v3/model/playlist_content_details.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/playlist_content_details.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.PlaylistContentDetails do
@moduledoc """
## Attributes
* `itemCount` (*type:* `integer()`, *default:* `nil`) - The number of videos in the playlist.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:itemCount => integer()
}
field(:itemCount)
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.PlaylistContentDetails do
def decode(value, options) do
GoogleApi.YouTube.V3.Model.PlaylistContentDetails.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.PlaylistContentDetails do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 30.06383 | 97 | 0.739561 |
73115f96ef31ce65cf53895cead23931338ed034 | 370 | ex | Elixir | apps/nerves_hub_www/lib/nerves_hub_www_web/views/org_key_view.ex | tonnenpinguin/nerves_hub_web | 9d36921eb7e20d20a3e3bd308cc98ad7b60cfa72 | [
"Apache-2.0"
] | 111 | 2018-07-25T01:07:51.000Z | 2022-01-25T17:03:01.000Z | apps/nerves_hub_www/lib/nerves_hub_www_web/views/org_key_view.ex | tonnenpinguin/nerves_hub_web | 9d36921eb7e20d20a3e3bd308cc98ad7b60cfa72 | [
"Apache-2.0"
] | 361 | 2018-07-22T12:53:00.000Z | 2022-03-31T18:50:34.000Z | apps/nerves_hub_www/lib/nerves_hub_www_web/views/org_key_view.ex | tonnenpinguin/nerves_hub_web | 9d36921eb7e20d20a3e3bd308cc98ad7b60cfa72 | [
"Apache-2.0"
] | 54 | 2018-08-26T02:58:04.000Z | 2022-03-09T10:12:19.000Z | defmodule NervesHubWWWWeb.OrgKeyView do
use NervesHubWWWWeb, :view
def top_level_error_message(%Ecto.Changeset{errors: errors}) do
if Keyword.has_key?(errors, :firmwares) do
"Key is in use. You must delete any firmwares signed by the corresponding private key"
else
"Oops, something went wrong! Please check the errors below."
end
end
end
| 30.833333 | 92 | 0.737838 |
73119e5a3c43002a1f32144348e2c75c6d94d1d0 | 1,164 | ex | Elixir | lib/grapevine/statistics/player_statistic.ex | shanesveller/grapevine | fe74ade1adff88dfe4c1ab55fee3902dbb4664fe | [
"MIT"
] | null | null | null | lib/grapevine/statistics/player_statistic.ex | shanesveller/grapevine | fe74ade1adff88dfe4c1ab55fee3902dbb4664fe | [
"MIT"
] | null | null | null | lib/grapevine/statistics/player_statistic.ex | shanesveller/grapevine | fe74ade1adff88dfe4c1ab55fee3902dbb4664fe | [
"MIT"
] | null | null | null | defmodule Grapevine.Statistics.PlayerStatistic do
@moduledoc """
Player Statistic Schema
"""
use Ecto.Schema
import Ecto.Changeset
alias Grapevine.Games.Game
@type t :: %__MODULE__{}
schema "player_statistics" do
field(:type, :string)
field(:player_count, :integer)
field(:player_names, {:array, :string})
field(:recorded_at, :utc_datetime)
belongs_to(:game, Game)
end
def socket_changeset(struct, game, players, recorded_time) do
struct
|> change()
|> put_change(:type, "socket")
|> put_change(:game_id, game.id)
|> put_change(:player_count, length(players))
|> put_change(:player_names, players)
|> put_change(:recorded_at, DateTime.truncate(recorded_time, :second))
|> foreign_key_constraint(:game_id)
end
def mssp_changeset(struct, game, player_count, recorded_time) do
struct
|> change()
|> put_change(:type, "mssp")
|> put_change(:game_id, game.id)
|> put_change(:player_count, player_count)
|> put_change(:player_names, [])
|> put_change(:recorded_at, DateTime.truncate(recorded_time, :second))
|> foreign_key_constraint(:game_id)
end
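  # Either changeset is typically piped straight into a Repo insert, e.g.
  # (illustrative; assumes the application's `Grapevine.Repo`):
  #
  #     %PlayerStatistic{}
  #     |> PlayerStatistic.socket_changeset(game, ["player_one"], DateTime.utc_now())
  #     |> Grapevine.Repo.insert()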
end
| 25.866667 | 74 | 0.685567 |
7311ac6828a303ae635ec8947baebb7cd0f78be9 | 776 | exs | Elixir | elixir_http_server_starter/test/HttpServerStarter_test.exs | andrea-prearo/http_server_starter_collection | 229f2221ed941193e5a85486c662ea70b2267151 | [
"MIT"
] | null | null | null | elixir_http_server_starter/test/HttpServerStarter_test.exs | andrea-prearo/http_server_starter_collection | 229f2221ed941193e5a85486c662ea70b2267151 | [
"MIT"
] | null | null | null | elixir_http_server_starter/test/HttpServerStarter_test.exs | andrea-prearo/http_server_starter_collection | 229f2221ed941193e5a85486c662ea70b2267151 | [
"MIT"
] | null | null | null | defmodule HttpServerStarter.Test do
use ExUnit.Case, async: true
use Plug.Test
@opts HttpServerStarter.Router.init([])
test "returns Hello World!" do
conn = conn(:get, "/")
conn = HttpServerStarter.Router.call(conn, @opts)
assert conn.state == :sent
assert conn.status == 200
assert Enum.member?(conn.resp_headers, {"content-type", "application/json; charset=utf-8"})
assert conn.resp_body == "Hello World!"
end
test "returns Not Found" do
conn = conn(:get, "/notexisting")
conn = HttpServerStarter.Router.call(conn, @opts)
assert conn.state == :sent
assert conn.status == 404
assert Enum.member?(conn.resp_headers, {"content-type", "text/html; charset=utf-8"})
assert conn.resp_body == "Not Found"
end
end
| 26.758621 | 95 | 0.673969 |
7311d7aec1085a9642c9809711f284f04fc0ef66 | 568 | exs | Elixir | test/tp_link/local/message_test.exs | balexand/tp_link | 1005822b7b2a4d4194a8d166b808d99c0542b987 | [
"MIT"
] | null | null | null | test/tp_link/local/message_test.exs | balexand/tp_link | 1005822b7b2a4d4194a8d166b808d99c0542b987 | [
"MIT"
] | 3 | 2022-02-12T19:08:50.000Z | 2022-03-23T13:21:39.000Z | test/tp_link/local/message_test.exs | balexand/tp_link | 1005822b7b2a4d4194a8d166b808d99c0542b987 | [
"MIT"
] | null | null | null | defmodule TpLink.Local.MessageTest do
use ExUnit.Case, async: true
alias TpLink.Local.Message
test "decode" do
assert %{"system" => %{"set_relay_state" => %{"state" => 1}}} ==
"0PKB+Iv/mvfV75S2xaDUi/mc8JHot8Sw0aXA4tijgfKG55P21O7fot+i"
|> Base.decode64!()
|> Message.decode()
end
test "encode" do
assert "0PKB+Iv/mvfV75S2xaDUi/mc8JHot8Sw0aXA4tijgfKG55P21O7fot+i" ==
%{"system" => %{"set_relay_state" => %{"state" => 1}}}
|> Message.encode()
|> Base.encode64()
end
end
| 28.4 | 72 | 0.582746 |