hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9eb9dc021f34ac7d03213267b9ebcaef2500ad6a | 1,577 | ex | Elixir | lib/grizzly/zwave/commands/association_set.ex | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | [
"Apache-2.0"
] | 76 | 2019-09-04T16:56:58.000Z | 2022-03-29T06:54:36.000Z | lib/grizzly/zwave/commands/association_set.ex | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | [
"Apache-2.0"
] | 124 | 2019-09-05T14:01:24.000Z | 2022-02-28T22:58:14.000Z | lib/grizzly/zwave/commands/association_set.ex | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | [
"Apache-2.0"
] | 10 | 2019-10-23T19:25:45.000Z | 2021-11-17T13:21:20.000Z | defmodule Grizzly.ZWave.Commands.AssociationSet do
@moduledoc """
Module for the `ASSOCIATION_SET` command
The node receiving this command should add the specified node ids to the
association group. However, if the association group for the node is full
this command is ignored.
Params:
- `:grouping_identifier` - the association grouping identifier (required)
- `:nodes` - list of nodes to add the grouping identifier (required)
"""
@behaviour Grizzly.ZWave.Command
alias Grizzly.ZWave
alias Grizzly.ZWave.Command
alias Grizzly.ZWave.CommandClasses.Association
# I think grouping identifier is list that gets masked, but for now we will
# just work as if if the identifier is only one.
@type param :: {:grouping_identifier, byte()} | {:nodes, [ZWave.node_id()]}
@impl true
@spec new([param]) :: {:ok, Command.t()}
def new(params) do
  # TODO: validate params
  {:ok,
   %Command{
     name: :association_set,
     command_byte: 0x01,
     command_class: Association,
     params: params,
     impl: __MODULE__
   }}
end
@impl true
@spec encode_params(Command.t()) :: binary()
def encode_params(command) do
  grouping_identifier = Command.param!(command, :grouping_identifier)
  nodes = Command.param!(command, :nodes)

  # The wire format is the grouping id byte followed by one byte per node id.
  IO.iodata_to_binary([grouping_identifier | nodes])
end
@impl true
@spec decode_params(binary) :: {:ok, [param()]}
def decode_params(<<grouping_identifier, rest::binary>>) do
  # Every remaining byte is one node id.
  {:ok, [grouping_identifier: grouping_identifier, nodes: :binary.bin_to_list(rest)]}
end
end
| 29.754717 | 95 | 0.7026 |
9eb9fa856b99ab72c215b0f06624b6cf437cbab8 | 2,605 | ex | Elixir | clients/calendar/lib/google_api/calendar/v3/model/free_busy_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/calendar/lib/google_api/calendar/v3/model/free_busy_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/calendar/lib/google_api/calendar/v3/model/free_busy_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Calendar.V3.Model.FreeBusyRequest do
  @moduledoc """
  Request body for a free/busy query: the calendars and/or groups to query
  and the time window of interest.

  ## Attributes

  - calendarExpansionMax (integer()): Maximal number of calendars for which FreeBusy information is to be provided. Optional. Maximum value is 50. Defaults to: `null`.
  - groupExpansionMax (integer()): Maximal number of calendar identifiers to be provided for a single group. Optional. An error is returned for a group with more members than this value. Maximum value is 100. Defaults to: `null`.
  - items ([FreeBusyRequestItem]): List of calendars and/or groups to query. Defaults to: `null`.
  - timeMax (DateTime.t): The end of the interval for the query formatted as per RFC3339. Defaults to: `null`.
  - timeMin (DateTime.t): The start of the interval for the query formatted as per RFC3339. Defaults to: `null`.
  - timeZone (String.t): Time zone used in the response. Optional. The default is UTC. Defaults to: `null`.
  """

  # Supplies the field/2 macro and the model's decode machinery used by the
  # Poison protocol implementations for this struct.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :calendarExpansionMax => any(),
          :groupExpansionMax => any(),
          :items => list(GoogleApi.Calendar.V3.Model.FreeBusyRequestItem.t()),
          :timeMax => DateTime.t(),
          :timeMin => DateTime.t(),
          :timeZone => any()
        }

  # Field declarations drive JSON (de)serialization; `as:`/`type:` tell the
  # decoder how to build nested/typed values.
  field(:calendarExpansionMax)
  field(:groupExpansionMax)
  field(:items, as: GoogleApi.Calendar.V3.Model.FreeBusyRequestItem, type: :list)
  field(:timeMax, as: DateTime)
  field(:timeMin, as: DateTime)
  field(:timeZone)
end
defimpl Poison.Decoder, for: GoogleApi.Calendar.V3.Model.FreeBusyRequest do
  # `@for` is the struct module this impl targets; decoding is delegated to
  # the decode/2 the model gets from its model base.
  def decode(value, options), do: @for.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Calendar.V3.Model.FreeBusyRequest do
  # Encoding is generic across Gax models, so hand the value straight over.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 41.349206 | 229 | 0.728983 |
9eba61e0c7b499169bf21cc927d985634dff1949 | 186 | ex | Elixir | lib/kucoin/v1/sub_user.ex | insprac/elixir_kucoin | 1bfea2f014f4151e99998189b171ece7d896ea5e | [
"MIT"
] | null | null | null | lib/kucoin/v1/sub_user.ex | insprac/elixir_kucoin | 1bfea2f014f4151e99998189b171ece7d896ea5e | [
"MIT"
] | null | null | null | lib/kucoin/v1/sub_user.ex | insprac/elixir_kucoin | 1bfea2f014f4151e99998189b171ece7d896ea5e | [
"MIT"
] | null | null | null | defmodule KuCoin.V1.SubUser do
use Kina.Schema

# Maps the KuCoin v1 "sub user" API payload onto snake_case struct fields;
# the :key option names the camelCase key in the source JSON.
schema do
  field :user_id, :string, key: :userId
  field :sub_name, :string, key: :subName
  field :remarks, :string
end
end
| 18.6 | 43 | 0.682796 |
9eba6a27bf299ede4879e8838d82b239f32d8b81 | 9,504 | ex | Elixir | lib/aws/generated/resource_groups.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/resource_groups.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/resource_groups.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.ResourceGroups do
@moduledoc """
AWS Resource Groups
AWS Resource Groups lets you organize AWS resources such as Amazon EC2
instances, Amazon Relational Database Service databases, and Amazon S3 buckets
into groups using criteria that you define as tags.
A resource group is a collection of resources that match the resource types
specified in a query, and share one or more tags or portions of tags. You can
create a group of resources based on their roles in your cloud infrastructure,
lifecycle stages, regions, application layers, or virtually any criteria.
Resource Groups enable you to automate management tasks, such as those in AWS
Systems Manager Automation documents, on tag-related resources in AWS Systems
Manager. Groups of tagged resources also let you quickly view a custom console
in AWS Systems Manager that shows AWS Config compliance and other monitoring
data about member resources.
To create a resource group, build a resource query, and specify tags that
identify the criteria that members of the group have in common. Tags are
key-value pairs.
For more information about Resource Groups, see the [AWS Resource Groups User Guide](https://docs.aws.amazon.com/ARG/latest/userguide/welcome.html).
AWS Resource Groups uses a REST-compliant API that you can use to perform the
following types of operations.
* Create, Read, Update, and Delete (CRUD) operations on resource
groups and resource query entities
* Applying, editing, and removing tags from resource groups
* Resolving resource group member ARNs so they can be returned as
search results
* Getting data about resources that are members of a group
* Searching AWS resources based on a resource query
"""
@doc """
Creates a resource group with the given name and description, optionally
including a resource query or a service configuration.
"""
def create_group(client, input, options \\ []) do
  request(client, :post, "/groups", [], [], input, options, nil)
end
@doc """
Deletes the specified resource group. Member resources are not deleted;
only the group structure itself is removed.
"""
def delete_group(client, input, options \\ []) do
  request(client, :post, "/delete-group", [], [], input, options, nil)
end
@doc """
Returns information about the specified resource group.
"""
def get_group(client, input, options \\ []) do
  request(client, :post, "/get-group", [], [], input, options, nil)
end
@doc """
Returns the service configuration associated with the specified resource
group.

AWS Resource Groups supports configurations for the
`AWS::EC2::CapacityReservationPool` group type (Amazon EC2 capacity
reservation pools); see "Working with capacity reservation groups" in the
EC2 User Guide.
"""
def get_group_configuration(client, input, options \\ []) do
  request(client, :post, "/get-group-configuration", [], [], input, options, nil)
end
@doc """
Retrieves the resource query associated with the specified resource group.
"""
def get_group_query(client, input, options \\ []) do
  request(client, :post, "/get-group-query", [], [], input, options, nil)
end
@doc """
Returns the tags associated with the resource group identified by `arn`.
"""
def get_tags(client, arn, options \\ []) do
  # GET with no request body, hence the `nil` input.
  request(client, :get, "/resources/#{URI.encode(arn)}/tags", [], [], nil, options, nil)
end
@doc """
Adds the specified resources to the specified group.
"""
def group_resources(client, input, options \\ []) do
  request(client, :post, "/group-resources", [], [], input, options, nil)
end
@doc """
Returns the ARNs of the resources that are members of the specified
resource group.
"""
def list_group_resources(client, input, options \\ []) do
  request(client, :post, "/list-group-resources", [], [], input, options, nil)
end
@doc """
Returns the existing resource groups in the caller's account.
"""
def list_groups(client, input, options \\ []) do
  # Pagination parameters travel in the query string, not the body.
  {query_, input} =
    AWS.Request.build_params(
      [{"MaxResults", "maxResults"}, {"NextToken", "nextToken"}],
      input
    )

  request(client, :post, "/groups-list", query_, [], input, options, nil)
end
@doc """
Returns the AWS resource identifiers that match the specified query. The
query uses the same format as a resource query in a CreateGroup or
UpdateGroupQuery operation.
"""
def search_resources(client, input, options \\ []) do
  request(client, :post, "/resources/search", [], [], input, options, nil)
end
@doc """
Adds tags to the resource group with the given ARN. Existing tags that are
not named in the request are left unchanged.

Do not store personally identifiable information (PII) or other
confidential or sensitive data in tags; they are used for billing and
administration and are not intended for private data.
"""
def tag(client, arn, input, options \\ []) do
  request(client, :put, "/resources/#{URI.encode(arn)}/tags", [], [], input, options, nil)
end
@doc """
Removes the specified resources from the specified group.
"""
def ungroup_resources(client, input, options \\ []) do
  request(client, :post, "/ungroup-resources", [], [], input, options, nil)
end
@doc """
Deletes tags from the specified resource group.
"""
def untag(client, arn, input, options \\ []) do
  request(client, :patch, "/resources/#{URI.encode(arn)}/tags", [], [], input, options, nil)
end
@doc """
Updates the description of an existing group. A resource group's name
cannot be changed.
"""
def update_group(client, input, options \\ []) do
  request(client, :post, "/update-group", [], [], input, options, nil)
end
@doc """
Updates the resource query of a group.
"""
def update_group_query(client, input, options \\ []) do
  request(client, :post, "/update-group-query", [], [], input, options, nil)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
        {:ok, map() | nil, map()}
        | {:error, term()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
  # Every call in this module targets the resource-groups service.
  client = %{client | service: "resource-groups"}
  host = build_host("resource-groups", client)

  url =
    host
    |> build_url(path, client)
    |> add_query(query, client)

  headers =
    AWS.Request.add_headers(
      [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}],
      headers
    )

  # Sign after the payload is final: SigV4 covers the body.
  payload = encode!(client, input)
  headers = AWS.Request.sign_v4(client, method, url, headers, payload)
  perform_request(client, method, url, payload, headers, options, success_status_code)
end
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
  case AWS.Client.request(client, method, url, payload, headers, options) do
    # Success: either the caller pinned an expected status code, or (when
    # none was given) any of the generic 200/202/204 codes counts.
    {:ok, %{status_code: status_code, body: body} = response}
    when (is_nil(success_status_code) and status_code in [200, 202, 204]) or
           status_code == success_status_code ->
      decoded = if body != "", do: decode!(client, body)
      {:ok, decoded, response}

    {:ok, response} ->
      {:error, {:unexpected_response, response}}

    {:error, _reason} = error ->
      error
  end
end
# "local" region short-circuits for development: an explicit endpoint wins,
# otherwise "localhost". Real regions use <prefix>.<region>.<endpoint>.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}), do: endpoint
defp build_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  "#{endpoint_prefix}.#{region}.#{endpoint}"
end
# Assemble the full URL from the client's protocol/port and the request path.
defp build_url(host, path, %{proto: proto, port: port}), do: "#{proto}://#{host}:#{port}#{path}"
# No query parameters: leave the URL untouched.
defp add_query(url, [], _client), do: url

# Otherwise append "?<encoded query string>".
defp add_query(url, query, client), do: "#{url}?#{encode!(client, query, :query)}"
# Serialize a payload; defaults to the service's JSON wire format.
defp encode!(client, payload, format \\ :json), do: AWS.Client.encode!(client, payload, format)
# Parse a JSON response body.
defp decode!(client, payload), do: AWS.Client.decode!(client, payload, :json)
end
| 32.772414 | 191 | 0.679293 |
9eba77f2d63070c143df4f8f6878471c7e7b0ac7 | 409 | exs | Elixir | test/diff_web/views/error_view_test.exs | hexpm/diff | 1a453d3578116af8ede4b7f37d3cc2e3e01dc731 | [
"Apache-2.0"
] | 59 | 2020-01-20T20:32:31.000Z | 2022-02-11T07:25:45.000Z | test/diff_web/views/error_view_test.exs | hexpm/diff | 1a453d3578116af8ede4b7f37d3cc2e3e01dc731 | [
"Apache-2.0"
] | 33 | 2020-01-20T19:38:09.000Z | 2021-11-19T10:08:25.000Z | test/diff_web/views/error_view_test.exs | hexpm/diff | 1a453d3578116af8ede4b7f37d3cc2e3e01dc731 | [
"Apache-2.0"
] | 12 | 2020-01-20T21:11:14.000Z | 2021-05-10T19:56:47.000Z | defmodule DiffWeb.ErrorViewTest do
use DiffWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
  # The 404 template should mention the missing page.
  assert render_to_string(DiffWeb.ErrorView, "404.html", []) =~ "Page not found"
end
test "renders 500.html" do
  # The 500 template should show a generic failure message.
  assert render_to_string(DiffWeb.ErrorView, "500.html", []) =~ "Something went wrong"
end
end
| 27.266667 | 88 | 0.726161 |
9ebaaf2adbb9432ac140849a7fb005449e884f75 | 2,261 | ex | Elixir | backend/lib/spades_game/game_player.ex | mreishus/spades | 3e06fa5c2415ff43258ec4c231f8d6c49c683fe0 | [
"MIT"
] | 9 | 2019-10-28T08:48:50.000Z | 2021-03-05T09:44:46.000Z | backend/lib/spades_game/game_player.ex | mreishus/spades | 3e06fa5c2415ff43258ec4c231f8d6c49c683fe0 | [
"MIT"
] | 227 | 2019-10-28T08:52:58.000Z | 2022-02-27T04:31:42.000Z | backend/lib/spades_game/game_player.ex | mreishus/spades | 3e06fa5c2415ff43258ec4c231f8d6c49c683fe0 | [
"MIT"
] | 4 | 2020-04-18T19:38:37.000Z | 2021-08-02T19:43:03.000Z | defmodule SpadesGame.GamePlayer do
@moduledoc """
Represents a player inside a game of spades.
They will have a hand of cards, a bid etc.
"""
alias SpadesGame.{Deck, Card, GamePlayer}
# Struct is JSON-serializable; Accessible adds Access-behaviour key lookup.
@derive Jason.Encoder
defstruct [:hand, :tricks_won, :bid]
use Accessible

@type t :: %GamePlayer{
        hand: Deck.t(),
        tricks_won: integer,
        bid: nil | integer
      }
@doc """
new/0: Create a new player with an empty hand.
"""
@spec new() :: GamePlayer.t()
def new(), do: new(Deck.new_empty())
@doc """
new/1: Create a new player holding the given hand, with no tricks won and
no bid placed yet.
"""
@spec new(Deck.t()) :: GamePlayer.t()
def new(hand) do
  %GamePlayer{hand: hand, tricks_won: 0, bid: nil}
end
@doc """
won_trick/1: Increment the number of tricks won by 1.
"""
@spec won_trick(GamePlayer.t()) :: GamePlayer.t()
def won_trick(%GamePlayer{tricks_won: n} = player) do
  %GamePlayer{player | tricks_won: n + 1}
end
# A bid is either nil (no bid) or a number of tricks from 0 to 13.
@spec set_bid(GamePlayer.t(), nil | integer) :: GamePlayer.t()
def set_bid(player, bid) when is_nil(bid) or (bid >= 0 and bid <= 13) do
  struct!(player, bid: bid)
end
@doc """
play/1: Have a player move a card from their hand to a trick.

Returns `{:ok, player, card}` when the card was in hand, otherwise
`{:error, player}` with the player unchanged.
"""
@spec play(GamePlayer.t(), Card.t()) ::
        {:ok, GamePlayer.t(), Card.t()} | {:error, GamePlayer.t()}
def play(player, card) do
  if card in player.hand do
    {:ok, _play(player, card), card}
  else
    {:error, player}
  end
end
# Remove every copy of `card` from the hand. Private: the caller has
# already verified the card is actually held.
@spec _play(GamePlayer.t(), Card.t()) :: GamePlayer.t()
defp _play(player, card) do
  %{player | hand: Enum.filter(player.hand, &(&1 != card))}
end
# True when at least one card of the given suit is in the player's hand.
@spec has_suit?(GamePlayer.t(), :s | :h | :c | :d) :: boolean
def has_suit?(player, suit) do
  Enum.any?(player.hand, &(&1.suit == suit))
end
# Number of cards currently held.
def hand_length(player), do: length(player.hand)
# Number of spades currently held.
def spades_length(player), do: Enum.count(player.hand, &(&1.suit == :s))
end
| 25.404494 | 74 | 0.606811 |
9ebac55a8f0b7cdd5a27549a5e07fcbd63518493 | 752 | ex | Elixir | lib/protein/utils.ex | katafrakt/protein-elixir | f5ea6e079904ea0defa40e99a0e5becaa02a5735 | [
"MIT"
] | 9 | 2018-01-16T12:25:58.000Z | 2021-11-24T13:19:35.000Z | lib/protein/utils.ex | katafrakt/protein-elixir | f5ea6e079904ea0defa40e99a0e5becaa02a5735 | [
"MIT"
] | 3 | 2018-09-07T16:09:57.000Z | 2019-02-19T10:37:13.000Z | lib/protein/utils.ex | katafrakt/protein-elixir | f5ea6e079904ea0defa40e99a0e5becaa02a5735 | [
"MIT"
] | 3 | 2018-05-27T08:56:52.000Z | 2021-11-24T13:19:37.000Z | defmodule Protein.Utils do
@moduledoc false
alias Confix
alias Protein.{AMQPAdapter, HTTPAdapter}
# Fetch `key` from `opts` (falling back to `default`) and run the value
# through Confix so config references are resolved.
def get_config(opts, key, default \\ nil) do
  Confix.parse(Keyword.get(opts, key, default))
end
# Like get_config/3 but raises when `key` is missing from `opts`.
def get_config!(opts, key) do
  Confix.parse(Keyword.fetch!(opts, key))
end
# Service mocking defaults to on in the test environment, but can be
# overridden via the :mocking_enabled application setting.
def mocking_enabled? do
  default = Mix.env() == :test
  Application.get_env(:protein, :mocking_enabled, default)
end
# Translate the shorthand transport atoms to their adapter modules; any
# other value is assumed to already be an adapter module.
def resolve_adapter(adapter) do
  case adapter do
    :amqp -> AMQPAdapter
    :http -> HTTPAdapter
    adapter_mod -> adapter_mod
  end
end
# Derive the adapter's server module name, e.g. Foo -> Foo.Server.
def resolve_adapter_server_mod(adapter_mod) do
  String.to_atom("#{adapter_mod}.Server")
end
# Derive the adapter's connection module name, e.g. Foo -> Foo.Connection.
def resolve_adapter_connection_mod(adapter_mod) do
  String.to_atom("#{adapter_mod}.Connection")
end
end
| 21.485714 | 71 | 0.700798 |
9ebacebcf75d8526b025b1bb433a0f3a78c38f7c | 3,953 | exs | Elixir | test/changelog/notifier_test.exs | boneskull/changelog.com | 2fa2e356bb0e8fcf038c46a4a947fef98822e37d | [
"MIT"
] | null | null | null | test/changelog/notifier_test.exs | boneskull/changelog.com | 2fa2e356bb0e8fcf038c46a4a947fef98822e37d | [
"MIT"
] | null | null | null | test/changelog/notifier_test.exs | boneskull/changelog.com | 2fa2e356bb0e8fcf038c46a4a947fef98822e37d | [
"MIT"
] | null | null | null | defmodule Changelog.NotifierTest do
use Changelog.DataCase
use Bamboo.Test
import Mock
alias Changelog.{Notifier, Slack}
alias ChangelogWeb.Email
describe "notify with episode item" do
  # Stub the Slack client so no real message is sent; every send "succeeds"
  # and can later be asserted on with `called/1`.
  setup_with_mocks([
    {Slack.Client, [], [message: fn(_, _) -> true end]}
  ]) do
    :ok
  end

  test "when episode has no guests" do
    episode = insert(:published_episode)
    item = episode |> episode_news_item |> insert
    Notifier.notify(item)
    # No guests means no thank-you emails, but Slack is still notified.
    assert_no_emails_delivered()
    assert called Slack.Client.message("#main", :_)
  end

  test "when episode has guests but none of them have 'thanks' set" do
    g1 = insert(:person)
    g2 = insert(:person)
    episode = insert(:published_episode)
    insert(:episode_guest, episode: episode, person: g1, thanks: false)
    insert(:episode_guest, episode: episode, person: g2, thanks: false)
    item = episode |> episode_news_item |> insert
    Notifier.notify(item)
    # thanks: false on every guest suppresses the thank-you emails.
    assert_no_emails_delivered()
    assert called Slack.Client.message("#main", :_)
  end

  test "when episode has guests and some of them have 'thanks' set" do
    g1 = insert(:person)
    g2 = insert(:person)
    g3 = insert(:person)
    episode = insert(:published_episode)
    insert(:episode_guest, episode: episode, person: g1, thanks: false)
    insert(:episode_guest, episode: episode, person: g2, thanks: true)
    insert(:episode_guest, episode: episode, person: g3, thanks: true)
    item = episode |> episode_news_item |> insert
    Notifier.notify(item)
    # Only the guests flagged with thanks: true receive a thank-you email.
    assert_delivered_email Email.guest_thanks(g2, episode)
    assert_delivered_email Email.guest_thanks(g3, episode)
    assert called Slack.Client.message("#main", :_)
  end
end
describe "notify with regular item" do
  test "when item has no submitter or author" do
    item = insert(:news_item)
    Notifier.notify(item)
    assert_no_emails_delivered()
  end

  test "when submitter has email notifications enabled" do
    person = insert(:person, settings: %{email_on_submitted_news: true})
    item = insert(:news_item, submitter: person)
    Notifier.notify(item)
    assert_delivered_email Email.submitted_news_published(person, item)
  end

  test "when submitter has email notifications disabled" do
    person = insert(:person, settings: %{email_on_submitted_news: false})
    item = insert(:news_item, submitter: person)
    Notifier.notify(item)
    assert_no_emails_delivered()
  end

  test "when submitter and author are same person, notifications enabled" do
    person = insert(:person, settings: %{email_on_submitted_news: true})
    item = insert(:news_item, submitter: person, author: person)
    Notifier.notify(item)
    # The same person should not get both emails for one item; the
    # submitted-news email wins.
    assert_delivered_email Email.submitted_news_published(person, item)
    refute_delivered_email Email.authored_news_published(person, item)
  end

  test "when author has email notifications enabled" do
    person = insert(:person, settings: %{email_on_authored_news: true})
    item = insert(:news_item, author: person)
    Notifier.notify(item)
    assert_delivered_email Email.authored_news_published(person, item)
  end

  test "when author has email notifications disabled" do
    person = insert(:person, settings: %{email_on_authored_news: false})
    item = insert(:news_item, author: person)
    Notifier.notify(item)
    assert_no_emails_delivered()
  end

  test "when submitter and author both have notifications enabled" do
    submitter = insert(:person, settings: %{email_on_submitted_news: true})
    author = insert(:person, settings: %{email_on_authored_news: true})
    item = insert(:news_item, author: author, submitter: submitter)
    Notifier.notify(item)
    # Distinct people each get their own notification.
    assert_delivered_email Email.authored_news_published(author, item)
    assert_delivered_email Email.submitted_news_published(submitter, item)
  end
end
end
| 36.266055 | 78 | 0.698204 |
9ebb1a0736cc7214fe76cba96e8f80c20992d25f | 1,644 | ex | Elixir | lib/ark_client/client.ex | supaiku0/elixir-client | 2d5549ce3a876a18750a36a14ff769427688c5bb | [
"MIT"
] | null | null | null | lib/ark_client/client.ex | supaiku0/elixir-client | 2d5549ce3a876a18750a36a14ff769427688c5bb | [
"MIT"
] | null | null | null | lib/ark_client/client.ex | supaiku0/elixir-client | 2d5549ce3a876a18750a36a14ff769427688c5bb | [
"MIT"
] | null | null | null | defmodule ArkEcosystem.Client do
@moduledoc """
Documentation for ArkEcosystem.Client.
This module generates a Tesla.Client for use in future requests. Feel free
to make your own Tesla Client struct and pass that around.
"""
@doc """
Builds a `Tesla.Client` configured for the given node.

Expects a map with:

  * `:host` - base URL of the node, e.g. `"http://127.0.0.1:4002"`
  * `:version` - API version string sent in the `API-Version` header

Returns the configured client, or `:error` when the argument does not
match (missing keys or non-string values).

## Examples

    iex> ArkEcosystem.Client.new(%{host: "http://127.0.0.1:4002", version: "2"})
    %Tesla.Client{pre: [...]}
"""
@spec new(map()) :: Tesla.Client.t() | :error
def new(%{host: host, version: version})
    # BUG FIX: the guard previously also checked `is_bitstring(nethash)`,
    # but `nethash` was never bound by the pattern, which made this clause
    # a compile error. Only host and version are actually used.
    when is_bitstring(host) and is_bitstring(version) do
  headers = [
    {"Content-Type", "application/json"},
    {"API-Version", version}
  ]

  # Verbose request logging only in dev; anything else keeps Tesla's default.
  log_level =
    case Application.get_env(:ark_elixir, :env, :prod) do
      :dev -> :debug
      _ -> :default
    end

  pre = [
    {Tesla.Middleware.BaseUrl, host},
    {Tesla.Middleware.Headers, headers},
    {Tesla.Middleware.JSON, []},
    {ArkEcosystem.Client.Middleware.Logger, [log_level: log_level]}
  ]

  Tesla.build_client(pre)
end

# Fallback: anything that is not a %{host: binary, version: binary} map.
def new(_invalid) do
  :error
end
end
| 24.537313 | 84 | 0.562044 |
9ebb200f0f09fb01dc5d155b2bb1ade8295eb8ab | 3,615 | ex | Elixir | samples/client/petstore/elixir/lib/swagger_petstore/api/user.ex | wwadge/swagger-codegen | 777619d4d106b7b387f8ee8469f4ec43f3cdfdc7 | [
"Apache-2.0"
] | 13 | 2018-05-03T21:03:43.000Z | 2021-02-04T14:45:09.000Z | samples/client/petstore/elixir/lib/swagger_petstore/api/user.ex | wwadge/swagger-codegen | 777619d4d106b7b387f8ee8469f4ec43f3cdfdc7 | [
"Apache-2.0"
] | 5 | 2018-12-11T14:58:16.000Z | 2019-07-18T18:25:42.000Z | samples/client/petstore/elixir/lib/swagger_petstore/api/user.ex | wwadge/swagger-codegen | 777619d4d106b7b387f8ee8469f4ec43f3cdfdc7 | [
"Apache-2.0"
] | 11 | 2017-07-07T18:07:15.000Z | 2021-11-10T02:12:04.000Z | defmodule SwaggerPetstore.Api.User do
@moduledoc """
Documentation for SwaggerPetstore.Api.User.
"""
use Tesla
plug Tesla.Middleware.BaseUrl, "http://petstore.swagger.io:80/v2"
plug Tesla.Middleware.JSON
@doc """
Create user.

This can only be done by the logged in user.
"""
def create_user(body) do
  request(method: :post, url: "/user", body: body)
end
@doc """
Creates list of users with given input array.
"""
def create_users_with_array_input(body) do
  request(method: :post, url: "/user/createWithArray", body: body)
end
@doc """
Creates list of users with given input array.
"""
def create_users_with_list_input(body) do
  request(method: :post, url: "/user/createWithList", body: body)
end
@doc """
Delete user.

This can only be done by the logged in user.
"""
def delete_user(username) do
  request(method: :delete, url: "/user/#{username}")
end
@doc """
Get user by user name.
"""
def get_user_by_name(username) do
  request(method: :get, url: "/user/#{username}")
end
@doc """
Logs user into the system. Credentials travel as query parameters.
"""
def login_user(username, password) do
  request(method: :get, url: "/user/login", query: [username: username, password: password])
end
@doc """
Logs out current logged in user session.
"""
def logout_user() do
  request(method: :get, url: "/user/logout")
end
@doc """
Updated user.

This can only be done by the logged in user.
"""
def update_user(username, body) do
  request(method: :put, url: "/user/#{username}", body: body)
end
end
| 23.627451 | 78 | 0.609959 |
9ebb5cd969e45a0cd07982f8fbd63392ccd4f48a | 2,985 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p5beta1_product_search_results.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p5beta1_product_search_results.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p5beta1_product_search_results.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1ProductSearchResults do
  @moduledoc """
  Results for a product search request.

  ## Attributes

  * `indexTime` (*type:* `DateTime.t`, *default:* `nil`) - Timestamp of the index which provided these results. Products added to the
  product set and products removed from the product set after this time are
  not reflected in the current results.
  * `productGroupedResults` (*type:* `list(GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1ProductSearchResultsGroupedResult.t)`, *default:* `nil`) - List of results grouped by products detected in the query image. Each entry
  corresponds to one bounding polygon in the query image, and contains the
  matching products specific to that region. There may be duplicate product
  matches in the union of all the per-product results.
  * `results` (*type:* `list(GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1ProductSearchResultsResult.t)`, *default:* `nil`) - List of results, one for each product match.
  """

  # Supplies the field/2+ macro and the decode machinery used by the Poison
  # protocol implementations for this struct.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :indexTime => DateTime.t(),
          :productGroupedResults =>
            list(
              GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1ProductSearchResultsGroupedResult.t()
            ),
          :results =>
            list(
              GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1ProductSearchResultsResult.t()
            )
        }

  # Field declarations drive JSON (de)serialization; `as:`/`type:` tell the
  # decoder how to build nested/typed values.
  field(:indexTime, as: DateTime)

  field(
    :productGroupedResults,
    as: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1ProductSearchResultsGroupedResult,
    type: :list
  )

  field(
    :results,
    as: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1ProductSearchResultsResult,
    type: :list
  )
end
defimpl Poison.Decoder,
  for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1ProductSearchResults do
  # Delegate decoding to the generated decode/2 on the model module.
  def decode(value, options),
    do:
      GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1ProductSearchResults.decode(
        value,
        options
      )
end

defimpl Poison.Encoder,
  for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1ProductSearchResults do
  # All generated models encode through the shared Gax encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 37.78481 | 231 | 0.735008 |
9ebb6825a3a9c43f8362c6dfa38abe496e579843 | 1,766 | ex | Elixir | clients/double_click_bid_manager/lib/google_api/double_click_bid_manager/v1/model/upload_status.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/double_click_bid_manager/lib/google_api/double_click_bid_manager/v1/model/upload_status.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/double_click_bid_manager/lib/google_api/double_click_bid_manager/v1/model/upload_status.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.DoubleClickBidManager.V1.Model.UploadStatus do
  @moduledoc """
  Represents the status of upload.

  Auto-generated model (see the header of this file); regenerate rather than
  editing by hand.

  ## Attributes

  - errors ([String.t]): Reasons why upload can't be completed. Defaults to: `null`.
  - rowStatus ([RowStatus]): Per-row upload status. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :errors => list(any()),
          :rowStatus => list(GoogleApi.DoubleClickBidManager.V1.Model.RowStatus.t())
        }

  # field/2,3 macros (from Gax.ModelBase) declare JSON decoding rules:
  # `type: :list` marks arrays, `as:` names the nested model module.
  field(:errors, type: :list)
  field(:rowStatus, as: GoogleApi.DoubleClickBidManager.V1.Model.RowStatus, type: :list)
end
def decode(value, options) do
GoogleApi.DoubleClickBidManager.V1.Model.UploadStatus.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DoubleClickBidManager.V1.Model.UploadStatus do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.627451 | 88 | 0.742922 |
9ebb7e2c225f0f07833289965c3ab47b24d354d3 | 3,506 | ex | Elixir | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_developer_app_key.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_developer_app_key.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_developer_app_key.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1DeveloperAppKey do
  @moduledoc """
  Consumer key and secret credential for a developer app.

  Auto-generated model (see the header of this file); regenerate rather than
  editing by hand.

  ## Attributes

  *   `apiProducts` (*type:* `list(any())`, *default:* `nil`) - API products the
      credential can be used with. Associate after creation via
      UpdateDeveloperAppKey rather than at creation time.
  *   `attributes` (*type:* `list(GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1Attribute.t)`, *default:* `nil`) - Attributes associated with the credential.
  *   `consumerKey` (*type:* `String.t`, *default:* `nil`) - Consumer key.
  *   `consumerSecret` (*type:* `String.t`, *default:* `nil`) - Secret key.
  *   `expiresAt` (*type:* `String.t`, *default:* `nil`) - Expiry time in milliseconds since epoch.
  *   `expiresInSeconds` (*type:* `String.t`, *default:* `nil`) - Input only. Expiration in seconds; `-1` (default) means never; immutable once set.
  *   `issuedAt` (*type:* `String.t`, *default:* `nil`) - Creation time in milliseconds since epoch.
  *   `scopes` (*type:* `list(String.t)`, *default:* `nil`) - Scopes to apply; must already be defined on the associated API product.
  *   `status` (*type:* `String.t`, *default:* `nil`) - `approved` or `revoked`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :apiProducts => list(any()) | nil,
          :attributes => list(GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1Attribute.t()) | nil,
          :consumerKey => String.t() | nil,
          :consumerSecret => String.t() | nil,
          :expiresAt => String.t() | nil,
          :expiresInSeconds => String.t() | nil,
          :issuedAt => String.t() | nil,
          :scopes => list(String.t()) | nil,
          :status => String.t() | nil
        }

  # field/1,2,3 macros (from Gax.ModelBase) declare JSON decoding rules:
  # plain fields decode as strings, `type: :list` marks arrays, `as:` names
  # the nested model module.
  field(:apiProducts, type: :list)
  field(:attributes, as: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1Attribute, type: :list)
  field(:consumerKey)
  field(:consumerSecret)
  field(:expiresAt)
  field(:expiresInSeconds)
  field(:issuedAt)
  field(:scopes, type: :list)
  field(:status)
end
defimpl Poison.Decoder, for: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1DeveloperAppKey do
  # Delegate decoding to the generated decode/2 on the model module.
  def decode(value, options),
    do: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1DeveloperAppKey.decode(value, options)
end

defimpl Poison.Encoder, for: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1DeveloperAppKey do
  # All generated models encode through the shared Gax encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 49.380282 | 347 | 0.701369 |
9ebb806c7fe823f0e504a6e6f06f8a6711aae33c | 65 | exs | Elixir | phoenix/pfu/test/test_helper.exs | cenezaraujo/unifor_prog_funcional | 08b5d75c535b10b7dca8ac26efc1dba14fe2a4f1 | [
"Apache-2.0"
] | null | null | null | phoenix/pfu/test/test_helper.exs | cenezaraujo/unifor_prog_funcional | 08b5d75c535b10b7dca8ac26efc1dba14fe2a4f1 | [
"Apache-2.0"
] | null | null | null | phoenix/pfu/test/test_helper.exs | cenezaraujo/unifor_prog_funcional | 08b5d75c535b10b7dca8ac26efc1dba14fe2a4f1 | [
"Apache-2.0"
] | null | null | null | ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(Pfu.Repo, :manual)
| 21.666667 | 49 | 0.769231 |
9ebbcde879b4c938a52effb4c8cde522dbd99ae8 | 4,968 | exs | Elixir | test/jalka2021_web/controllers/user_settings_controller_test.exs | kriips/jalka2021 | f4d968e20cae116fd4056bff2f937cd036421977 | [
"MIT"
] | null | null | null | test/jalka2021_web/controllers/user_settings_controller_test.exs | kriips/jalka2021 | f4d968e20cae116fd4056bff2f937cd036421977 | [
"MIT"
] | null | null | null | test/jalka2021_web/controllers/user_settings_controller_test.exs | kriips/jalka2021 | f4d968e20cae116fd4056bff2f937cd036421977 | [
"MIT"
] | null | null | null | defmodule Jalka2021Web.UserSettingsControllerTest do
use Jalka2021Web.ConnCase, async: true
alias Jalka2021.Accounts
import Jalka2021.AccountsFixtures
setup :register_and_log_in_user
describe "GET /users/settings" do
test "renders settings page", %{conn: conn} do
conn = get(conn, Routes.user_settings_path(conn, :edit))
response = html_response(conn, 200)
assert response =~ "<h1>Settings</h1>"
end
test "redirects if user is not logged in" do
conn = build_conn()
conn = get(conn, Routes.user_settings_path(conn, :edit))
assert redirected_to(conn) == Routes.user_session_path(conn, :new)
end
end
describe "PUT /users/settings (change password form)" do
test "updates the user password and resets tokens", %{conn: conn, user: user} do
new_password_conn =
put(conn, Routes.user_settings_path(conn, :update), %{
"action" => "update_password",
"current_password" => valid_user_password(),
"user" => %{
"password" => "new valid password",
"password_confirmation" => "new valid password"
}
})
assert redirected_to(new_password_conn) == Routes.user_settings_path(conn, :edit)
assert get_session(new_password_conn, :user_token) != get_session(conn, :user_token)
assert get_flash(new_password_conn, :info) =~ "Password updated successfully"
assert Accounts.get_user_by_email_and_password(user.email, "new valid password")
end
test "does not update password on invalid data", %{conn: conn} do
old_password_conn =
put(conn, Routes.user_settings_path(conn, :update), %{
"action" => "update_password",
"current_password" => "invalid",
"user" => %{
"password" => "too short",
"password_confirmation" => "does not match"
}
})
response = html_response(old_password_conn, 200)
assert response =~ "<h1>Settings</h1>"
assert response =~ "should be at least 12 character(s)"
assert response =~ "does not match password"
assert response =~ "is not valid"
assert get_session(old_password_conn, :user_token) == get_session(conn, :user_token)
end
end
describe "PUT /users/settings (change email form)" do
@tag :capture_log
test "updates the user email", %{conn: conn, user: user} do
conn =
put(conn, Routes.user_settings_path(conn, :update), %{
"action" => "update_email",
"current_password" => valid_user_password(),
"user" => %{"email" => unique_user_email()}
})
assert redirected_to(conn) == Routes.user_settings_path(conn, :edit)
assert get_flash(conn, :info) =~ "A link to confirm your email"
assert Accounts.get_user_by_email(user.email)
end
test "does not update email on invalid data", %{conn: conn} do
conn =
put(conn, Routes.user_settings_path(conn, :update), %{
"action" => "update_email",
"current_password" => "invalid",
"user" => %{"email" => "with spaces"}
})
response = html_response(conn, 200)
assert response =~ "<h1>Settings</h1>"
assert response =~ "must have the @ sign and no spaces"
assert response =~ "is not valid"
end
end
describe "GET /users/settings/confirm_email/:token" do
setup %{user: user} do
email = unique_user_email()
token =
extract_user_token(fn url ->
Accounts.deliver_update_email_instructions(%{user | email: email}, user.email, url)
end)
%{token: token, email: email}
end
test "updates the user email once", %{conn: conn, user: user, token: token, email: email} do
conn = get(conn, Routes.user_settings_path(conn, :confirm_email, token))
assert redirected_to(conn) == Routes.user_settings_path(conn, :edit)
assert get_flash(conn, :info) =~ "Email changed successfully"
refute Accounts.get_user_by_email(user.email)
assert Accounts.get_user_by_email(email)
conn = get(conn, Routes.user_settings_path(conn, :confirm_email, token))
assert redirected_to(conn) == Routes.user_settings_path(conn, :edit)
assert get_flash(conn, :error) =~ "Email change link is invalid or it has expired"
end
test "does not update email with invalid token", %{conn: conn, user: user} do
conn = get(conn, Routes.user_settings_path(conn, :confirm_email, "oops"))
assert redirected_to(conn) == Routes.user_settings_path(conn, :edit)
assert get_flash(conn, :error) =~ "Email change link is invalid or it has expired"
assert Accounts.get_user_by_email(user.email)
end
test "redirects if user is not logged in", %{token: token} do
conn = build_conn()
conn = get(conn, Routes.user_settings_path(conn, :confirm_email, token))
assert redirected_to(conn) == Routes.user_session_path(conn, :new)
end
end
end
| 38.215385 | 96 | 0.651369 |
9ebbd7307a9a224f25440c07eec47246370a492c | 6,237 | ex | Elixir | test/support/adapter_case.ex | rzane/file_store | 6ff9e35043d46e655cb532032f4a77a9cd4a3d14 | [
"MIT"
] | 17 | 2019-10-11T05:50:28.000Z | 2022-01-25T23:28:02.000Z | test/support/adapter_case.ex | rzane/file_store | 6ff9e35043d46e655cb532032f4a77a9cd4a3d14 | [
"MIT"
] | 8 | 2019-10-25T04:19:42.000Z | 2022-01-27T02:32:47.000Z | test/support/adapter_case.ex | rzane/file_store | 6ff9e35043d46e655cb532032f4a77a9cd4a3d14 | [
"MIT"
] | 3 | 2019-10-25T02:30:22.000Z | 2022-01-24T21:24:08.000Z | defmodule FileStore.AdapterCase do
use ExUnit.CaseTemplate
@tmp Path.join(System.tmp_dir!(), "file_store")
setup do
File.rm_rf!(@tmp)
File.mkdir!(@tmp)
{:ok, tmp: @tmp}
end
using do
quote location: :keep do
import FileStore.AdapterCase
describe "write/3" do
test "writes a file", %{store: store} do
assert :ok = FileStore.write(store, "foo", "bar")
assert {:ok, "bar"} = FileStore.read(store, "foo")
end
test "overwrites a file", %{store: store} do
assert :ok = FileStore.write(store, "foo", "bar")
assert {:ok, "bar"} = FileStore.read(store, "foo")
assert :ok = FileStore.write(store, "foo", "baz")
assert {:ok, "baz"} = FileStore.read(store, "foo")
end
end
describe "read/3" do
test "reads a file", %{store: store} do
assert :ok = FileStore.write(store, "foo", "bar")
assert {:ok, "bar"} = FileStore.read(store, "foo")
end
test "errors when file does not exist", %{store: store} do
assert {:error, _} = FileStore.read(store, "does-not-exist")
end
end
describe "upload/3" do
test "uploads a file", %{store: store} do
bar = write("bar.txt", "bar")
assert :ok = FileStore.upload(store, bar, "foo")
assert {:ok, "bar"} = FileStore.read(store, "foo")
end
test "overwrites a file", %{store: store} do
bar = write("bar.txt", "bar")
baz = write("baz.txt", "baz")
assert :ok = FileStore.upload(store, bar, "foo")
assert {:ok, "bar"} = FileStore.read(store, "foo")
assert :ok = FileStore.upload(store, baz, "foo")
assert {:ok, "baz"} = FileStore.read(store, "foo")
end
test "fails when the source file is missing", %{store: store} do
assert {:error, _} = FileStore.upload(store, "doesnotexist.txt", "foo")
end
end
describe "download/3" do
test "downloads a file", %{store: store} do
download = join("download.txt")
assert :ok = FileStore.write(store, "foo", "bar")
assert :ok = FileStore.download(store, "foo", download)
assert File.read!(download) == "bar"
end
end
describe "stat/2" do
test "retrieves file info", %{store: store} do
assert :ok = FileStore.write(store, "foo", "bar")
assert {:ok, stat} = FileStore.stat(store, "foo")
assert stat.key == "foo"
assert stat.size == 3
assert stat.etag == "37b51d194a7513e45b56f6524f2d51f2"
end
test "fails when the file is missing", %{store: store} do
assert {:error, _} = FileStore.stat(store, "completegarbage")
end
end
describe "delete/2" do
test "deletes the file", %{store: store} do
assert :ok = FileStore.write(store, "foo", "bar")
assert :ok = FileStore.delete(store, "foo")
end
test "indicates success for non-existent keys", %{store: store} do
assert :ok = FileStore.delete(store, "non-existent")
assert :ok = FileStore.delete(store, "non/existent")
end
end
describe "delete_all/2" do
test "deletes all files", %{store: store} do
assert :ok = FileStore.write(store, "foo", "")
assert :ok = FileStore.write(store, "bar/buzz", "")
assert :ok = FileStore.delete_all(store)
assert {:error, _} = FileStore.stat(store, "foo")
assert {:error, _} = FileStore.stat(store, "bar/buzz")
end
test "deletes files under prefix", %{store: store} do
assert :ok = FileStore.write(store, "foo", "")
assert :ok = FileStore.write(store, "bar/buzz", "")
assert :ok = FileStore.write(store, "bar/baz", "")
assert :ok = FileStore.delete_all(store, prefix: "bar")
assert {:ok, _} = FileStore.stat(store, "foo")
assert {:error, _} = FileStore.stat(store, "bar/buzz")
assert {:error, _} = FileStore.stat(store, "bar/baz")
end
test "indicates success for non-existent keys", %{store: store} do
assert :ok = FileStore.delete_all(store, prefix: "non-existent")
end
end
describe "get_public_url/2" do
test "returns a URL", %{store: store} do
assert :ok = FileStore.write(store, "foo", "bar")
assert url = FileStore.get_public_url(store, "foo")
assert is_valid_url(url)
end
end
describe "get_signed_url/3" do
test "returns a URL", %{store: store} do
assert :ok = FileStore.write(store, "foo", "bar")
assert {:ok, url} = FileStore.get_signed_url(store, "foo")
assert is_valid_url(url)
end
end
describe "list!/2" do
test "lists keys in the store", %{store: store} do
assert :ok = FileStore.write(store, "foo", "")
assert "foo" in Enum.to_list(FileStore.list!(store))
end
test "lists nested keys in the store", %{store: store} do
assert :ok = FileStore.write(store, "foo/bar", "")
assert "foo/bar" in Enum.to_list(FileStore.list!(store))
end
test "lists keys matching prefix", %{store: store} do
assert :ok = FileStore.write(store, "bar", "")
assert :ok = FileStore.write(store, "foo/bar", "")
keys = Enum.to_list(FileStore.list!(store, prefix: "foo"))
refute "bar" in keys
assert "foo/bar" in keys
end
end
end
end
def join(name) do
Path.join(@tmp, name)
end
def write(name, data) do
path = join(name)
File.write!(path, data)
path
end
def is_valid_url(value) do
case URI.parse(value) do
%URI{scheme: nil} -> false
%URI{host: nil} -> false
%URI{scheme: scheme} -> scheme =~ ~r"^https?$"
end
end
def get_query(url, param) do
url
|> URI.parse()
|> Map.fetch!(:query)
|> URI.decode_query()
|> Map.fetch!(param)
end
def omit_query(url) do
url
|> URI.parse()
|> Map.put(:query, nil)
|> URI.to_string()
end
end
| 31.5 | 81 | 0.55764 |
9ebbec56348219a7773312ebb19ebbe8c78b9c1b | 426 | ex | Elixir | api/web/views/error_helpers.ex | AlexYanai/microblogger | 833320759cddd276bc31dabaec6f0c9e2eabb05a | [
"MIT"
] | null | null | null | api/web/views/error_helpers.ex | AlexYanai/microblogger | 833320759cddd276bc31dabaec6f0c9e2eabb05a | [
"MIT"
] | null | null | null | api/web/views/error_helpers.ex | AlexYanai/microblogger | 833320759cddd276bc31dabaec6f0c9e2eabb05a | [
"MIT"
] | null | null | null | defmodule Microblogger.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
if count = opts[:count] do
Gettext.dngettext(Microblogger.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(Microblogger.Gettext, "errors", msg, opts)
end
end
end
| 25.058824 | 78 | 0.690141 |
9ebc14f3ec227b873f59a6bde9d24ab2ca6fb105 | 4,268 | ex | Elixir | apps/fw/lib/cat_feeder/proximity_worker.ex | wsmoak/cat_feeder | e9157563ff3294fd9fd4c9d13f956cbe1dd718c2 | [
"MIT"
] | 20 | 2016-01-16T17:08:00.000Z | 2021-01-15T15:46:23.000Z | apps/fw/lib/cat_feeder/proximity_worker.ex | wsmoak/cat_feeder | e9157563ff3294fd9fd4c9d13f956cbe1dd718c2 | [
"MIT"
] | null | null | null | apps/fw/lib/cat_feeder/proximity_worker.ex | wsmoak/cat_feeder | e9157563ff3294fd9fd4c9d13f956cbe1dd718c2 | [
"MIT"
] | 5 | 2016-01-09T16:52:44.000Z | 2017-02-09T16:03:25.000Z | defmodule CatFeeder.ProximityWorker do
require Logger
use GenServer
use Timex
@active_hours 10..18 # 10am to 6:59pm
@timezone "America/New_York"
@wait 900000 # 15 min * 60 sec * 1000 ms
# register address
@cmd 0x80
@prox_result_h 0x87
@prox_result_l 0x88
@int_ctrl 0x89
@low_thresh_h 0x8A # register #10
@low_thresh_l 0x8B
@high_thresh_h 0x8C
@high_thresh_l 0x8D
@int_status 0x8E # register #14
# Client
def start_link() do
GenServer.start_link(__MODULE__, [], name: ProximityChecker)
end
# Server Callbacks
def init(_opts) do
pid = Process.whereis( ProximitySensor )
# set bits 0 and 1 to turn on periodic proximity measurements
I2c.write(pid, <<@cmd, 0x03>> )
# set the low threshold
I2c.write(pid, <<@low_thresh_h, 0x00>> )
I2c.write(pid, <<@low_thresh_l, 0x00>> )
# set the high threshold, 2100 is 0x834
I2c.write(pid, <<@high_thresh_h, 0x08 >> )
I2c.write(pid, <<@high_thresh_l, 0x34 >> )
# configure the chip to interrupt when threshold is exceeded
I2c.write(pid, <<@int_ctrl, 0x02 >>) # 0000 0010
int_pid = Process.whereis( InterruptPin )
# tell gpio_rpi that the pin needs to be pulled up by default
GpioRpi.set_mode(int_pid, :up)
# ... and that we want an interrupt when it goes low
GpioRpi.set_int(int_pid, :falling)
# By default, elixir_ale/gpio_rpi will send an initial message about the state of the interrupt pin, and we need to ignore it. See below.
# set the initial state.
{:ok, %{:status => :starting}}
end
def terminate(reason, _state) do
Logger.debug "Received call to terminate for #{reason}"
pid = Process.whereis(ProximitySensor)
# turn off proximity sensing
I2c.write(pid, <<@cmd, 0x00>> )
end
# the official timer ended, so change the state
def handle_info(:time_is_up, state) do
Logger.debug "Time is up! Ready to feed again"
{:noreply, Map.update!(state, :status, fn _x -> :idle end) }
end
def handle_info({:gpio_interrupt, _pin, :falling}, state = %{status: :waiting}) do
time = CatFeederData.Worker.get_last_fed_at
display_time = Timex.format!(time, "{h12}:{m}")
Logger.debug "Interrupted, but still waiting. Last fed at #{display_time}"
clear_interrupt_status
{:noreply, state}
end
def handle_info({:gpio_interrupt, _pin, :falling}, state = %{status: :idle} ) do
time = Timex.now(@timezone)
if time.hour in @active_hours do
Logger.debug "FEED THE CAT!"
CatFeederData.Worker.set_last_fed_at(time)
# turn the motor
pid = Process.whereis( StepperTurner )
Process.send(pid, :bump, [])
# wait before feeding again
Process.send_after(ProximityChecker, :time_is_up, @wait)
clear_interrupt_status
{:noreply, Map.update!(state, :status, fn x -> :waiting end) }
else
Logger.debug "Outside of allowed hours, not feeding"
clear_interrupt_status
{:noreply, state}
end
end
def handle_info({:gpio_interrupt, _pin, :rising}, state = %{status: :starting} ) do
# This is the initial message from the interrupt pin, after which we switch to the :idle state
{:noreply, Map.update!(state, :status, fn _old_state -> :idle end) }
end
def handle_info(msg, state) do
IO.write "in generic handle_info, msg is "
IO.inspect msg
IO.write " ... and state is "
IO.inspect state
{:noreply, state}
end
# Helper Functions
def check_proximity do
pid = Process.whereis(ProximitySensor)
<< val :: 16 >> = I2c.write_read(pid, <<@prox_result_h>>, 2)
Logger.debug "Proximity value #{val}"
val
end
def check_interrupt_status do
pid = Process.whereis(ProximitySensor)
<< val :: 8 >> = I2c.write_read(pid, <<@int_status>>, 1)
Logger.debug "Interrupt Status #{inspect(val, base: :hex)}"
end
def clear_interrupt_status do
pid = Process.whereis(ProximitySensor)
<< val :: 8 >> = I2c.write_read(pid, <<@int_status>>, 1)
# if any of the bits are set, clear them by writing a 1 back to them
if val > 0 do
Logger.debug "Clearing interrupt status, was #{inspect(val, base: :hex)}"
I2c.write(pid, <<@int_status, val >> )
end
end
end
| 30.927536 | 142 | 0.660731 |
9ebc1cfbaac87787990f54e62d1629a3d9853ff0 | 2,132 | exs | Elixir | apps/tracing_2/test/kv_registry_test.exs | WhiteRookPL/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | 14 | 2017-08-09T14:21:47.000Z | 2022-03-11T04:10:49.000Z | apps/tracing_2/test/kv_registry_test.exs | nicholasjhenry/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | null | null | null | apps/tracing_2/test/kv_registry_test.exs | nicholasjhenry/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | 15 | 2017-09-05T15:43:53.000Z | 2020-04-13T16:20:18.000Z | defmodule KV.RegistryTest do
use ExUnit.Case, async: true
setup context do
{:ok, _} = KV.Registry.start_link(context.test)
{:ok, registry: context.test}
end
test "spawns buckets", %{registry: registry} do
assert KV.Registry.lookup(registry, "shopping") == :error
KV.Registry.create(registry, "shopping")
assert {:ok, bucket} = KV.Registry.lookup(registry, "shopping")
KV.Bucket.put(bucket, "milk", 1)
assert KV.Bucket.get(bucket, "milk") == 1
end
test "removes buckets on exit", %{registry: registry} do
KV.Registry.create(registry, "shopping")
{:ok, bucket} = KV.Registry.lookup(registry, "shopping")
Agent.stop(bucket)
# Do a sync to ensure the registry processed the down message
_ = KV.Registry.create(registry, "bogus")
assert KV.Registry.lookup(registry, "shopping") == :error
end
test "removes bucket on crash", %{registry: registry} do
KV.Registry.create(registry, "shopping")
{:ok, bucket} = KV.Registry.lookup(registry, "shopping")
# Stop the bucket with non-normal reason
Agent.stop(bucket, :shutdown)
# Do a sync to ensure the registry processed the down message
_ = KV.Registry.create(registry, "bogus")
assert KV.Registry.lookup(registry, "shopping") == :error
end
test "deletes existing bucket", %{registry: registry} do
KV.Registry.create(registry, "shopping")
assert KV.Registry.delete(registry, "shopping") == :bucket_deleted
# Do a sync to ensure the registry processed the down message
_ = KV.Registry.create(registry, "bogus")
assert KV.Registry.lookup(registry, "shopping") == :error
end
test "complains when delete non-existing bucket", %{registry: registry} do
assert KV.Registry.delete(registry, "shopping") == :no_such_bucket
end
test "lists all buckets", %{registry: registry} do
KV.Registry.create(registry, "foo")
KV.Registry.create(registry, "bar")
KV.Registry.create(registry, "baz")
KV.Registry.create(registry, "qux")
KV.Registry.delete(registry, "bar")
assert KV.Registry.buckets(registry) == [ "baz", "foo", "qux" ]
end
end
| 32.8 | 76 | 0.684334 |
9ebc27a59e93ed48b41e47978fbaa30fc4e6d225 | 4,083 | ex | Elixir | platform/target/configurator/validator.ex | bahanni/custom_rpi4 | ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5 | [
"MIT"
] | 843 | 2016-10-05T23:46:05.000Z | 2022-03-14T04:31:55.000Z | platform/target/configurator/validator.ex | bahanni/custom_rpi4 | ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5 | [
"MIT"
] | 455 | 2016-10-15T08:49:16.000Z | 2022-03-15T12:23:04.000Z | platform/target/configurator/validator.ex | bahanni/custom_rpi4 | ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5 | [
"MIT"
] | 261 | 2016-10-10T04:37:06.000Z | 2022-03-13T21:07:38.000Z | defmodule FarmbotOS.Platform.Target.Configurator.Validator do
@moduledoc """
VintageNet.Technology that handles turning Farmbot's internal
network representation into either a VintageNetEthernet
or VintageNetWiFi RawConfig.
"""
@behaviour VintageNet.Technology
@impl VintageNet.Technology
def normalize(
%{
network_type: _,
ssid: _,
security: _,
psk: _,
identity: _,
password: _,
domain: _,
name_servers: _,
ipv4_method: _,
ipv4_address: _,
ipv4_gateway: _,
ipv4_subnet_mask: _,
regulatory_domain: _
} = config
) do
config
end
def normalize(_) do
raise "Could not normalize farmbot network config"
end
@impl VintageNet.Technology
def to_raw_config(ifname, %{network_type: type} = config, opts) do
case type do
"wired" -> to_wired_raw_config(ifname, config, opts)
"wireless" -> to_wireless_raw_config(ifname, config, opts)
end
end
def to_wired_raw_config(ifname, config, opts) do
config = %{
type: VintageNetEthernet,
ipv4: to_ipv4(config)
}
vintage_ethernet(ifname, config, opts)
end
def to_wireless_raw_config(ifname, config, opts) do
config = %{
type: VintageNetWiFi,
ipv4: to_ipv4(config),
vintage_net_wifi: to_vintage_net_wifi(config)
}
vintage_wifi(ifname, config, opts)
end
@impl VintageNet.Technology
def check_system(_opts) do
:ok
end
@impl true
def ioctl(_ifname, _ioctl, _args) do
{:error, :unsupported}
end
defp to_ipv4(%{
ipv4_method: "static",
name_servers: name_servers,
domain: domain,
ipv4_address: ipv4_address,
ipv4_gateway: ipv4_gateway,
ipv4_subnet_mask: ipv4_subnet_mask
}) do
%{
method: :static,
address: ipv4_address,
netmask: ipv4_subnet_mask,
gateway: ipv4_gateway,
name_servers: name_servers,
domain: domain
}
end
defp to_ipv4(%{ipv4_method: "dhcp"}) do
%{method: :dhcp}
end
defp to_vintage_net_wifi(%{
security: "NONE",
ssid: ssid,
regulatory_domain: reg_domain
}) do
%{
networks: [
%{
key_mgmt: :none,
ssid: ssid
}
],
scan_ssid: 1,
bgscan: :simple,
regulatory_domain: reg_domain
}
end
defp to_vintage_net_wifi(%{
security: "WPA-PSK",
ssid: ssid,
psk: psk,
regulatory_domain: reg_domain
}) do
%{
networks: [
%{
ssid: ssid,
psk: psk,
key_mgmt: :wpa_psk
}
],
scan_ssid: 1,
bgscan: :simple,
regulatory_domain: reg_domain
}
end
defp to_vintage_net_wifi(%{
security: "WPA2-PSK",
ssid: ssid,
psk: psk,
regulatory_domain: reg_domain
}) do
%{
networks: [
%{
ssid: ssid,
key_mgmt: :wpa_psk,
psk: psk
}
],
scan_ssid: 1,
bgscan: :simple,
regulatory_domain: reg_domain
}
end
defp to_vintage_net_wifi(%{
security: "WPA-EAP",
ssid: ssid,
identity: id,
password: pw,
regulatory_domain: reg_domain
}) do
%{
networks: [
%{
ssid: ssid,
key_mgmt: :wpa_eap,
pairwise: "CCMP TKIP",
group: "CCMP TKIP",
eap: "PEAP",
phase1: "peapver=auto",
phase2: "MSCHAPV2",
identity: id,
password: pw
}
],
scan_ssid: 1,
bgscan: :simple,
regulatory_domain: reg_domain
}
end
defp vintage_ethernet(ifname, config, opts) do
config = VintageNetEthernet.normalize(config)
VintageNetEthernet.to_raw_config(ifname, config, opts)
end
defp vintage_wifi(ifname, config, opts) do
config = VintageNetWiFi.normalize(config)
VintageNetWiFi.to_raw_config(ifname, config, opts)
end
end
| 21.603175 | 68 | 0.571639 |
9ebc6086450ad37c891b93bf75a58d8d0bf0f55e | 1,356 | ex | Elixir | test/support/data_case.ex | lukkor/exlog | f2812d25e7712c41db5a9bf5bf3977255916c8e1 | [
"MIT"
] | null | null | null | test/support/data_case.ex | lukkor/exlog | f2812d25e7712c41db5a9bf5bf3977255916c8e1 | [
"MIT"
] | null | null | null | test/support/data_case.ex | lukkor/exlog | f2812d25e7712c41db5a9bf5bf3977255916c8e1 | [
"MIT"
] | null | null | null | defmodule Exlog.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
# Code injected into every module that does `use Exlog.DataCase`:
# brings the repo alias and the Ecto query/changeset helpers into scope.
using do
quote do
alias Exlog.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Exlog.DataCase
end
end
# Runs before every test: checks out a sandboxed DB connection so each
# test runs in its own transaction.
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Exlog.Repo)
unless tags[:async] do
# Non-async tests use shared mode so processes spawned by the test
# reuse this test's connection.
Ecto.Adapters.SQL.Sandbox.mode(Exlog.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transform changeset errors to a map of messages.
changeset = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Enum.reduce(opts, message, fn {key, value}, acc ->
String.replace(acc, "%{#{key}}", to_string(value))
end)
end)
end
end
| 25.111111 | 74 | 0.677729 |
9ebc7985f85fb4a1c72b59ecf722a7a069b1310f | 3,968 | ex | Elixir | lib/livebook_web/live/session_live/persistence_component.ex | danhuynhdev/livebook | d20d4f6bf123d58e4666c064027b55e3b300702f | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/live/session_live/persistence_component.ex | danhuynhdev/livebook | d20d4f6bf123d58e4666c064027b55e3b300702f | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/live/session_live/persistence_component.ex | danhuynhdev/livebook | d20d4f6bf123d58e4666c064027b55e3b300702f | [
"Apache-2.0"
] | null | null | null | defmodule LivebookWeb.SessionLive.PersistenceComponent do
use LivebookWeb, :live_component
alias Livebook.{Session, SessionSupervisor, LiveMarkdown}
@impl true
def mount(socket) do
  # Paths of all running sessions; used to block saving over a file
  # another session already owns (see path_savable?/2).
  summaries = SessionSupervisor.get_session_summaries()
  paths = Enum.map(summaries, & &1.path)
  {:ok, assign(socket, running_paths: paths)}
end
@impl true
def render(assigns) do
# Persistence settings panel: toggle file-backed vs memory-only storage,
# pick a destination path, and save. The Save button is disabled while
# the chosen path is unsavable or unchanged (see disabled?/3).
~H"""
<div class="p-6 pb-4 flex flex-col space-y-3">
<h3 class="text-2xl font-semibold text-gray-800">
File
</h3>
<div class="w-full flex-col space-y-5">
<p class="text-gray-700">
Specify where the notebook should be automatically persisted.
</p>
<div class="flex space-x-4">
<.choice_button
active={@path != nil}
phx-click="set_persistence_type"
phx-value-type="file"
phx-target={@myself}>
Save to file
</.choice_button>
<.choice_button
active={@path == nil}
phx-click="set_persistence_type"
phx-value-type="memory"
phx-target={@myself}>
Memory only
</.choice_button>
</div>
<%= if @path != nil do %>
<div class="h-full h-52">
<%= live_component LivebookWeb.PathSelectComponent,
id: "path_select",
path: @path,
extnames: [LiveMarkdown.extension()],
running_paths: @running_paths,
phx_target: @myself,
phx_submit: if(disabled?(@path, @current_path, @running_paths), do: nil, else: "save") %>
</div>
<% end %>
<div class="flex flex-col space-y-2">
<%= if @path != nil do %>
<div class="text-gray-500 text-sm">
File: <%= normalize_path(@path) %>
</div>
<% end %>
<div>
<button class="button button-blue mt-2"
phx-click="save"
phx-target={@myself}
disabled={disabled?(@path, @current_path, @running_paths)}>
Save
</button>
</div>
</div>
</div>
</div>
"""
end
@impl true
# Toggles between file-backed and in-memory persistence. A nil path
# means "memory only"; switching to "file" restores the previously
# persisted path or falls back to the default location.
def handle_event("set_persistence_type", %{"type" => type}, socket) do
path =
case type do
"file" -> socket.assigns.current_path || default_path()
"memory" -> nil
end
{:noreply, assign(socket, path: path)}
end
def handle_event("set_path", %{"path" => path}, socket) do
{:noreply, assign(socket, path: path)}
end
def handle_event("save", %{}, socket) do
path = normalize_path(socket.assigns.path)
Session.set_path(socket.assigns.session_id, path)
Session.save_sync(socket.assigns.session_id)
running_paths =
if path do
[path | socket.assigns.running_paths]
else
List.delete(socket.assigns.running_paths, path)
end
# After saving the file reload the directory contents,
# so that the new file gets shown.
send_update(LivebookWeb.PathSelectComponent,
id: "path_select",
running_paths: running_paths,
force_reload: true
)
{:noreply, assign(socket, running_paths: running_paths)}
end
# Default notebook location under the configured Livebook root.
defp default_path() do
  Path.join(Livebook.Config.root_path(), "notebook")
end
# A nil path (memory-only persistence) is always acceptable.
defp path_savable?(nil, _running_paths), do: true

# A path is savable when it does not exist yet, or when it is a regular
# file that no other running session is using.
defp path_savable?(path, running_paths) do
  not File.exists?(path) or (File.regular?(path) and path not in running_paths)
end
defp normalize_path(nil), do: nil

# Appends the Live Markdown extension unless the path already ends in it.
defp normalize_path(path) do
  ext = LiveMarkdown.extension()
  if String.ends_with?(path, ext), do: path, else: path <> ext
end
# The Save button is disabled when the (normalized) path cannot be saved
# to, or when it equals the currently persisted path (nothing to change).
defp disabled?(path, current_path, running_paths) do
not path_savable?(normalize_path(path), running_paths) or normalize_path(path) == current_path
end
end
| 28.963504 | 107 | 0.586946 |
9ebc7ba01fabc568ee3f0730e88600bb1e22bef9 | 82 | ex | Elixir | lib/ecto_tenancy_enforcer/tenancy_violation.ex | sb8244/ecto_tenancy_enforcer | 7dac523d0560ec4a57bcf3800b788d312d70d6e6 | [
"MIT"
] | 36 | 2019-12-30T23:02:59.000Z | 2022-03-26T14:38:41.000Z | lib/ecto_tenancy_enforcer/tenancy_violation.ex | sb8244/ecto_tenancy_enforcer | 7dac523d0560ec4a57bcf3800b788d312d70d6e6 | [
"MIT"
] | 1 | 2021-01-13T05:01:04.000Z | 2021-01-13T05:01:04.000Z | lib/ecto_tenancy_enforcer/tenancy_violation.ex | sb8244/ecto_tenancy_enforcer | 7dac523d0560ec4a57bcf3800b788d312d70d6e6 | [
"MIT"
] | 1 | 2021-04-25T16:50:16.000Z | 2021-04-25T16:50:16.000Z | defmodule EctoTenancyEnforcer.TenancyViolation do
# Raised when a query violates the enforced tenancy rules; carries only
# a human-readable :message.
defexception message: nil
end
| 20.5 | 49 | 0.865854 |
9ebc92ff92a23e0ab41b6bc2fdabcb009477a94e | 583 | exs | Elixir | exercises/practice/all-your-base/mix.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/practice/all-your-base/mix.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/practice/all-your-base/mix.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule AllYourBase.MixProject do
use Mix.Project
# Standard exercise scaffold: no runtime dependencies.
def project do
[
app: :all_your_base,
version: "0.1.0",
# elixir: "~> 1.8",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
]
end
end
| 20.103448 | 87 | 0.581475 |
9ebc9cba89aaf2785c3e8a10c564ddb91b4affd9 | 2,389 | exs | Elixir | lib/logger/test/logger/error_handler_test.exs | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | 1 | 2021-04-28T21:35:01.000Z | 2021-04-28T21:35:01.000Z | lib/logger/test/logger/error_handler_test.exs | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | null | null | null | lib/logger/test/logger/error_handler_test.exs | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | 8 | 2018-02-20T18:30:53.000Z | 2019-06-18T14:23:31.000Z | defmodule Logger.ErrorHandlerTest do
use Logger.Case
test "survives after crashes" do
expected_result =
"[error] :gen_event handler Logger.ErrorHandler installed at :error_logger\n" <>
"** (exit) an exception was raised:"
assert capture_log(fn ->
:error_logger.info_msg("~p~n", [])
wait_for_handler(:error_logger, Logger.ErrorHandler)
end) =~ expected_result
assert error_log(:info_msg, "~p~n", [:hello]) =~ msg("[info] :hello")
end
test "survives after Logger exit" do
Process.whereis(Logger) |> Process.exit(:kill)
wait_for_logger()
wait_for_handler(:error_logger, Logger.ErrorHandler)
assert error_log(:info_msg, "~p~n", [:hello]) =~ msg("[info] :hello")
end
test "formats error_logger info message" do
assert error_log(:info_msg, "hello", []) =~ msg("[info] hello")
assert error_log(:info_msg, "~p~n", [:hello]) =~ msg("[info] :hello")
end
test "formats error_logger info report" do
assert error_log(:info_report, "hello") =~ msg("[info] \"hello\"")
assert error_log(:info_report, :hello) =~ msg("[info] :hello")
assert error_log(:info_report, :special, :hello) == ""
end
test "formats error_logger error message" do
assert error_log(:error_msg, "hello", []) =~ msg("[error] hello")
assert error_log(:error_msg, "~p~n", [:hello]) =~ msg("[error] :hello")
end
test "formats error_logger error report" do
assert error_log(:error_report, "hello") =~ msg("[error] \"hello\"")
assert error_log(:error_report, :hello) =~ msg("[error] :hello")
assert error_log(:error_report, :special, :hello) == ""
end
test "formats error_logger warning message" do
assert error_log(:warning_msg, "hello", []) =~ msg("[warn] hello")
assert error_log(:warning_msg, "~p~n", [:hello]) =~ msg("[warn] :hello")
end
test "formats error_logger warning report" do
assert error_log(:warning_report, "hello") =~ msg("[warn] \"hello\"")
assert error_log(:warning_report, :hello) =~ msg("[warn] :hello")
assert error_log(:warning_report, :special, :hello) == ""
end
# Convenience wrappers: capture the log produced by a single
# :error_logger call, with a format only or a format plus arguments.
defp error_log(fun, format), do: do_error_log(fun, [format])
defp error_log(fun, format, args), do: do_error_log(fun, [format, args])
# Applies the given :error_logger function and returns the captured log.
defp do_error_log(fun, args), do: capture_log(fn -> apply(:error_logger, fun, args) end)
end
| 34.623188 | 86 | 0.648807 |
9ebca3207eebc8e86be317c636758a1f89044f89 | 1,209 | ex | Elixir | lib/ecto/adapters/riak/validators.ex | TanYewWei/ecto | 916c6467d5f7368fa10ecd7cfcfd2d4a9924a282 | [
"Apache-2.0"
] | 1 | 2015-08-27T13:17:10.000Z | 2015-08-27T13:17:10.000Z | lib/ecto/adapters/riak/validators.ex | TanYewWei/ecto | 916c6467d5f7368fa10ecd7cfcfd2d4a9924a282 | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/riak/validators.ex | TanYewWei/ecto | 916c6467d5f7368fa10ecd7cfcfd2d4a9924a282 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Adapters.Riak.Validators do
# riak_validate/1 expands to riak_validate/2 with no extra field checks.
defmacro riak_validate(x) do
quote do
riak_validate(unquote(x), [])
end
end
# Expands to a `validate` call that always checks the standard Riak
# metadata fields, then splices in any caller-supplied validations.
defmacro riak_validate(x, fields) do
quote do
validate unquote(x),
[ { :primary_key, unquote(__MODULE__).validate_is_binary },
{ :riak_version, unquote(__MODULE__).validate_is_integer },
{ :riak_vclock, unquote(__MODULE__).maybe_validate_is_binary },
{ :riak_context, unquote(__MODULE__).validate_is_list },
unquote_splicing(fields) ]
end
end
# Like validate_is_binary/3, but a nil value is considered valid.
def maybe_validate_is_binary(attr, value, opts \\ []) do
  # `nil?/1` was removed from Kernel; `is_nil/1` is the supported
  # replacement with identical semantics.
  if is_nil(value) do
    []
  else
    validate_is_binary(attr, value, opts)
  end
end
# Each validator returns [] on success or a single {attr, message}
# error tuple; the message can be overridden via opts[:message].

def validate_is_binary(attr, value, opts \\ []) do
  check(is_binary(value), attr, opts[:message] || "is not a string")
end

def validate_is_integer(attr, value, opts \\ []) do
  check(is_integer(value), attr, opts[:message] || "is not an integer")
end

def validate_is_list(attr, value, opts \\ []) do
  check(is_list(value), attr, opts[:message] || "is not a list")
end

defp check(true, _attr, _message), do: []
defp check(false, attr, message), do: [{attr, message}]
end | 23.25 | 73 | 0.608768 |
9ebcab821572d11d9a79a573ca70d380a233f448 | 675 | exs | Elixir | mix.exs | timdeputter/Rendezvous | 52e82484c4f4e5249d52b35d1c713ef505b4f4c0 | [
"MIT"
] | 9 | 2015-09-30T09:34:09.000Z | 2022-01-28T08:27:53.000Z | mix.exs | timdeputter/Rendezvous | 52e82484c4f4e5249d52b35d1c713ef505b4f4c0 | [
"MIT"
] | 1 | 2015-04-20T17:38:50.000Z | 2015-04-20T17:38:50.000Z | mix.exs | timdeputter/Rendezvous | 52e82484c4f4e5249d52b35d1c713ef505b4f4c0 | [
"MIT"
] | 2 | 2015-10-07T03:19:11.000Z | 2018-01-04T15:49:27.000Z | defmodule Rendezvous.Mixfile do
use Mix.Project
def project do
  [
    app: :rendezvous,
    version: "0.0.1",
    description: "Implementation of the Rendezvous or Highest Random Weight (HRW) hashing algorithm",
    elixir: "~> 1.0",
    # Zero-arity helpers are called with parentheses: bare `package`/`deps`
    # is ambiguous with variables and warns on modern Elixir.
    package: package(),
    deps: deps(),
    test_coverage: [tool: ExCoveralls]
  ]
end
# Coverage tooling for dev/test plus the fitex runtime dependency.
defp deps do
  [
    {:excoveralls, "~> 0.3", only: [:dev, :test]},
    {:fitex, "~> 0.0.1"}
  ]
end
# Hex package metadata.
# NOTE(review): `contributors:` has been deprecated by Hex in favor of
# `maintainers:` — worth updating before the next publish.
defp package do
[files: ["lib", "mix.exs", "README*", "readme*", "LICENSE*", "license*"],
contributors: ["Tim de Putter"],
licenses: ["The MIT License"],
links: %{"GitHub" => "https://github.com/Puddah/Rendezvous"}]
end
end
| 25.961538 | 102 | 0.601481 |
9ebce393b68dcc1da18a5de90fdf88536b5ec837 | 460 | ex | Elixir | lib/spaceapi.ex | geistesk/spaceapi | 7506dfa9f8f95abef87c65b9dd61014a56fdc125 | [
"MIT"
] | null | null | null | lib/spaceapi.ex | geistesk/spaceapi | 7506dfa9f8f95abef87c65b9dd61014a56fdc125 | [
"MIT"
] | null | null | null | lib/spaceapi.ex | geistesk/spaceapi | 7506dfa9f8f95abef87c65b9dd61014a56fdc125 | [
"MIT"
] | null | null | null | defmodule SpaceApi do
alias SpaceApi.Space
import SpaceApi.JsonParser, only: [parse_json: 2]
@moduledoc """
This module provides access to the _from_string_-function.
"""
@doc """
A given string (should contain a valid Space API-JSON) will be converted
into a Space-structure.
"""
@spec from_string(str: String, space: Space) :: Space
def from_string(str, space \\ %Space{}) do
Poison.decode!(str)
|> parse_json(space)
end
end
| 25.555556 | 74 | 0.693478 |
9ebce5b36870bf5dac40862e6795be52803babff | 3,598 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/activity_snippet.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/activity_snippet.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/activity_snippet.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.YouTube.V3.Model.ActivitySnippet do
@moduledoc """
Basic details about an activity, including title, description, thumbnails,
activity type and group.
Next ID: 12
## Attributes
* `channelId` (*type:* `String.t`, *default:* `nil`) - The ID that YouTube uses to uniquely identify the channel associated
with the activity.
* `channelTitle` (*type:* `String.t`, *default:* `nil`) - Channel title for the channel responsible for this activity
* `description` (*type:* `String.t`, *default:* `nil`) - The description of the resource primarily associated with the activity.
@mutable youtube.activities.insert
* `groupId` (*type:* `String.t`, *default:* `nil`) - The group ID associated with the activity. A group ID identifies user
events that are associated with the same user and resource. For example,
if a user rates a video and marks the same video as a favorite, the
entries for those events would have the same group ID in the user's
activity feed. In your user interface, you can avoid repetition by
grouping events with the same <code>groupId</code> value.
* `publishedAt` (*type:* `String.t`, *default:* `nil`) - The date and time that the video was uploaded. The value is specified
in <a href="//www.w3.org/TR/NOTE-datetime">ISO 8601</a>
format.
* `thumbnails` (*type:* `GoogleApi.YouTube.V3.Model.ThumbnailDetails.t`, *default:* `nil`) - A map of thumbnail images associated with the resource that is primarily
associated with the activity. For each object in the map, the key is the
name of the thumbnail image, and the value is an object that contains
other information about the thumbnail.
* `title` (*type:* `String.t`, *default:* `nil`) - The title of the resource primarily associated with the activity.
* `type` (*type:* `String.t`, *default:* `nil`) - The type of activity that the resource describes.
"""
use GoogleApi.Gax.ModelBase
# Struct fields mirror the JSON attribute names of the API resource.
@type t :: %__MODULE__{
:channelId => String.t(),
:channelTitle => String.t(),
:description => String.t(),
:groupId => String.t(),
:publishedAt => String.t(),
:thumbnails => GoogleApi.YouTube.V3.Model.ThumbnailDetails.t(),
:title => String.t(),
:type => String.t()
}
# `field/1,2` registers each attribute with the Gax model machinery; the
# `as:` option decodes the nested thumbnails object into its model struct.
field(:channelId)
field(:channelTitle)
field(:description)
field(:groupId)
field(:publishedAt)
field(:thumbnails, as: GoogleApi.YouTube.V3.Model.ThumbnailDetails)
field(:title)
field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.ActivitySnippet do
# Delegates JSON decoding to the generated decode/2 on the model module.
def decode(value, options) do
GoogleApi.YouTube.V3.Model.ActivitySnippet.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.ActivitySnippet do
# Encoding is shared across all generated models via ModelBase.
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 43.878049 | 169 | 0.699277 |
9ebd016927eb42d9df1b3c369b6d30f477ee369e | 2,485 | exs | Elixir | mix.exs | dstockdale/timex | 024f8392e9debf3b8d40d1977b6a24182079e546 | [
"MIT"
] | null | null | null | mix.exs | dstockdale/timex | 024f8392e9debf3b8d40d1977b6a24182079e546 | [
"MIT"
] | null | null | null | mix.exs | dstockdale/timex | 024f8392e9debf3b8d40d1977b6a24182079e546 | [
"MIT"
] | null | null | null | defmodule Timex.Mixfile do
use Mix.Project
@version "3.6.2"
# Project definition: compiles gettext alongside the Elixir compiler and
# routes doc/coverage Mix tasks to their expected environments.
def project do
[
app: :timex,
version: @version,
elixir: "~> 1.6",
description: description(),
package: package(),
deps: deps(),
docs: docs(),
# Gettext resources must be compiled together with the Elixir sources.
compilers: [:gettext] ++ Mix.compilers(),
test_coverage: [tool: ExCoveralls],
elixirc_paths: elixirc_paths(Mix.env()),
# Make doc and coverage tasks run in the right Mix environments.
preferred_cli_env: [
"hex.publish": :docs,
docs: :docs,
coveralls: :test,
"coveralls.html": :test,
"coveralls.detail": :test,
"coveralls.json": :test,
"coveralls.post": :test
]
]
end
# OTP application definition.
def application do
[
# NOTE(review): `applications:` is the pre-Elixir-1.4 style; newer
# projects usually list only `extra_applications:` and infer the rest.
applications: [:logger, :tzdata, :gettext, :combine],
env: [local_timezone: nil, default_locale: "en"],
mod: {Timex, []}
]
end
# Hex package description.
defp description do
"""
Timex is a rich, comprehensive Date/Time library for Elixir projects, with full timezone support via the :tzdata package.
If you need to manipulate dates, times, datetimes, timestamps, etc., then Timex is for you!
"""
end
# Hex package metadata: shipped files, maintainers, license and links.
defp package do
[
files: ["lib", "priv", "mix.exs", "README.md", "LICENSE.md"],
maintainers: ["Paul Schoenfelder", "Chris Hildebrand"],
licenses: ["MIT"],
links: %{
Changelog: "https://github.com/bitwalker/timex/blob/master/CHANGELOG.md",
GitHub: "https://github.com/bitwalker/timex"
}
]
end
# Runtime deps (tzdata/combine/gettext) plus docs, benchmarking,
# analysis and coverage tooling restricted to their environments.
def deps do
[
{:tzdata, "~> 0.1.8 or ~> 0.5 or ~> 1.0.0"},
{:combine, "~> 0.10"},
{:gettext, "~> 0.10"},
{:ex_doc, "~> 0.13", only: [:docs]},
{:benchfella, "~> 0.3", only: [:bench]},
{:dialyxir, "~> 0.5", only: [:dev]},
{:excoveralls, "~> 0.4", only: [:test]},
{:stream_data, "~> 0.4", only: [:test]}
]
end
# ExDoc configuration: landing page, source links and extra guides.
defp docs do
[
main: "getting-started",
formatter_opts: [gfm: true],
source_ref: @version,
source_url: "https://github.com/bitwalker/timex",
extras: [
"docs/Getting Started.md",
"CHANGELOG.md",
"docs/Basic Usage.md",
"docs/Erlang Interop.md",
"docs/Formatting.md",
"docs/Parsing.md",
"docs/FAQ.md",
"docs/Using with Ecto.md",
"docs/Custom Parsers.md",
"docs/Custom Formatters.md"
]
]
end
# Specifies which paths to compile per environment: test builds also
# compile the helpers under test/helpers.
defp elixirc_paths(:test), do: ["lib", "test/helpers"]
defp elixirc_paths(_), do: ["lib"]
end
| 26.157895 | 125 | 0.552515 |
9ebd19911d0478ee6672f0c31a9ed284db56a091 | 317 | ex | Elixir | server/lib/freedom_account_web/schema/user_types.ex | randycoulman/freedomAccount | c1c51a765052aa318ad3a504a396e8d07a770195 | [
"MIT"
] | 6 | 2019-04-03T19:16:01.000Z | 2020-08-10T09:38:24.000Z | server/lib/freedom_account_web/schema/user_types.ex | randycoulman/freedomAccount | c1c51a765052aa318ad3a504a396e8d07a770195 | [
"MIT"
] | 32 | 2019-03-19T02:45:45.000Z | 2021-12-05T06:58:07.000Z | server/lib/freedom_account_web/schema/user_types.ex | randycoulman/freedomAccount | c1c51a765052aa318ad3a504a396e8d07a770195 | [
"MIT"
] | null | null | null | defmodule FreedomAccountWeb.Schema.UserTypes do
@moduledoc """
GraphQL type definitions for users.
"""
use Absinthe.Schema.Notation
@desc "A user"
object :user do
@desc "The user's unique ID"
field :id, non_null(:id)
@desc "The name of the user"
field :name, non_null(:string)
end
end
| 19.8125 | 47 | 0.675079 |
9ebd21ab8e5bdb98e9884804860cf027607ed1bc | 718 | exs | Elixir | example/mix.exs | parody/chronik | ec2dbc5b415433d8732575db6ce24b02ebaf5da3 | [
"MIT"
] | 28 | 2017-09-12T13:54:17.000Z | 2021-07-20T22:04:28.000Z | example/mix.exs | parody/chronik | ec2dbc5b415433d8732575db6ce24b02ebaf5da3 | [
"MIT"
] | 18 | 2017-09-22T14:06:00.000Z | 2018-06-09T14:18:35.000Z | example/mix.exs | parody/chronik | ec2dbc5b415433d8732575db6ce24b02ebaf5da3 | [
"MIT"
] | 2 | 2018-05-11T09:36:50.000Z | 2019-09-23T18:29:58.000Z | defmodule Example.Mixfile do
use Mix.Project
def project do
  [
    app: :example,
    version: "0.1.0",
    elixir: "~> 1.5-rc",
    # Mix.env/0 is a function: call it with parentheses (a bare
    # `Mix.env` warns on modern Elixir versions).
    start_permanent: Mix.env() == :prod,
    dialyzer: dialyzer(),
    deps: deps()
  ]
end
# OTP application: Example.Application is the supervision entry point.
def application do
[
extra_applications: [:logger],
mod: {Example.Application, []}
]
end
# Extra warning flags passed through to Dialyzer.
defp dialyzer do
  [
    flags: [
      "-Wunmatched_returns",
      :error_handling,
      :race_conditions,
      :underspecs
    ]
  ]
end
defp deps do
[
# Static analysis (dev only).
{:dialyxir, "> 0.0.0", only: :dev},
# The library under demonstration, taken from the repository root.
{:chronik, path: "../"},
# Coverage reporting (test only).
{:excoveralls, "> 0.0.0", only: :test},
# Linting (dev only).
{:credo, "> 0.0.0", only: :dev}
]
end
end
| 17.512195 | 45 | 0.497214 |
9ebd66e84ebaeda70420500e33ff7baabc420f56 | 372 | ex | Elixir | test/support/biblio/book.ex | jan-sti/gim | 1b8be6c2163577f375825170cc9b01674e59b646 | [
"ECL-2.0",
"Apache-2.0"
] | 4 | 2020-01-21T09:15:24.000Z | 2021-02-04T21:21:56.000Z | test/support/biblio/book.ex | jan-sti/gim | 1b8be6c2163577f375825170cc9b01674e59b646 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2020-04-06T05:20:09.000Z | 2020-06-09T09:56:20.000Z | test/support/biblio/book.ex | jan-sti/gim | 1b8be6c2163577f375825170cc9b01674e59b646 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2020-04-22T08:44:35.000Z | 2020-04-22T08:44:35.000Z | defmodule GimTest.Biblio.Book do
@moduledoc false
use Gim.Schema
alias GimTest.Biblio.Author
alias GimTest.Biblio.Publisher
# Graph schema for a book node: a uniquely indexed title, a body, and
# edges to similar books, its author and its publishers.
# NOTE(review): `reflect:` presumably names the reverse edge on the
# target node — confirm against the Gim.Schema docs.
schema do
property(:title, index: :unique)
property(:body)
has_edges(:similar_to, __MODULE__)
has_edge(:authored_by, Author, reflect: :author_of)
has_edges(:published_by, Publisher, reflect: :publisher_of)
end
end
| 23.25 | 63 | 0.736559 |
9ebd81033885bb7c8022e5746cf9477b52719ad2 | 934 | ex | Elixir | apps/blockchain_web/lib/blockchain_web/schema.ex | sandhose/elixir-blockchain | 4b5c91816ca0710524c352b57fcf2bb37c64c728 | [
"MIT"
] | null | null | null | apps/blockchain_web/lib/blockchain_web/schema.ex | sandhose/elixir-blockchain | 4b5c91816ca0710524c352b57fcf2bb37c64c728 | [
"MIT"
] | null | null | null | apps/blockchain_web/lib/blockchain_web/schema.ex | sandhose/elixir-blockchain | 4b5c91816ca0710524c352b57fcf2bb37c64c728 | [
"MIT"
] | null | null | null | defmodule BlockchainWeb.Schema do
use Absinthe.Schema
use Absinthe.Relay.Schema, :modern
import_types(BlockchainWeb.Schema.ContentTypes)
alias Blockchain.{Chain, Block, Transaction}
alias BlockchainWeb.Resolvers
# Relay node interface: maps a struct to its GraphQL type so global IDs
# can be resolved back to the right object type.
node interface do
resolve_type(fn
%Block{}, _ -> :block
%Transaction{}, _ -> :transaction
_, _ -> nil
end)
end
# Root query type.
query do
# Relay `node(id:)` field: looks the object up by its type tag and id.
node field do
resolve(fn
%{type: :block, id: id}, _ ->
# Chain.lookup/2 yields nil for unknown ids; surfaced as :error.
case Chain.lookup(BlockchainWeb.Application.chain(), id) do
nil -> :error
block -> {:ok, block}
end
%{type: :transaction, id: id}, _ ->
# find_tx/2 returns {tx, _} on success; only the tx is exposed.
case Chain.find_tx(BlockchainWeb.Application.chain(), id) do
nil -> :error
{tx, _} -> {:ok, tx}
end
end)
end
# Forward-paginated Relay connection over all blocks.
connection field(:blocks, node_type: :block, paginate: :forward) do
resolve(&Resolvers.Blocks.list/3)
end
end
end
| 23.35 | 71 | 0.586724 |
9ebd97f083de6d9adfc864c2518bf2e01c539cd3 | 260 | ex | Elixir | lib/hl7/2.5/segments/dsp.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.5/segments/dsp.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.5/segments/dsp.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | defmodule HL7.V2_5.Segments.DSP do
@moduledoc false
require Logger
# Field layout for the HL7 v2.5 DSP segment; positions follow the order
# listed here (segment id first) and default to nil until parsed.
use HL7.Segment,
fields: [
segment: nil,
set_id_dsp: nil,
display_level: nil,
data_line: nil,
logical_break_point: nil,
result_id: nil
]
end
| 16.25 | 34 | 0.626923 |
9ebda9bdc4d673006f4568c06ef236173f890b41 | 93 | ex | Elixir | code/basics/comments.ex | kjwenger/a-is-like-b | 9675b0dfe3a56bc0961a679fa6fdc3ef48297396 | [
"MIT"
] | 1 | 2018-01-26T19:00:59.000Z | 2018-01-26T19:00:59.000Z | code/basics/comments.ex | kjwenger/a-is-like-b | 9675b0dfe3a56bc0961a679fa6fdc3ef48297396 | [
"MIT"
] | null | null | null | code/basics/comments.ex | kjwenger/a-is-like-b | 9675b0dfe3a56bc0961a679fa6fdc3ef48297396 | [
"MIT"
] | null | null | null | # single line
@moduledoc """
module documentation lines
"""
@doc """
documentation lines
"""
| 11.625 | 26 | 0.688172 |
9ebdee4fde2cf0f09bbfe61d37e047b01c85d79a | 4,005 | exs | Elixir | test/vapor/provider/dotenv_test.exs | autodidaddict/vapor | 319853bfce86c9b164adfc6c5ccf2455d4e1d771 | [
"MIT"
] | 533 | 2018-05-27T17:54:58.000Z | 2021-09-26T12:21:20.000Z | test/vapor/provider/dotenv_test.exs | autodidaddict/vapor | 319853bfce86c9b164adfc6c5ccf2455d4e1d771 | [
"MIT"
] | 68 | 2018-05-28T14:26:52.000Z | 2021-09-11T23:11:34.000Z | test/vapor/provider/dotenv_test.exs | autodidaddict/vapor | 319853bfce86c9b164adfc6c5ccf2455d4e1d771 | [
"MIT"
] | 37 | 2018-06-22T00:08:38.000Z | 2021-10-06T17:14:19.000Z | defmodule Vapor.Provider.DotenvTest do
use ExUnit.Case, async: false
alias Vapor.Provider.Dotenv
# Every test starts and ends with a clean slate: the env vars under test
# are unset and the dotenv fixture files removed (File.rm is best-effort;
# the files may not exist).
setup do
System.delete_env("FOO")
System.delete_env("BAR")
System.delete_env("BAZ")
File.rm(".env")
File.rm(".env.test")
File.rm(".env.dev")
on_exit fn ->
File.rm(".env")
File.rm(".env.test")
File.rm(".env.dev")
System.delete_env("FOO")
System.delete_env("BAR")
System.delete_env("BAZ")
end
:ok
end
test "reads the file in as variables" do
contents = """
FOO=foo
BAR = bar
BAZ =this is a baz
"""
File.write(".env", contents)
plan = %Dotenv{}
assert {:ok, %{}} == Vapor.Provider.load(plan)
assert System.get_env("FOO") == "foo"
assert System.get_env("BAR") == "bar"
assert System.get_env("BAZ") == "this is a baz"
end
test "returns correctly if the file doesn't exist" do
plan = %Dotenv{}
{:ok, envs} = Vapor.Provider.load(plan)
assert envs == %{}
end
test "ignores any malformed data" do
contents = """
FOO=foo=
BAR
=this is a baz
"""
File.write(".env", contents)
plan = %Dotenv{}
Vapor.Provider.load(plan)
assert System.get_env("FOO") == "foo="
assert System.get_env("BAR") == nil
assert System.get_env("BAZ") == nil
end
test "ignores comment lines" do
contents = """
# This is a comment
FOO=foo
# BAR=bar
# BAZ=comment with indentation
"""
File.write(".env", contents)
plan = %Dotenv{}
Vapor.Provider.load(plan)
assert System.get_env("FOO") == "foo"
assert System.get_env("BAR") == nil
assert System.get_env("BAZ") == nil
end
test "does not overwrite existing env variables by default" do
contents = """
# This is a comment
FOO=foo
BAR=bar
"""
File.write(".env", contents)
System.put_env("FOO", "existing foo")
plan = %Dotenv{}
Vapor.Provider.load(plan)
assert System.get_env("FOO") == "existing foo"
assert System.get_env("BAR") == "bar"
end
test "overwrites existing variables if specified" do
contents = """
# This is a comment
FOO=foo
BAR=bar
"""
File.write(".env", contents)
System.put_env("FOO", "existing")
plan = %Dotenv{overwrite: true}
Vapor.Provider.load(plan)
assert System.get_env("FOO") == "foo"
assert System.get_env("BAR") == "bar"
assert System.get_env("BAZ") == nil
end
test "stacks multiple files together" do
base_contents = """
FOO=foo
BAR=bar
"""
File.write!(".env", base_contents)
test_contents = """
BAR=test bar
BAZ=test baz
"""
File.write!(".env.test", test_contents)
System.put_env("FOO", "existing")
Vapor.Provider.load(%Dotenv{})
assert System.get_env("FOO") == "existing"
assert System.get_env("BAR") == "test bar"
assert System.get_env("BAZ") == "test baz"
end
test "allows custom files" do
contents = """
FOO=foo
BAR = bar
BAZ =this is a baz
"""
File.write(".env.dev", contents)
plan = %Dotenv{filename: ".env.dev"}
Vapor.Provider.load(plan)
assert System.get_env("FOO") == "foo"
assert System.get_env("BAR") == "bar"
assert System.get_env("BAZ") == "this is a baz"
end
test "reads variables from bash style heredocs" do
contents = """
FOO=<< 'EOF'
I am a quoted
multiline variable
inside a heredoc
EOF
BAR=bar
BAZ= << EOF
I am an unquoted
multiline variable
inside a heredoc
# with something that could be a comment
EOF
"""
File.write(".env", contents)
plan = %Dotenv{}
Vapor.Provider.load(plan)
assert System.get_env("FOO") == " I am a quoted\n multiline variable\n inside a heredoc"
assert System.get_env("BAR") == "bar"
assert System.get_env("BAZ") == " I am an unquoted\n multiline variable\n inside a heredoc\n # with something that could be a comment"
end
end
| 23.284884 | 144 | 0.595006 |
9ebe62b50f49aac09e6b02b546f6dd432cc38b9c | 2,150 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_content_item.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_content_item.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_content_item.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2ContentItem do
@moduledoc """
Container structure for the content to inspect.
## Attributes
* `byteItem` (*type:* `GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2ByteContentItem.t`, *default:* `nil`) - Content data to inspect or redact. Replaces `type` and `data`.
* `table` (*type:* `GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Table.t`, *default:* `nil`) - Structured content for inspection. See https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table to learn more.
* `value` (*type:* `String.t`, *default:* `nil`) - String data to inspect or redact.
"""
use GoogleApi.Gax.ModelBase
# All three fields are optional (nil); presumably at most one is set
# per item (bytes, table, or plain string) — TODO confirm with the API.
@type t :: %__MODULE__{
:byteItem => GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2ByteContentItem.t() | nil,
:table => GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Table.t() | nil,
:value => String.t() | nil
}
# `as:` decodes the nested JSON objects into their generated structs.
field(:byteItem, as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2ByteContentItem)
field(:table, as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Table)
field(:value)
end
defimpl Poison.Decoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2ContentItem do
# Delegates JSON decoding to the generated decode/2 on the model module.
def decode(value, options) do
GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2ContentItem.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2ContentItem do
# Encoding is shared across all generated models via ModelBase.
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 40.566038 | 217 | 0.739535 |
9ebe72ad8cbf8dca9f14e90457a0903fe7ca593a | 15,293 | ex | Elixir | lib/phoenix/socket/transport.ex | mirego/phoenix | 5871888b77b9d34f9a4a33a3644b87f91c8b30ed | [
"MIT"
] | 1 | 2018-07-26T10:42:26.000Z | 2018-07-26T10:42:26.000Z | lib/phoenix/socket/transport.ex | mirego/phoenix | 5871888b77b9d34f9a4a33a3644b87f91c8b30ed | [
"MIT"
] | null | null | null | lib/phoenix/socket/transport.ex | mirego/phoenix | 5871888b77b9d34f9a4a33a3644b87f91c8b30ed | [
"MIT"
] | null | null | null | defmodule Phoenix.Socket.Transport do
@moduledoc """
Outlines the Socket <-> Transport communication.
This module specifies a behaviour that all sockets must implement.
`Phoenix.Socket` is just one possible implementation of a socket
that multiplexes events over multiple channels. Developers can
implement their own sockets as long as they implement the behaviour
outlined here.
Developers interested in implementing custom transports must invoke
the socket API defined in this module. This module also provides
many conveniences to make it easier to build custom transports.
## Workflow
Whenever your endpoint starts, it will automatically invoke the
`child_spec/1` on each listed socket and start that specification
under the endpoint supervisor. For this reason, custom transports
that are manually started in the supervision tree must be listed
after the endpoint.
Whenever the transport receives a connection, it should invoke the
`c:connect/1` callback with a map of metadata. Different sockets may
require different metadatas.
If the connection is accepted, the transport can move the connection
to another process, if so desires, or keep using the same process. The
process responsible for managing the socket should then call `c:init/1`.
For each message received from the client, the transport must call
`c:handle_in/2` on the socket. For each informational message the
transport receives, it should call `c:handle_info/2` on the socket.
On termination, `c:terminate/2` must be called. A special atom with
reason `:closed` can be used to specify that the client terminated
the connection.
## Example
Here is a simple pong socket implementation:
defmodule PingSocket do
@behaviour Phoenix.Socket.Transport
def child_spec(opts) do
# We won't spawn any process, so let's return a dummy task
%{id: Task, start: {Task, :start_link, [fn -> :ok end]}, restart: :transient}
end
def connect(map) do
# Callback to retrieve relevant data from the connection.
# The map contains options, params, transport and endpoint keys.
{:ok, state}
end
def init(state) do
# Now we are effectively inside the process that maintains the socket.
{:ok, state}
end
def handle_in({"ping", _opts}, state) do
{:reply, :ok, {:text, "pong"}, state}
end
def handle_info(_, state) do
{:ok, state}
end
def terminate(_reason, _state) do
:ok
end
end
It can be mounted in your endpoint like any other socket:
socket "/socket", PingSocket, websocket: true, longpoll: true
You can now interact with the socket under `/socket/websocket`
and `/socket/longpoll`.
## Security
This module also provides functions to enable a secure environment
on transports that, at some point, have access to a `Plug.Conn`.
The functionality provided by this module helps in performing "origin"
header checks and ensuring only SSL connections are allowed.
"""
@type state :: term()
@doc """
Returns a child specification for socket management.
This is invoked only once per socket regardless of
the number of transports and should be responsible
for setting up any process structure used exclusively
by the socket regardless of transports.
Each socket connection is started by the transport
and the process that controls the socket likely
belongs to the transport. However, some sockets spawn
new processes, such as `Phoenix.Socket` which spawns
channels, and this gives the ability to start a
supervision tree associated to the socket.
It receives the socket options from the endpoint,
for example:
socket "/my_app", MyApp.Socket, shutdown: 5000
means `child_spec([shutdown: 5000])` will be invoked.
"""
@callback child_spec(keyword) :: :supervisor.child_spec
@doc """
Connects to the socket.
The transport passes a map of metadata and the socket
returns `{:ok, state}` or `:error`. The state must be
stored by the transport and returned in all future
operations.
This function is used for authorization purposes and it
may be invoked outside of the process that effectively
runs the socket.
In the default `Phoenix.Socket` implementation, the
metadata expects the following keys:
* endpoint - the application endpoint
* transport - the transport name
* params - the connection parameters
* options - a keyword list of transport options, often
given by developers when configuring the transport.
It must include a `:serializer` field with the list of
serializers and their requirements
"""
@callback connect(transport_info :: map) :: {:ok, state} | :error
@doc """
Initializes the socket state.
This must be executed from the process that will effectively
operate the socket.
"""
@callback init(state) :: {:ok, state}
@doc """
Handles incoming socket messages.
The message is represented as `{payload, options}`. It must
return one of:
* `{:ok, state}` - continues the socket with no reply
* `{:reply, status, reply, state}` - continues the socket with reply
* `{:stop, reason, state}` - stops the socket
The `reply` is a tuple contain an `opcode` atom and a message that can
be any term. The built-in websocket transport supports both `:text` and
`:binary` opcode and the message must be always iodata. Long polling only
supports text opcode.
"""
@callback handle_in({message :: term, opts :: keyword}, state) ::
{:ok, state}
| {:reply, :ok | :error, {opcode :: atom, message :: term}, state}
| {:stop, reason :: term, state}
@doc """
Handles info messages.
The message is a term. It must return one of:
* `{:ok, state}` - continues the socket with no reply
* `{:push, reply, state}` - continues the socket with reply
* `{:stop, reason, state}` - stops the socket
The `reply` is a tuple contain an `opcode` atom and a message that can
be any term. The built-in websocket transport supports both `:text` and
`:binary` opcode and the message must be always iodata. Long polling only
supports text opcode.
"""
@callback handle_info(message :: term, state) ::
{:ok, state}
| {:push, {opcode :: atom, message :: term}, state}
| {:stop, reason :: term, state}
@doc """
Invoked on termination.
If `reason` is `:closed`, it means the client closed the socket.
"""
@callback terminate(reason :: term, state) :: :ok
require Logger
alias Phoenix.Socket.{Reply, Message}
  @doc false
  # Deprecated shim kept for old transport implementations.
  # Emits a compile/runtime warning and reports protocol version "2.0.0".
  def protocol_version do
    IO.warn "Phoenix.Socket.Transport.protocol_version/0 is deprecated"
    "2.0.0"
  end
  @doc false
  # Deprecated shim: adapts the old 7-arity connect API to the new
  # map-based `c:connect/1` callback on the socket handler.
  def connect(endpoint, handler, _transport_name, transport, serializers, params, _pid \\ self()) do
    IO.warn "Phoenix.Socket.Transport.connect/7 is deprecated"
    handler.connect(%{
      endpoint: endpoint,
      transport: transport,
      options: [serializer: serializers],
      params: params
    })
  end
  @doc false
  # Deprecated dispatch path kept for old transports. Routes an incoming
  # message either to the heartbeat reply, a channel join, or an already
  # running channel process looked up in `channels` by topic.
  def dispatch(msg, channels, socket)
  # Heartbeats are answered inline and never reach a channel process.
  def dispatch(%{ref: ref, topic: "phoenix", event: "heartbeat"}, _channels, socket) do
    IO.warn "Phoenix.Socket.Transport.dispatch/3 is deprecated"
    {:reply, %Reply{join_ref: socket.join_ref, ref: ref, topic: "phoenix", status: :ok, payload: %{}}}
  end
  def dispatch(%Message{} = msg, channels, socket) do
    IO.warn "Phoenix.Socket.Transport.dispatch/3 is deprecated"
    channels
    |> Map.get(msg.topic)
    |> do_dispatch(msg, socket)
  end
  # Join on a topic with no existing channel: look the channel module up
  # on the socket handler and start it; unknown topics are ignored.
  defp do_dispatch(nil, %{event: "phx_join", topic: topic, ref: ref} = msg, socket) do
    case socket.handler.__channel__(topic) do
      {channel, opts} ->
        case Phoenix.Channel.Server.join(socket, channel, msg, opts) do
          {:ok, reply, pid} ->
            {:joined, pid, %Reply{join_ref: ref, ref: ref, topic: topic, status: :ok, payload: reply}}
          {:error, reply} ->
            {:error, reply, %Reply{join_ref: ref, ref: ref, topic: topic, status: :error, payload: reply}}
        end
      nil ->
        reply_ignore(msg, socket)
    end
  end
  # Re-join on an already joined topic: close the old channel process
  # first, then retry the join as if no channel existed.
  defp do_dispatch({pid, _ref}, %{event: "phx_join"} = msg, socket) when is_pid(pid) do
    Logger.debug "Duplicate channel join for topic \"#{msg.topic}\" in #{inspect(socket.handler)}. " <>
                 "Closing existing channel for new join."
    :ok = Phoenix.Channel.Server.close([pid])
    do_dispatch(nil, msg, socket)
  end
  # Non-join message on a topic with no channel: log and reply with an error.
  defp do_dispatch(nil, msg, socket) do
    reply_ignore(msg, socket)
  end
  # Normal case: forward the message to the channel process mailbox.
  defp do_dispatch({channel_pid, _ref}, msg, _socket) do
    send(channel_pid, msg)
    :noreply
  end
  defp reply_ignore(msg, socket) do
    Logger.warn fn -> "Ignoring unmatched topic \"#{msg.topic}\" in #{inspect(socket.handler)}" end
    {:error, :unmatched_topic, %Reply{join_ref: socket.join_ref, ref: msg.ref, topic: msg.topic, status: :error,
                                      payload: %{reason: "unmatched topic"}}}
  end
@doc false
def on_exit_message(topic, join_ref, _reason) do
IO.warn "Phoenix.Socket.Transport.on_exit_mesage/3 is deprecated"
%Message{join_ref: join_ref, ref: join_ref, topic: topic, event: "phx_error", payload: %{}}
end
@doc false
def on_exit_message(topic, reason) do
IO.warn "Phoenix.Transport.on_exit_message/2 is deprecated"
on_exit_message(topic, nil, reason)
end
@doc """
Runs the code reloader if enabled.
"""
def code_reload(conn, endpoint, opts) do
reload? = Keyword.get(opts, :code_reloader, endpoint.config(:code_reloader))
reload? && Phoenix.CodeReloader.reload!(endpoint)
conn
end
  @doc """
  Forces SSL in the socket connection.
  Uses the endpoint configuration to decide so. It is a
  noop if the connection has been halted.
  """
  # Halted connections pass through untouched.
  def force_ssl(%{halted: true} = conn, _socket, _endpoint, _opts) do
    conn
  end
  def force_ssl(conn, socket, endpoint, opts) do
    # force_ssl_config/3 returns cached Plug.SSL options or nil when disabled.
    if force_ssl = force_ssl_config(socket, endpoint, opts) do
      Plug.SSL.call(conn, force_ssl)
    else
      conn
    end
  end
  # Resolves and caches the Plug.SSL options for this socket.
  # Returns nil (cached) when force_ssl is not configured, so callers
  # can treat the result as a simple truthy/falsy switch.
  defp force_ssl_config(socket, endpoint, opts) do
    Phoenix.Config.cache(endpoint, {:force_ssl, socket}, fn _ ->
      opts =
        if force_ssl = Keyword.get(opts, :force_ssl, endpoint.config(:force_ssl)) do
          force_ssl
          # Default the redirect host to the endpoint's configured host.
          |> Keyword.put_new(:host, {endpoint, :host, []})
          |> Plug.SSL.init()
        end
      {:cache, opts}
    end)
  end
@doc """
Logs the transport request.
Available for transports that generate a connection.
"""
def transport_log(conn, level) do
if level do
Plug.Logger.call(conn, Plug.Logger.init(log: level))
else
conn
end
end
  @doc """
  Checks the origin request header against the list of allowed origins.
  Should be called by transports before connecting when appropriate.
  If the origin header matches the allowed origins, no origin header was
  sent or no origin was configured, it will return the given connection.
  Otherwise a 403 Forbidden response will be sent and the connection halted.
  It is a noop if the connection has been halted.
  """
  # Bodiless head declares the default response sender.
  def check_origin(conn, handler, endpoint, opts, sender \\ &Plug.Conn.send_resp/1)
  def check_origin(%Plug.Conn{halted: true} = conn, _handler, _endpoint, _opts, _sender),
    do: conn
  def check_origin(conn, handler, endpoint, opts, sender) do
    import Plug.Conn
    origin = conn |> get_req_header("origin") |> List.first()
    # Cached config: false (disabled), true, a parsed origin list, or an MFA.
    check_origin = check_origin_config(handler, endpoint, opts)
    cond do
      is_nil(origin) or check_origin == false ->
        conn
      origin_allowed?(check_origin, URI.parse(origin), endpoint) ->
        conn
      true ->
        Logger.error """
        Could not check origin for Phoenix.Socket transport.
        This happens when you are attempting a socket connection to
        a different host than the one configured in your config/
        files. For example, in development the host is configured
        to "localhost" but you may be trying to access it from
        "127.0.0.1". To fix this issue, you may either:
          1. update [url: [host: ...]] to your actual host in the
             config file for your current environment (recommended)
          2. pass the :check_origin option when configuring your
             endpoint or when configuring the transport in your
             UserSocket module, explicitly outlining which origins
             are allowed:
              check_origin: ["https://example.com",
                             "//another.com:888", "//other.com"]
        """
        # Reject with 403 and halt so downstream plugs do not run.
        resp(conn, :forbidden, "")
        |> sender.()
        |> halt()
    end
  end
  # Resolves and caches the :check_origin setting per handler.
  # Accepted shapes: a list of origin strings (parsed into
  # {scheme, host, port} tuples), a boolean, or an MFA tuple.
  defp check_origin_config(handler, endpoint, opts) do
    Phoenix.Config.cache(endpoint, {:check_origin, handler}, fn _ ->
      check_origin =
        case Keyword.get(opts, :check_origin, endpoint.config(:check_origin)) do
          origins when is_list(origins) ->
            Enum.map(origins, &parse_origin/1)
          boolean when is_boolean(boolean) ->
            boolean
          {module, function, arguments} ->
            {module, function, arguments}
          invalid ->
            raise ArgumentError, "check_origin expects a boolean, list of hosts, or MFA tuple, got: #{inspect(invalid)}"
        end
      {:cache, check_origin}
    end)
  end
  # Parses one configured origin string into a {scheme, host, port} tuple.
  # nil components act as wildcards during comparison (see compare?/2).
  # Raises if the string has no parsable host.
  defp parse_origin(origin) do
    case URI.parse(origin) do
      %{host: nil} ->
        raise ArgumentError,
          "invalid check_origin: #{inspect origin}. " <>
          "Expected an origin with a host that is parsable by URI.parse/1. For example: " <>
          "[\"https://example.com\", \"//another.com:888\", \"//other.com\"]"
      %{scheme: scheme, port: port, host: host} ->
        {scheme, host, port}
    end
  end
  # MFA config: user callback decides, receiving the parsed request URI.
  defp origin_allowed?({module, function, arguments}, uri, _endpoint),
    do: apply(module, function, [uri | arguments])
  # A request origin without a host is never allowed.
  defp origin_allowed?(_check_origin, %{host: nil}, _endpoint),
    do: false
  # check_origin: true — compare against the endpoint's configured host only.
  defp origin_allowed?(true, uri, endpoint),
    do: compare?(uri.host, host_to_binary(endpoint.config(:url)[:host]))
  # Explicit origin list — fall through to the 2-arity matcher below.
  defp origin_allowed?(check_origin, uri, _endpoint) when is_list(check_origin),
    do: origin_allowed?(uri, check_origin)
  # True if any configured {scheme, host, port} tuple matches the request
  # origin; nil components in the tuple act as wildcards.
  defp origin_allowed?(uri, allowed_origins) do
    %{scheme: origin_scheme, host: origin_host, port: origin_port} = uri
    Enum.any?(allowed_origins, fn {allowed_scheme, allowed_host, allowed_port} ->
      compare?(origin_scheme, allowed_scheme) and
        compare?(origin_port, allowed_port) and
        compare_host?(origin_host, allowed_host)
    end)
  end
defp compare?(request_val, allowed_val) do
is_nil(allowed_val) or request_val == allowed_val
end
defp compare_host?(_request_host, nil),
do: true
defp compare_host?(request_host, "*." <> allowed_host),
do: String.ends_with?(request_host, allowed_host)
defp compare_host?(request_host, allowed_host),
do: request_host == allowed_host
  # TODO: Deprecate {:system, env_var} once we require Elixir v1.7+
  # Resolves a {:system, var} host tuple to the env var's value at runtime;
  # plain hosts pass through unchanged.
  defp host_to_binary({:system, env_var}), do: host_to_binary(System.get_env(env_var))
  defp host_to_binary(host), do: host
end
| 33.463895 | 120 | 0.671549 |
9ebe8b8392232fd361f625fc4308b20ae2f5959d | 117 | exs | Elixir | config/test.exs | qgadrian/locux | 77404174c5c66bd27fed0d07404f916c11060393 | [
"MIT"
] | null | null | null | config/test.exs | qgadrian/locux | 77404174c5c66bd27fed0d07404f916c11060393 | [
"MIT"
] | null | null | null | config/test.exs | qgadrian/locux | 77404174c5c66bd27fed0d07404f916c11060393 | [
"MIT"
] | null | null | null | use Mix.Config
config :logger, level: :debug
config :locust, num_of_workers: 1
config :locust, num_of_requests: 10
| 16.714286 | 35 | 0.769231 |
9ebedffd6b8dec174a1ef94c1edc7620feb0b7cb | 1,433 | exs | Elixir | test/absinthe/type/deprecation_test.exs | maartenvanvliet/absinthe | ebe820717200f53756e225b3dffbfefe924a94d3 | [
"MIT"
] | null | null | null | test/absinthe/type/deprecation_test.exs | maartenvanvliet/absinthe | ebe820717200f53756e225b3dffbfefe924a94d3 | [
"MIT"
] | 2 | 2020-07-21T05:23:37.000Z | 2020-08-26T04:56:12.000Z | test/absinthe/type/deprecation_test.exs | jlgeering/absinthe | a3dbc29640d613928398626ad75a8f03203a1720 | [
"MIT"
] | null | null | null | defmodule Absinthe.Type.DeprecationTest do
use Absinthe.Case, async: true
alias Absinthe.Type
defmodule TestSchema do
use Absinthe.Schema
query do
# Query type must exist
end
input_object :profile do
description "A profile"
field :name, :string
field :profile_picture,
type: :string,
args: [
width: [type: :integer],
height: [type: :integer],
size: [type: :string, deprecate: "Not explicit enough"],
source: [type: :string, deprecate: true]
]
field :email_address, :string do
deprecate "privacy"
end
field :address, :string, deprecate: true
end
end
describe "fields" do
test "can be deprecated" do
obj = TestSchema.__absinthe_type__(:profile)
assert Type.deprecated?(obj.fields.email_address)
assert "privacy" == obj.fields.email_address.deprecation.reason
assert Type.deprecated?(obj.fields.address)
assert nil == obj.fields.address.deprecation.reason
end
end
describe "arguments" do
test "can be deprecated" do
field = TestSchema.__absinthe_type__(:profile).fields.profile_picture
assert Type.deprecated?(field.args.size)
assert "Not explicit enough" == field.args.size.deprecation.reason
assert Type.deprecated?(field.args.source)
assert nil == field.args.source.deprecation.reason
end
end
end
| 26.054545 | 75 | 0.656664 |
9ebef61a5d135d6d4498a89a22143ce563dbe4c4 | 565 | ex | Elixir | lib/roger/partition_supervisor.ex | jnylen/roger | 074338eceae4783221088e8b235a635452708ef1 | [
"MIT"
] | null | null | null | lib/roger/partition_supervisor.ex | jnylen/roger | 074338eceae4783221088e8b235a635452708ef1 | [
"MIT"
] | null | null | null | lib/roger/partition_supervisor.ex | jnylen/roger | 074338eceae4783221088e8b235a635452708ef1 | [
"MIT"
] | null | null | null | defmodule Roger.PartitionSupervisor do
@moduledoc """
The supervisor for all partitions.
"""
use Supervisor
def start_link() do
Supervisor.start_link(__MODULE__, [], name: __MODULE__)
end
def init([]) do
children = [
supervisor(Roger.Partition.ContainingSupervisor, [], restart: :transient)
]
supervise(children, strategy: :simple_one_for_one)
end
def start_child(partition) do
Supervisor.start_child(__MODULE__, [partition])
end
def stop_child(pid) do
Supervisor.terminate_child(__MODULE__, pid)
end
end
| 20.178571 | 79 | 0.709735 |
9ebf28eb836dd108bc2e5514db17080e9d803bac | 1,710 | exs | Elixir | apps/ewallet/test/ewallet/exporters/csv_exporter_test.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet/test/ewallet/exporters/csv_exporter_test.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet/test/ewallet/exporters/csv_exporter_test.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWallet.CSVExporterTest do
  use EWallet.DBCase
  import Ecto.Query
  import EWalletDB.Factory
  alias EWallet.CSVExporter
  alias EWallet.Web.V1.CSV.TransactionSerializer
  alias EWalletDB.{Export, Transaction}
  describe "start/4" do
    test "returns the pid and the export record" do
      # Insert transactions with a specific token to avoid side effects.
      token = insert(:token)
      transactions = insert_list(5, :transaction, to_token: token)
      user = insert(:user)
      # Seed a pending export record owned by the user.
      {:ok, export} =
        Export.insert(%{
          schema: "some_schema",
          format: "csv",
          status: Export.new(),
          completion: 0,
          originator: user,
          params: %{},
          user_uuid: user.uuid
        })
      # Scope the query to just the transactions created above.
      query = from(t in Transaction, where: t.to_token_uuid == ^token.uuid)
      {res, pid, export} = CSVExporter.start(export, export.schema, query, TransactionSerializer)
      assert res == :ok
      assert is_pid(pid)
      assert %Export{} = export
      # The size estimate may legitimately be zero before rows are written.
      assert export.estimated_size >= 0
      assert export.total_count == length(transactions)
    end
  end
end
9ebf2d12f375f2dcd51aa1f68a75a647956d4bdc | 1,274 | exs | Elixir | mix.exs | polymetis/Elixir-HttpBuilder | aecaa8e1c66ee6153f88d18e51ccaad6e91d745c | [
"MIT"
] | null | null | null | mix.exs | polymetis/Elixir-HttpBuilder | aecaa8e1c66ee6153f88d18e51ccaad6e91d745c | [
"MIT"
] | null | null | null | mix.exs | polymetis/Elixir-HttpBuilder | aecaa8e1c66ee6153f88d18e51ccaad6e91d745c | [
"MIT"
] | null | null | null | defmodule HttpBuilder.Mixfile do
use Mix.Project
def project do
[
app: :http_builder,
name: "HttpBuilder",
source_url: "https://github.com/matthewoden/Elixir-HttpBuilder",
docs: docs(),
package: package(),
description: description(),
version: "0.3.0",
elixir: "~> 1.5",
start_permanent: Mix.env == :prod,
deps: deps()
]
end
def application do
[]
end
defp package do
[
licenses: ["MIT"],
maintainers: ["Matthew Potter"],
links: %{"GitHub" => "https://github.com/matthewoden/Elixir-HttpBuilder"}
]
end
def description do
"A simple DSL for composing HTTP requests. Based off the lovely Elm-Http-Builder."
end
defp docs do
[
main: "HttpBuilder",
extras: ["README.md"],
logo: "images/logo.png"
]
end
defp deps do
[
{:hackney, "~> 1.10", optional: true},
{:httpoison, "~> 0.13.0", optional: true},
{:poison, "~> 3.0", optional: true},
{:httpotion, "~> 3.0", optional: true},
{:ibrowse, "~> 4.4.0", optional: true},
{:ex_doc, "~> 0.16", only: :dev, runtime: false},
{:httparrot, "~> 1.0", only: :test},
{:proxy, only: :test, github: "matthewoden/proxy"}
]
end
end
| 22.75 | 86 | 0.5573 |
9ebf53decd6629d13a7ce20cc900567dfd08542f | 62,844 | exs | Elixir | test/gradient/ast_specifier_test.exs | Fl4m3Ph03n1x/gradient | 60d7d3fe2ebdf68747325c1e852959f8b92fdcee | [
"Apache-2.0"
] | null | null | null | test/gradient/ast_specifier_test.exs | Fl4m3Ph03n1x/gradient | 60d7d3fe2ebdf68747325c1e852959f8b92fdcee | [
"Apache-2.0"
] | null | null | null | test/gradient/ast_specifier_test.exs | Fl4m3Ph03n1x/gradient | 60d7d3fe2ebdf68747325c1e852959f8b92fdcee | [
"Apache-2.0"
] | null | null | null | defmodule Gradient.AstSpecifierTest do
use ExUnit.Case
doctest Gradient.AstSpecifier
alias Gradient.AstSpecifier
import Gradient.TestHelpers
setup_all state do
{:ok, state}
end
describe "run_mappers/2" do
test "messy test on simple_app" do
{tokens, ast} = example_data()
new_ast = AstSpecifier.run_mappers(ast, tokens)
assert is_list(new_ast)
end
test "integer" do
{tokens, ast} = load("basic/Elixir.Basic.Int.beam", "basic/int.ex")
[block, inline | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 2, :int, 0, [{:clause, 2, [], [], [{:integer, 2, 1}]}]} = inline
assert {:function, 4, :int_block, 0, [{:clause, 4, [], [], [{:integer, 5, 2}]}]} = block
end
test "float" do
{tokens, ast} = load("basic/Elixir.Basic.Float.beam", "basic/float.ex")
[block, inline | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 2, :float, 0, [{:clause, 2, [], [], [{:float, 2, 0.12}]}]} = inline
assert {:function, 4, :float_block, 0, [{:clause, 4, [], [], [{:float, 5, 0.12}]}]} = block
end
test "atom" do
{tokens, ast} = load("basic/Elixir.Basic.Atom.beam", "basic/atom.ex")
[block, inline | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 2, :atom, 0, [{:clause, 2, [], [], [{:atom, 2, :ok}]}]} = inline
assert {:function, 4, :atom_block, 0, [{:clause, 4, [], [], [{:atom, 5, :ok}]}]} = block
end
test "char" do
{tokens, ast} = load("basic/Elixir.Basic.Char.beam", "basic/char.ex")
[block, inline | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 2, :char, 0, [{:clause, 2, [], [], [{:integer, 2, 99}]}]} = inline
assert {:function, 4, :char_block, 0, [{:clause, 4, [], [], [{:integer, 5, 99}]}]} = block
end
test "charlist" do
{tokens, ast} = load("basic/Elixir.Basic.Charlist.beam", "basic/charlist.ex")
[block, inline | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
# TODO propagate location to each charlist element
assert {:function, 2, :charlist, 0,
[
{:clause, 2, [], [],
[
{:cons, 2, {:integer, 2, 97},
{:cons, 2, {:integer, 2, 98}, {:cons, 2, {:integer, 2, 99}, {nil, 2}}}}
]}
]} = inline
assert {:function, 4, :charlist_block, 0,
[
{:clause, 4, [], [],
[
{:cons, 5, {:integer, 5, 97},
{:cons, 5, {:integer, 5, 98}, {:cons, 5, {:integer, 5, 99}, {nil, 5}}}}
]}
]} = block
end
test "string" do
{tokens, ast} = load("basic/Elixir.Basic.String.beam", "basic/string.ex")
[block, inline | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 2, :string, 0,
[
{:clause, 2, [], [],
[{:bin, 2, [{:bin_element, 2, {:string, 2, 'abc'}, :default, :default}]}]}
]} = inline
assert {:function, 4, :string_block, 0,
[
{:clause, 4, [], [],
[{:bin, 5, [{:bin_element, 5, {:string, 5, 'abc'}, :default, :default}]}]}
]} = block
end
test "tuple" do
{tokens, ast} = load("Elixir.Tuple.beam", "tuple.ex")
[tuple_in_str2, tuple_in_str, tuple_in_list, _list_in_tuple, tuple | _] =
AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
# FIXME
assert {:function, 18, :tuple_in_str2, 0,
[
{:clause, 18, [], [],
[
{:match, 19, {:var, 19, :_msg@1},
{:bin, 20,
[
{:bin_element, 20, {:string, 20, '\nElixir formatter not exist for '},
:default, :default},
{:bin_element, 20,
{:call, 20, {:remote, 20, {:atom, 20, Kernel}, {:atom, 20, :inspect}},
[
{:tuple, 20, []},
{:cons, 20, {:tuple, 20, [{:atom, 20, :pretty}, {:atom, 20, true}]},
{:cons, 20,
{:tuple, 20, [{:atom, 20, :limit}, {:atom, 20, :infinity}]},
{nil, 20}}}
]}, :default, [:binary]},
{:bin_element, 20, {:string, 20, ' using default \n'}, :default, :default}
]}},
{:call, 22, {:remote, 22, {:atom, 22, String}, {:atom, 22, :to_charlist}},
[
{:bin, 22,
[
{:bin_element, 22,
{:call, 22,
{:remote, 22, {:atom, 22, IO.ANSI}, {:atom, 22, :light_yellow}}, []},
:default, [:binary]},
{:bin_element, 22, {:var, 22, :_msg@1}, :default, [:binary]},
{:bin_element, 22,
{:call, 22, {:remote, 22, {:atom, 22, IO.ANSI}, {:atom, 22, :reset}},
[]}, :default, [:binary]}
]}
]}
]}
]} = tuple_in_str2
assert {:function, 14, :tuple_in_str, 0,
[
{:clause, 14, [], [],
[
{:bin, 15,
[
{:bin_element, 15, {:string, 15, 'abc '}, :default, :default},
{:bin_element, 15,
{:call, 15, {:remote, 15, {:atom, 15, Kernel}, {:atom, 15, :inspect}},
[
{:atom, 15, :abc},
{:cons, 15, {:tuple, 15, [{:atom, 15, :limit}, {:atom, 15, :infinity}]},
{:cons, 15,
{:tuple, 15,
[
{:atom, 15, :label},
{:bin, 15,
[
{:bin_element, 15, {:string, 15, 'abc '}, :default, :default},
{:bin_element, 15,
{:case, [generated: true, location: 15], {:integer, 15, 13},
[
{:clause, [generated: true, location: 15],
[{:var, [generated: true, location: 15], :_@1}],
[
[
{:call, [generated: true, location: 15],
{:remote, [generated: true, location: 15],
{:atom, [generated: true, location: 15], :erlang},
{:atom, [generated: true, location: 15], :is_binary}},
[{:var, [generated: true, location: 15], :_@1}]}
]
], [{:var, [generated: true, location: 15], :_@1}]},
{:clause, [generated: true, location: 15],
[{:var, [generated: true, location: 15], :_@1}], [],
[
{:call, [generated: true, location: 15],
{:remote, [generated: true, location: 15],
{:atom, [generated: true, location: 15], String.Chars},
{:atom, [generated: true, location: 15], :to_string}},
[{:var, [generated: true, location: 15], :_@1}]}
]}
]}, :default, [:binary]}
]}
]}, {nil, 15}}}
]}, :default, [:binary]},
{:bin_element, 15, {:integer, 15, 12}, :default, [:integer]}
]}
]}
]} = tuple_in_str
assert {:function, 10, :tuple_in_list, 0,
[
{:clause, 10, [], [],
[
{:cons, 11, {:tuple, 11, [{:atom, 11, :a}, {:integer, 11, 12}]},
{:cons, 11, {:tuple, 11, [{:atom, 11, :b}, {:atom, 11, :ok}]}, {nil, 11}}}
]}
]} = tuple_in_list
assert {:function, 2, :tuple, 0,
[{:clause, 2, [], [], [{:tuple, 3, [{:atom, 3, :ok}, {:integer, 3, 12}]}]}]} = tuple
end
test "binary" do
{tokens, ast} = load("basic/Elixir.Basic.Binary.beam", "basic/binary.ex")
[complex2, complex, bin_block, bin | _] =
AstSpecifier.run_mappers(ast, tokens)
|> Enum.reverse()
assert {:function, 13, :complex2, 0,
[
{:clause, 13, [], [],
[
{:bin, 14,
[
{:bin_element, 14, {:string, 14, 'abc '}, :default, :default},
{:bin_element, 14,
{:call, 14, {:remote, 14, {:atom, 14, Kernel}, {:atom, 14, :inspect}},
[{:integer, 14, 12}]}, :default, [:binary]},
{:bin_element, 14, {:string, 14, ' cba'}, :default, :default}
]}
]}
]} = complex2
assert {:function, 8, :complex, 0,
[
{:clause, 8, [], [],
[
{:match, 9, {:var, 9, :_x@2},
{:fun, 9,
{:clauses,
[
{:clause, 9, [{:var, 9, :_x@1}], [],
[{:op, 9, :+, {:var, 9, :_x@1}, {:integer, 9, 1}}]}
]}}},
{:bin, 10,
[
{:bin_element, 10, {:integer, 10, 49}, :default, [:integer]},
{:bin_element, 10, {:integer, 10, 48}, :default, [:integer]},
{:bin_element, 10, {:call, 10, {:var, 10, :_x@2}, [{:integer, 10, 50}]},
:default, [:integer]}
]}
]}
]} = complex
assert {:function, 4, :bin_block, 0,
[
{:clause, 4, [], [],
[
{:bin, 5,
[
{:bin_element, 5, {:integer, 5, 49}, :default, [:integer]},
{:bin_element, 5, {:integer, 5, 48}, :default, [:integer]},
{:bin_element, 5, {:integer, 5, 48}, :default, [:integer]}
]}
]}
]} = bin_block
assert {:function, 2, :bin, 0,
[
{:clause, 2, [], [],
[
{:bin, 2,
[
{:bin_element, 2, {:integer, 2, 49}, :default, [:integer]},
{:bin_element, 2, {:integer, 2, 48}, :default, [:integer]},
{:bin_element, 2, {:integer, 2, 48}, :default, [:integer]}
]}
]}
]} = bin
end
test "case conditional" do
{tokens, ast} = load("conditional/Elixir.Conditional.Case.beam", "conditional/case.ex")
[block, inline | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 2, :case_, 0,
[
{:clause, 2, [], [],
[
{:case, 4, {:integer, 4, 5},
[
{:clause, 5, [{:integer, 5, 5}], [], [{:atom, 5, :ok}]},
{:clause, 6, [{:var, 6, :_}], [], [{:atom, 6, :error}]}
]}
]}
]} = inline
assert {:function, 9, :case_block, 0,
[
{:clause, 9, [], [],
[
{:case, 10, {:integer, 10, 5},
[
{:clause, 11, [{:integer, 11, 5}], [], [{:atom, 11, :ok}]},
{:clause, 12, [{:var, 12, :_}], [], [{:atom, 12, :error}]}
]}
]}
]} = block
end
test "if conditional" do
{tokens, ast} = load("conditional/Elixir.Conditional.If.beam", "conditional/if.ex")
[block, inline, if_ | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 12, :if_block, 0,
[
{:clause, 12, [], [],
[
{:case, 13, {:op, 13, :<, {:integer, 13, 1}, {:integer, 13, 5}},
[
{:clause, [generated: true, location: 13],
[{:atom, [generated: true, location: 13], false}], [],
[{:atom, 16, :error}]},
{:clause, [generated: true, location: 13],
[{:atom, [generated: true, location: 13], true}], [], [{:atom, 14, :ok}]}
]}
]}
]} = block
assert {:function, 10, :if_inline, 0,
[
{:clause, 10, [], [],
[
{:case, 10, {:op, 10, :<, {:integer, 10, 1}, {:integer, 10, 5}},
[
{:clause, [generated: true, location: 10],
[{:atom, [generated: true, location: 10], false}], [],
[{:atom, 10, :error}]},
{:clause, [generated: true, location: 10],
[{:atom, [generated: true, location: 10], true}], [], [{:atom, 10, :ok}]}
]}
]}
]} = inline
assert {:function, 2, :if_, 0,
[
{:clause, 2, [], [],
[
{:case, 4, {:op, 4, :<, {:integer, 4, 1}, {:integer, 4, 5}},
[
{:clause, [generated: true, location: 4],
[{:atom, [generated: true, location: 4], false}], [],
[{:atom, 7, :error}]},
{:clause, [generated: true, location: 4],
[{:atom, [generated: true, location: 4], true}], [], [{:atom, 5, :ok}]}
]}
]}
]} = if_
end
test "unless conditional" do
{tokens, ast} = load("conditional/Elixir.Conditional.Unless.beam", "conditional/unless.ex")
[block | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {
:function,
2,
:unless_block,
0,
[
{:clause, 2, [], [],
[
{:case, 3, {:atom, 3, false},
[
{:clause, [generated: true, location: 3],
[{:atom, [generated: true, location: 3], false}], [], [{:atom, 4, :ok}]},
{:clause, [generated: true, location: 3],
[{:atom, [generated: true, location: 3], true}], [], [{:atom, 6, :error}]}
]}
]}
]
} = block
end
test "cond conditional" do
{tokens, ast} = load("conditional/Elixir.Conditional.Cond.beam", "conditional/cond.ex")
[block, inline | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 2, :cond_, 1,
[
{:clause, 2, [{:var, 2, :_a@1}], [],
[
{:case, 4, {:op, 5, :==, {:var, 5, :_a@1}, {:atom, 5, :ok}},
[
{:clause, 5, [{:atom, 5, true}], [], [{:atom, 5, :ok}]},
{:clause, 6, [{:atom, 6, false}], [],
[
{:case, 6, {:op, 6, :>, {:var, 6, :_a@1}, {:integer, 6, 5}},
[
{:clause, 6, [{:atom, 6, true}], [], [{:atom, 6, :ok}]},
{:clause, 7, [{:atom, 7, false}], [],
[
{:case, 7, {:atom, 7, true},
[
{:clause, 7, [{:atom, 7, true}], [], [{:atom, 7, :error}]},
{:clause, [generated: true, location: 7],
[{:atom, [generated: true, location: 7], false}], [],
[
{:call, 7,
{:remote, 7, {:atom, 7, :erlang}, {:atom, 7, :error}},
[{:atom, 7, :cond_clause}]}
]}
]}
]}
]}
]}
]}
]}
]} = inline
assert {:function, 10, :cond_block, 0,
[
{:clause, 10, [], [],
[
{:match, 11, {:var, 11, :_a@1}, {:integer, 11, 5}},
{:case, 13, {:op, 14, :==, {:var, 14, :_a@1}, {:atom, 14, :ok}},
[
{:clause, 14, [{:atom, 14, true}], [], [{:atom, 14, :ok}]},
{:clause, 15, [{:atom, 15, false}], [],
[
{:case, 15, {:op, 15, :>, {:var, 15, :_a@1}, {:integer, 15, 5}},
[
{:clause, 15, [{:atom, 15, true}], [], [{:atom, 15, :ok}]},
{:clause, 16, [{:atom, 16, false}], [],
[
{:case, 16, {:atom, 16, true},
[
{:clause, 16, [{:atom, 16, true}], [], [{:atom, 16, :error}]},
{:clause, [generated: true, location: 16],
[{:atom, [generated: true, location: 16], false}], [],
[
{:call, 16,
{:remote, 16, {:atom, 16, :erlang}, {:atom, 16, :error}},
[{:atom, 16, :cond_clause}]}
]}
]}
]}
]}
]}
]}
]}
]} = block
end
test "with conditional" do
{tokens, ast} = load("conditional/Elixir.Conditional.With.beam", "conditional/with.ex")
[block | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 6, :test_with, 0,
[
{:clause, 6, [], [],
[
{:case, [generated: true, location: 7], {:call, 7, {:atom, 7, :ok_res}, []},
[
{:clause, 7, [{:tuple, 7, [{:atom, 7, :ok}, {:var, 7, :__a@1}]}], [],
[{:integer, 8, 12}]},
{:clause, [generated: true, location: 7], [{:var, 10, :_}], [],
[
{:block, 7,
[
{:call, 11, {:remote, 11, {:atom, 11, IO}, {:atom, 11, :puts}},
[
{:bin, 11,
[{:bin_element, 11, {:string, 11, 'error'}, :default, :default}]}
]},
{:cons, 12, {:integer, 12, 49},
{:cons, 12, {:integer, 12, 50}, {nil, 12}}}
]}
]}
]}
]}
]} == block
end
# Scratch test kept around for manual debugging of the specifier output;
# excluded from normal runs via the :skip tag.
@tag :skip
test "basic function return" do
  ex_file = "basic.ex"
  beam_file = "Elixir.Basic.beam"
  {tokens, ast} = load(beam_file, ex_file)

  specified_ast = AstSpecifier.run_mappers(ast, tokens)
  # Printed for manual inspection when the :skip filter is lifted.
  IO.inspect(specified_ast)

  assert is_list(specified_ast)
end
end
# NOTE(review): the test name says specify_line/2 but the call below passes
# three arguments (form, tokens, opts) — consider renaming to specify_line/3.
test "specify_line/2" do
  {tokens, _} = example_data()
  opts = [end_line: -1]

  # A form already aligned with a token keeps its line (21); the returned
  # token stream is rebound and threaded into the next call.
  assert {{:integer, 21, 12}, tokens} =
           AstSpecifier.specify_line({:integer, 21, 12}, tokens, opts)

  # A form reported on line 20 is re-specified to line 22 — presumably the
  # line of the matching token; confirm against the specify_line implementation.
  assert {{:integer, 22, 12}, _tokens} =
           AstSpecifier.specify_line({:integer, 20, 12}, tokens, opts)
end
# cons_to_charlist/1 should flatten an Erlang cons-cell AST of integer
# codepoints (?1, ?0, ?0) back into the charlist '100'.
test "cons_to_charlist/1" do
  charlist_ast =
    {:cons, 0, {:integer, 0, 49},
     {:cons, 0, {:integer, 0, 48}, {:cons, 0, {:integer, 0, 48}, {nil, 0}}}}

  assert AstSpecifier.cons_to_charlist(charlist_ast) == '100'
end
# Manual-inspection helpers: both tests only print data and are skipped by
# default; enable them locally when debugging the specifier.
describe "test that prints result" do
  @tag :skip
  test "specify/1" do
    {_tokens, forms} = example_data()

    AstSpecifier.specify(forms)
    |> IO.inspect()
  end

  @tag :skip
  test "display forms" do
    {_, forms} = example_data()
    IO.inspect(forms)
  end
end
# Arguments of a local call should each carry the line they occupy in the
# source (7, 8, 9), while the call node itself keeps the callee's line (6).
test "function call" do
  {tokens, ast} = load("Elixir.Call.beam", "call.ex")
  [call, _ | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()

  assert {:function, 5, :call, 0,
          [
            {:clause, 5, [], [],
             [
               {:call, 6, {:atom, 6, :get_x},
                [
                  {:bin, 7, [{:bin_element, 7, {:string, 7, 'ala'}, :default, :default}]},
                  {:cons, 8, {:integer, 8, 97},
                   {:cons, 8, {:integer, 8, 108}, {:cons, 8, {:integer, 8, 97}, {nil, 8}}}},
                  {:integer, 9, 12}
                ]}
             ]}
          ]} = call
end
test "pipe" do
{tokens, ast} = load("Elixir.Pipe.beam", "pipe_op.ex")
[block | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 2, :pipe, 0,
[
{:clause, 2, [], [],
[
{:call, 5, {:remote, 5, {:atom, 5, :erlang}, {:atom, 5, :length}},
[
{:call, 4, {:remote, 4, {:atom, 4, Enum}, {:atom, 4, :filter}},
[
{:cons, 4, {:integer, 4, 1},
{:cons, 4,
{
:integer,
4,
2
}, {:cons, 4, {:integer, 4, 3}, {nil, 4}}}},
{:fun, 4,
{:clauses,
[
{:clause, 4, [{:var, 4, :_x@1}], [],
[{:op, 4, :<, {:var, 4, :_x@1}, {:integer, 4, 3}}]}
]}}
]}
]}
]}
]} = block
end
test "guards" do
{tokens, ast} = load("conditional/Elixir.Conditional.Guard.beam", "conditional/guards.ex")
[guarded_case, guarded_fun | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 3, :guarded_fun, 1,
[
{:clause, 3, [{:var, 3, :_x@1}],
[
[
{:call, 3, {:remote, 3, {:atom, 3, :erlang}, {:atom, 3, :is_integer}},
[{:var, 3, :_x@1}]}
],
[
{:op, 3, :andalso, {:op, 3, :>, {:var, 3, :_x@1}, {:integer, 3, 3}},
{:op, 3, :<, {:var, 3, :_x@1}, {:integer, 3, 6}}}
]
], [{:atom, 3, :ok}]}
]} = guarded_fun
assert {:function, 6, :guarded_case, 1,
[
{:clause, 6, [{:var, 6, :_x@1}], [],
[
{:case, 7, {:var, 7, :_x@1},
[
{:clause, 8, [{:integer, 8, 0}], [],
[{:tuple, 8, [{:atom, 8, :ok}, {:integer, 8, 1}]}]},
{:clause, 9, [{:var, 9, :_i@1}],
[[{:op, 9, :>, {:var, 9, :_i@1}, {:integer, 9, 0}}]],
[
{:tuple, 9,
[{:atom, 9, :ok}, {:op, 9, :+, {:var, 9, :_i@1}, {:integer, 9, 1}}]}
]},
{:clause, 10, [{:var, 10, :__otherwise@1}], [], [{:atom, 10, :error}]}
]}
]}
]} = guarded_case
end
test "range" do
{tokens, ast} = load("Elixir.RangeEx.beam", "range.ex")
[to_list, match_range, rev_range_step, range_step, range | _] =
AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 18, :to_list, 0,
[
{:clause, 18, [], [],
[
{:call, 19, {:remote, 19, {:atom, 19, Enum}, {:atom, 19, :to_list}},
[
{:map, 19,
[
{:map_field_assoc, 19, {:atom, 19, :__struct__}, {:atom, 19, Range}},
{:map_field_assoc, 19, {:atom, 19, :first}, {:integer, 19, 1}},
{:map_field_assoc, 19, {:atom, 19, :last}, {:integer, 19, 100}},
{:map_field_assoc, 19, {:atom, 19, :step}, {:integer, 19, 5}}
]}
]}
]}
]} = to_list
assert {:function, 14, :match_range, 0,
[
{:clause, 14, [], [],
[
{:match, 15,
{:map, 15,
[
{:map_field_exact, 15, {:atom, 15, :__struct__}, {:atom, 15, Range}},
{:map_field_exact, 15, {:atom, 15, :first}, {:var, 15, :_first@1}},
{:map_field_exact, 15, {:atom, 15, :last}, {:var, 15, :_last@1}},
{:map_field_exact, 15, {:atom, 15, :step}, {:var, 15, :_step@1}}
]}, {:call, 15, {:atom, 15, :range_step}, []}}
]}
]} = match_range
assert {:function, 10, :rev_range_step, 0,
[
{:clause, 10, [], [],
[
{:map, 11,
[
{:map_field_assoc, 11, {:atom, 11, :__struct__}, {:atom, 11, Range}},
{:map_field_assoc, 11, {:atom, 11, :first}, {:integer, 11, 12}},
{:map_field_assoc, 11, {:atom, 11, :last}, {:integer, 11, 1}},
{:map_field_assoc, 11, {:atom, 11, :step}, {:integer, 11, -2}}
]}
]}
]} = rev_range_step
assert {:function, 6, :range_step, 0,
[
{:clause, 6, [], [],
[
{:map, 7,
[
{:map_field_assoc, 7, {:atom, 7, :__struct__}, {:atom, 7, Range}},
{:map_field_assoc, 7, {:atom, 7, :first}, {:integer, 7, 1}},
{:map_field_assoc, 7, {:atom, 7, :last}, {:integer, 7, 12}},
{:map_field_assoc, 7, {:atom, 7, :step}, {:integer, 7, 2}}
]}
]}
]} = range_step
assert {:function, 2, :range, 0,
[
{:clause, 2, [], [],
[
{:map, 3,
[
{:map_field_assoc, 3, {:atom, 3, :__struct__}, {:atom, 3, Range}},
{:map_field_assoc, 3, {:atom, 3, :first}, {:integer, 3, 1}},
{:map_field_assoc, 3, {:atom, 3, :last}, {:integer, 3, 12}},
{:map_field_assoc, 3, {:atom, 3, :step}, {:integer, 3, 1}}
]}
]}
]} = range
end
test "list comprehension" do
{tokens, ast} = load("Elixir.ListComprehension.beam", "list_comprehension.ex")
[block | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 11, :lc_complex, 0,
[
{:clause, 11, [], [],
[
{:call, 12, {:remote, 12, {:atom, 12, :lists}, {:atom, 12, :reverse}},
[
{:call, 12, {:remote, 12, {:atom, 12, Enum}, {:atom, 12, :reduce}},
[
{:map, 12,
[
{:map_field_assoc, 12, {:atom, 12, :__struct__}, {:atom, 12, Range}},
{:map_field_assoc, 12, {:atom, 12, :first}, {:integer, 12, 0}},
{:map_field_assoc, 12, {:atom, 12, :last}, {:integer, 12, 5}},
{:map_field_assoc, 12, {:atom, 12, :step}, {:integer, 12, 1}}
]},
{nil, 12},
{:fun, 12,
{:clauses,
[
{:clause, 12, [{:var, 12, :_n@1}, {:var, 12, :_@1}], [],
[
{:case, [generated: true, location: 12],
{:op, 12, :==,
{:op, 12, :rem, {:var, 12, :_n@1}, {:integer, 12, 3}},
{:integer, 12, 0}},
[
{:clause, [generated: true, location: 12],
[{:atom, [generated: true, location: 12], true}], [],
[
{:cons, 12,
{:op, 12, :*, {:var, 12, :_n@1}, {:var, 12, :_n@1}},
{:var, 12, :_@1}}
]},
{:clause, [generated: true, location: 12],
[{:atom, [generated: true, location: 12], false}], [],
[{:var, 12, :_@1}]}
]}
]}
]}}
]}
]}
]}
]} = block
end
test "list" do
{tokens, ast} = load("Elixir.ListEx.beam", "list.ex")
[ht2, ht, list, _wrap | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 5, :list, 0,
[
{:clause, 5, [], [],
[
{:cons, 6,
{:cons, 6, {:integer, 6, 49}, {:cons, 6, {:integer, 6, 49}, {nil, 6}}},
{:cons, 6,
{:bin, 6, [{:bin_element, 6, {:string, 6, '12'}, :default, :default}]},
{:cons, 6, {:integer, 6, 1},
{:cons, 6, {:integer, 6, 2},
{:cons, 6, {:integer, 6, 3},
{:cons, 6, {:call, 6, {:atom, 6, :wrap}, [{:integer, 6, 4}]}, {nil, 6}}}}}}}
]}
]} = list
assert {:function, 9, :ht, 1,
[
{:clause, 9, [{:cons, 9, {:var, 9, :_a@1}, {:var, 9, :_}}], [],
[
{:cons, 10, {:var, 10, :_a@1},
{:cons, 10, {:integer, 10, 1},
{:cons, 10, {:integer, 10, 2}, {:cons, 10, {:integer, 10, 3}, {nil, 10}}}}}
]}
]} = ht
assert {:function, 13, :ht2, 1,
[
{:clause, 13, [{:cons, 13, {:var, 13, :_a@1}, {:var, 13, :_}}], [],
[
{:cons, 14, {:var, 14, :_a@1},
{:call, 14, {:atom, 14, :wrap}, [{:integer, 14, 1}]}}
]}
]} = ht2
end
test "try" do
{tokens, ast} = load("Elixir.Try.beam", "try.ex")
[body_after, try_after, try_else, try_rescue | _] =
AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 2, :try_rescue, 0,
[
{:clause, 2, [], [],
[
{:try, 3,
[
{:case, 4, {:atom, 4, true},
[
{:clause, [generated: true, location: 4],
[{:atom, [generated: true, location: 4], false}], [],
[
{:call, 7, {:remote, 7, {:atom, 7, :erlang}, {:atom, 7, :error}},
[
{:call, 7,
{:remote, 7, {:atom, 7, RuntimeError}, {:atom, 7, :exception}},
[
{:bin, 7,
[
{:bin_element, 7, {:string, 7, 'oops'}, :default, :default}
]}
]}
]}
]},
{:clause, [generated: true, location: 4],
[{:atom, [generated: true, location: 4], true}], [],
[
{:call, 5, {:remote, 5, {:atom, 5, :erlang}, {:atom, 5, :throw}},
[
{:bin, 5,
[{:bin_element, 5, {:string, 5, 'good'}, :default, :default}]}
]}
]}
]}
], [],
[
{:clause, 10,
[
{:tuple, 10,
[
{:atom, 10, :error},
{:var, 10, :_@1},
{:var, 10, :___STACKTRACE__@1}
]}
],
[
[
{:op, 10, :andalso,
{:op, 10, :==,
{:call, 10, {:remote, 10, {:atom, 10, :erlang}, {:atom, 10, :map_get}},
[{:atom, 10, :__struct__}, {:var, 10, :_@1}]},
{:atom, 10, RuntimeError}},
{:call, 10, {:remote, 10, {:atom, 10, :erlang}, {:atom, 10, :map_get}},
[{:atom, 10, :__exception__}, {:var, 10, :_@1}]}}
]
],
[
{:match, 10, {:var, 10, :_e@1}, {:var, 10, :_@1}},
{:integer, 11, 11},
{:var, 12, :_e@1}
]},
{:clause, 14,
[
{:tuple, 14,
[
{:atom, 14, :throw},
{:var, 14, :_val@1},
{:var, 14, :___STACKTRACE__@1}
]}
], [], [{:integer, 15, 12}, {:var, 16, :_val@1}]}
], []}
]}
]} = try_rescue
assert {:function, 20, :try_else, 0,
[
{:clause, 20, [], [],
[
{:match, 21, {:var, 21, :_x@1}, {:integer, 21, 2}},
{:try, 23, [{:op, 24, :/, {:integer, 24, 1}, {:var, 24, :_x@1}}],
[
{:clause, 30, [{:var, 30, :_y@1}],
[
[
{:op, 30, :andalso, {:op, 30, :<, {:var, 30, :_y@1}, {:integer, 30, 1}},
{:op, 30, :>, {:var, 30, :_y@1}, {:op, 30, :-, {:integer, 30, 1}}}}
]
], [{:integer, 31, 2}, {:atom, 32, :small}]},
{:clause, 34, [{:var, 34, :_}], [], [{:integer, 35, 3}, {:atom, 36, :large}]}
],
[
{:clause, 26,
[
{:tuple, 26,
[
{:atom, 26, :error},
{:var, 26, :_@1},
{:var, 26, :___STACKTRACE__@1}
]}
],
[
[{:op, 26, :==, {:var, 26, :_@1}, {:atom, 26, :badarith}}],
[
{:op, 26, :andalso,
{:op, 26, :==,
{:call, 26, {:remote, 26, {:atom, 26, :erlang}, {:atom, 26, :map_get}},
[{:atom, 26, :__struct__}, {:var, 26, :_@1}]},
{:atom, 26, ArithmeticError}},
{:call, 26, {:remote, 26, {:atom, 26, :erlang}, {:atom, 26, :map_get}},
[{:atom, 26, :__exception__}, {:var, 26, :_@1}]}}
]
], [{:integer, 27, 1}, {:atom, 28, :infinity}]}
], []}
]}
]} = try_else
assert {:function, 40, :try_after, 0,
[
{:clause, 40, [], [],
[
{:match, 41, {:tuple, 41, [{:atom, 41, :ok}, {:var, 41, :_file@1}]},
{:call, 41, {:remote, 41, {:atom, 41, File}, {:atom, 41, :open}},
[
{:bin, 41,
[{:bin_element, 41, {:string, 41, 'sample'}, :default, :default}]},
{:cons, 41, {:atom, 41, :utf8}, {:cons, 41, {:atom, 41, :write}, {nil, 41}}}
]}},
{:try, 43,
[
{:call, 44, {:remote, 44, {:atom, 44, IO}, {:atom, 44, :write}},
[
{:var, 44, :_file@1},
{:bin, 44,
[
{:bin_element, 44, {:string, 44, [111, 108, 195, 161]}, :default,
:default}
]}
]},
{:call, 45, {:remote, 45, {:atom, 45, :erlang}, {:atom, 45, :error}},
[
{:call, 45,
{:remote, 45, {:atom, 45, RuntimeError}, {:atom, 45, :exception}},
[
{:bin, 45,
[
{:bin_element, 45, {:string, 45, 'oops, something went wrong'},
:default, :default}
]}
]}
]}
], [], [],
[
{:call, 47, {:remote, 47, {:atom, 47, File}, {:atom, 47, :close}},
[{:var, 47, :_file@1}]}
]}
]}
]} = try_after
assert {:function, 51, :body_after, 0,
[
{:clause, 51, [], [],
[
{:try, 51,
[
{:call, 52, {:remote, 52, {:atom, 52, :erlang}, {:atom, 52, :error}},
[
{:call, 52, {:remote, 52, {:atom, 52, Kernel.Utils}, {:atom, 52, :raise}},
[
{:cons, 52, {:integer, 52, 49},
{:cons, 52, {:integer, 52, 50}, {nil, 52}}}
]}
]},
{:integer, 53, 1}
], [], [], [{:op, 55, :-, {:integer, 55, 1}}]}
]}
]} = body_after
end
test "map" do
{tokens, ast} = load("Elixir.MapEx.beam", "map.ex")
[pattern_matching_str, pattern_matching, test_map_str, test_map, empty_map | _] =
AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 2, :empty_map, 0, [{:clause, 2, [], [], [{:map, 3, []}]}]} = empty_map
assert {:function, 6, :test_map, 0,
[
{:clause, 6, [], [],
[
{:map, 7,
[
{:map_field_assoc, 7, {:atom, 7, :a}, {:integer, 7, 12}},
{:map_field_assoc, 7, {:atom, 7, :b}, {:call, 7, {:atom, 7, :empty_map}, []}}
]}
]}
]} = test_map
assert {:function, 10, :test_map_str, 0,
[
{:clause, 10, [], [],
[
{:map, 11,
[
{:map_field_assoc, 11,
{:bin, 11, [{:bin_element, 11, {:string, 11, 'a'}, :default, :default}]},
{:integer, 11, 12}},
{:map_field_assoc, 11,
{:bin, 11, [{:bin_element, 11, {:string, 11, 'b'}, :default, :default}]},
{:integer, 11, 0}}
]}
]}
]} = test_map_str
assert {:function, 14, :pattern_matching, 0,
[
{:clause, 14, [], [],
[
{:match, 15,
{:map, 15, [{:map_field_exact, 15, {:atom, 15, :a}, {:var, 15, :_a@1}}]},
{:call, 15, {:atom, 15, :test_map}, []}},
{:match, 16,
{:map, 16, [{:map_field_exact, 16, {:atom, 16, :b}, {:var, 16, :_a@1}}]},
{:call, 16, {:atom, 16, :test_map}, []}}
]}
]} = pattern_matching
assert {:function, 19, :pattern_matching_str, 0,
[
{:clause, 19, [], [],
[
{:match, 20,
{:map, 20,
[
{:map_field_exact, 20,
{:bin, 20, [{:bin_element, 20, {:string, 20, 'a'}, :default, :default}]},
{:var, 20, :_a@1}}
]}, {:call, 20, {:atom, 20, :test_map}, []}}
]}
]} = pattern_matching_str
end
test "struct" do
{tokens, ast} = load("struct/Elixir.StructEx.beam", "struct/struct.ex")
[get2, get, update, empty, struct | _] =
AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 8, :update, 0,
[
{:clause, 8, [], [],
[
{:map, 9, {:call, 9, {:atom, 9, :empty}, []},
[{:map_field_exact, 9, {:atom, 9, :x}, {:integer, 9, 13}}]}
]}
]} = update
assert {:function, 16, :get2, 0,
[
{:clause, 16, [], [],
[
{:match, 17, {:var, 17, :_x@1},
{:case, [generated: true, location: 17], {:call, 17, {:atom, 17, :update}, []},
[
{:clause, [generated: true, location: 17],
[
{:map, 17,
[
{:map_field_exact, 17, {:atom, [generated: true, location: 17], :x},
{:var, [generated: true, location: 17], :_@1}}
]}
], [], [{:var, [generated: true, location: 17], :_@1}]},
{:clause, [generated: true, location: 17],
[{:var, [generated: true, location: 17], :_@1}],
[
[
{:call, [generated: true, location: 17],
{:remote, [generated: true, location: 17],
{:atom, [generated: true, location: 17], :erlang},
{:atom, [generated: true, location: 17], :is_map}},
[{:var, [generated: true, location: 17], :_@1}]}
]
],
[
{:call, 17, {:remote, 17, {:atom, 17, :erlang}, {:atom, 17, :error}},
[
{:tuple, 17,
[
{:atom, 17, :badkey},
{:atom, 17, :x},
{:var, [generated: true, location: 17], :_@1}
]}
]}
]},
{:clause, [generated: true, location: 17],
[{:var, [generated: true, location: 17], :_@1}], [],
[
{:call, [generated: true, location: 17],
{:remote, [generated: true, location: 17],
{:var, [generated: true, location: 17], :_@1}, {:atom, 17, :x}}, []}
]}
]}}
]}
]} = get2
assert {:function, 12, :get, 0,
[
{:clause, 12, [], [],
[
{:match, 13,
{:map, 13,
[
{:map_field_exact, 13, {:atom, 13, :__struct__}, {:atom, 13, StructEx}},
{:map_field_exact, 13, {:atom, 13, :x}, {:var, 13, :_x@1}}
]}, {:call, 13, {:atom, 13, :update}, []}}
]}
]} = get
assert {:function, 4, :empty, 0,
[
{:clause, 4, [], [],
[
{:map, 5,
[
{:map_field_assoc, 5, {:atom, 5, :__struct__}, {:atom, 5, StructEx}},
{:map_field_assoc, 5, {:atom, 5, :x}, {:integer, 5, 0}},
{:map_field_assoc, 5, {:atom, 5, :y}, {:integer, 5, 0}}
]}
]}
]} = empty
assert {:function, 2, :__struct__, 1,
[
{:clause, 2, [{:var, 2, :_@1}], [],
[
{:call, 2, {:remote, 2, {:atom, 2, Enum}, {:atom, 2, :reduce}},
[
{:var, 2, :_@1},
{:map, 2,
[
{:map_field_assoc, 2, {:atom, 2, :__struct__}, {:atom, 2, StructEx}},
{:map_field_assoc, 2, {:atom, 2, :x}, {:integer, 2, 0}},
{:map_field_assoc, 2, {:atom, 2, :y}, {:integer, 2, 0}}
]},
{:fun, 2,
{:clauses,
[
{:clause, 2,
[{:tuple, 2, [{:var, 2, :_@2}, {:var, 2, :_@3}]}, {:var, 2, :_@4}], [],
[
{:call, 2, {:remote, 2, {:atom, 2, :maps}, {:atom, 2, :update}},
[{:var, 2, :_@2}, {:var, 2, :_@3}, {:var, 2, :_@4}]}
]}
]}}
]}
]}
]} = struct
end
test "record" do
{tokens, ast} = load("record/Elixir.RecordEx.beam", "record/record.ex")
[update, init, empty, macro3, macro2, macro1 | _] =
AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 7, :empty, 0,
[
{:clause, 7, [], [],
[{:tuple, 8, [{:atom, 8, :record_ex}, {:integer, 8, 0}, {:integer, 8, 0}]}]}
]} = empty
assert {:function, 11, :init, 0,
[
{:clause, 11, [], [],
[{:tuple, 12, [{:atom, 12, :record_ex}, {:integer, 12, 1}, {:integer, 12, 0}]}]}
]} = init
elixir_env_arg = if System.version() >= "1.13", do: :to_caller, else: :linify
assert {:function, 5, :"MACRO-record_ex", 1,
[
{:clause, 5, [{:var, 5, :_@CALLER}], [],
[
{:match, 5, {:var, 5, :__CALLER__},
{:call, 5, {:remote, 5, {:atom, 5, :elixir_env}, {:atom, 5, ^elixir_env_arg}},
[{:var, 5, :_@CALLER}]}},
{:call, 5, {:atom, 5, :"MACRO-record_ex"}, [{:var, 5, :__CALLER__}, {nil, 5}]}
]}
]} = macro1
assert {:function, 5, :"MACRO-record_ex", 2,
[
{:clause, 5, [{:var, 5, :_@CALLER}, {:var, 5, :_@1}], [],
[
{:match, 5, {:var, 5, :__CALLER__},
{:call, 5, {:remote, 5, {:atom, 5, :elixir_env}, {:atom, 5, ^elixir_env_arg}},
[{:var, 5, :_@CALLER}]}},
{:call, 5, {:remote, 5, {:atom, 5, Record}, {:atom, 5, :__access__}},
[
{:atom, 5, :record_ex},
{:cons, 5, {:tuple, 5, [{:atom, 5, :x}, {:integer, 5, 0}]},
{:cons, 5, {:tuple, 5, [{:atom, 5, :y}, {:integer, 5, 0}]}, {nil, 5}}},
{:var, 5, :_@1},
{:var, 5, :__CALLER__}
]}
]}
]} = macro2
assert {:function, 5, :"MACRO-record_ex", 3,
[
{:clause, 5, [{:var, 5, :_@CALLER}, {:var, 5, :_@1}, {:var, 5, :_@2}], [],
[
{:match, 5, {:var, 5, :__CALLER__},
{:call, 5, {:remote, 5, {:atom, 5, :elixir_env}, {:atom, 5, ^elixir_env_arg}},
[{:var, 5, :_@CALLER}]}},
{:call, 5, {:remote, 5, {:atom, 5, Record}, {:atom, 5, :__access__}},
[
{:atom, 5, :record_ex},
{:cons, 5, {:tuple, 5, [{:atom, 5, :x}, {:integer, 5, 0}]},
{:cons, 5, {:tuple, 5, [{:atom, 5, :y}, {:integer, 5, 0}]}, {nil, 5}}},
{:var, 5, :_@1},
{:var, 5, :_@2},
{:var, 5, :__CALLER__}
]}
]}
]} = macro3
assert {:function, 16, :update, 1,
[
{:clause, 16, [{:var, 16, :_record@1}], [],
[
{:call, 17, {:remote, 17, {:atom, 17, :erlang}, {:atom, 17, :setelement}},
[
{:integer, 17, 2},
{:call, 17, {:remote, 17, {:atom, 17, :erlang}, {:atom, 17, :setelement}},
[{:integer, 17, 3}, {:var, 17, :_record@1}, {:integer, 17, 3}]},
{:integer, 17, 2}
]}
]}
]} = update
end
test "receive" do
{tokens, ast} = load("Elixir.Receive.beam", "receive.ex")
[recv, recv2 | _] = AstSpecifier.run_mappers(ast, tokens) |> Enum.reverse()
assert {:function, 2, :recv2, 0,
[
{:clause, 2, [], [],
[
{:call, 3, {:remote, 3, {:atom, 3, :erlang}, {:atom, 3, :send}},
[
{:call, 3, {:remote, 3, {:atom, 3, :erlang}, {:atom, 3, :self}}, []},
{:tuple, 3,
[
{:atom, 3, :hello},
{:bin, 3, [{:bin_element, 3, {:string, 3, 'All'}, :default, :default}]}
]}
]},
{:receive, 5,
[
{:clause, 6, [{:tuple, 6, [{:atom, 6, :hello}, {:var, 6, :_to@1}]}], [],
[
{:call, 7, {:remote, 7, {:atom, 7, IO}, {:atom, 7, :puts}},
[
{:bin, 7,
[
{:bin_element, 7, {:string, 7, 'Hello, '}, :default, :default},
{:bin_element, 7, {:var, 7, :_to@1}, :default, [:binary]}
]}
]}
]},
{:clause, 9, [{:atom, 9, :skip}], [], [{:atom, 10, :ok}]}
], {:integer, 12, 1000},
[
{:call, 13, {:remote, 13, {:atom, 13, IO}, {:atom, 13, :puts}},
[
{:bin, 13,
[{:bin_element, 13, {:string, 13, 'Timeout'}, :default, :default}]}
]}
]}
]}
]} = recv2
assert {:function, 17, :recv, 0,
[
{:clause, 17, [], [],
[{:receive, 18, [{:clause, 19, [{:atom, 19, :ok}], [], [{:atom, 19, :ok}]}]}]}
]} = recv
end
test "typespec when" do
{tokens, ast} = load("/Elixir.TypespecWhen.beam", "/typespec_when.ex")
[spec | _] =
AstSpecifier.run_mappers(ast, tokens)
|> filter_attributes(:spec)
|> Enum.reverse()
assert {:attribute, 2, :spec,
{{:foo, 1},
[
{:type, 2, :bounded_fun,
[
{:type, 2, :fun,
[
{:type, 2, :product, [{:type, 2, :tuple, [{:atom, 2, :a}, {:var, 2, :x}]}]},
{:type, 2, :union,
[
{:type, 2, :tuple, [{:atom, 2, :a}, {:var, 2, :x}]},
{:type, 2, :tuple, [{:atom, 2, :b}, {:var, 2, :x}]}
]}
]},
[
{:type, 2, :constraint,
[{:atom, 2, :is_subtype}, [{:var, 2, :x}, {:type, 2, :term, []}]]}
]
]}
]}} = spec
end
test "typespec behavior" do
{tokens, ast} = load("/Elixir.TypespecBeh.beam", "/typespec_beh.ex")
[callback1, callback2 | _] =
AstSpecifier.run_mappers(ast, tokens)
|> filter_attributes(:callback)
|> Enum.reverse()
assert {:attribute, 4, :callback,
{{:"MACRO-non_vital_macro", 2},
[
{:type, 4, :fun,
[
{:type, 4, :product,
[
{:type, 4, :term, []},
{:ann_type, 4, [{:var, 4, :arg}, {:type, 4, :any, []}]}
]},
{:remote_type, 4, [{:atom, 4, Macro}, {:atom, 4, :t}, []]}
]}
]}} = callback1
assert {:attribute, 3, :callback,
{{:non_vital_fun, 0},
[
{:type, 3, :bounded_fun,
[
{:type, 3, :fun, [{:type, 3, :product, []}, {:var, 3, :a}]},
[
{:type, 3, :constraint,
[
{:atom, 3, :is_subtype},
[
{:var, 3, :a},
{:type, 3, :tuple, [{:type, 3, :integer, []}, {:type, 3, :atom, []}]}
]
]}
]
]}
]}} = callback2
end
test "typespec" do
{tokens, ast} = load("Elixir.Typespec.beam", "typespec.ex")
result =
AstSpecifier.run_mappers(ast, tokens)
|> filter_attributes(:spec)
|> make_spec_map()
assert {:attribute, 6, :spec,
{{:spec_remote_type, 0},
[
{:type, 6, :fun,
[
{:type, 6, :product, []},
{:remote_type, 6, [{:atom, 6, Unknown}, {:atom, 6, :atom}, []]}
]}
]}} = result.spec_remote_type
assert {:attribute, 9, :spec,
{{:spec_user_type, 0},
[
{:type, 9, :fun,
[
{:type, 9, :product, []},
{:user_type, 9, :mylist,
[{:type, 9, :union, [{:atom, 9, :ok}, {:type, 9, :atom, []}]}]}
]}
]}} = result.spec_user_type
assert {:attribute, 12, :spec,
{{:spec_map_and_named_type, 1},
[
{:type, 12, :fun,
[
{:type, 12, :product,
[
{:ann_type, 12,
[
{:var, 12, :type},
{:remote_type, 12, [{:atom, 12, Unknown}, {:atom, 12, :atom}, []]}
]}
]},
{:type, 12, :map,
[
{:type, 13, :map_field_assoc,
[{:atom, 13, :value}, {:type, 13, :integer, []}]},
{:type, 14, :map_field_exact,
[
{:atom, 14, :type},
{:remote_type, 14, [{:atom, 14, Unknown}, {:atom, 14, :atom}, []]}
]}
]}
]}
]}} = result.spec_map_and_named_type
assert {:attribute, 18, :spec,
{{:spec_atom, 1},
[
{:type, 18, :fun,
[
{:type, 18, :product,
[
{:type, 18, :union,
[{:atom, 18, :ok}, {:atom, 18, nil}, {:atom, 18, true}, {:atom, 18, false}]}
]},
{:remote_type, 18,
[
{:atom, 18, Unknown},
{:atom, 18, :atom},
[
{:type, 18, :union,
[
{:atom, 18, :ok},
{:atom, 18, nil},
{:atom, 18, true},
{:atom, 18, false}
]}
]
]}
]}
]}} = result.spec_atom
assert {:attribute, 21, :spec,
{{:spec_function, 0},
[
{:type, 21, :fun,
[
{:type, 21, :product, []},
{:type, 21, :fun,
[
{:type, 21, :product,
[
{:type, 21, :atom, []},
{:type, 21, :map,
[
{:type, 21, :map_field_exact,
[
{:atom, 21, :name},
{:remote_type, 21, [{:atom, 21, String}, {:atom, 21, :t}, []]}
]}
]}
]},
{:type, 21, :map, :any}
]}
]}
]}} = result.spec_function
assert {:attribute, 24, :spec,
{{:spec_struct, 1},
[
{:type, 24, :fun,
[
{:type, 24, :product,
[
{:type, 24, :map,
[
{:type, 24, :map_field_exact,
[{:atom, 24, :__struct__}, {:atom, 24, Typespec}]},
{:type, 24, :map_field_exact,
[{:atom, 24, :age}, {:type, 24, :term, []}]},
{:type, 24, :map_field_exact,
[{:atom, 24, :name}, {:type, 24, :term, []}]}
]}
]},
{:type, 24, :map,
[
{:type, 24, :map_field_exact,
[{:atom, 24, :__struct__}, {:atom, 24, Typespec}]},
{:type, 24, :map_field_exact, [{:atom, 24, :age}, {:type, 24, :term, []}]},
{:type, 24, :map_field_exact, [{:atom, 24, :name}, {:type, 24, :term, []}]}
]}
]}
]}} = result.spec_struct
assert {:attribute, 27, :spec,
{{:spec_list, 1},
[
{:type, 27, :fun,
[
{:type, 27, :product,
[{:type, 27, :nonempty_list, [{:type, 27, :integer, []}]}]},
{:type, 27, :nonempty_list, []}
]}
]}} = result.spec_list
assert {:attribute, 30, :spec,
{{:spec_range, 1},
[
{:type, 30, :fun,
[
{:type, 30, :product,
[{:type, 30, :range, [{:integer, 30, 1}, {:integer, 30, 10}]}]},
{:type, 30, :list,
[{:type, 30, :range, [{:integer, 30, 1}, {:integer, 30, 10}]}]}
]}
]}} = result.spec_range
assert {:attribute, 33, :spec,
{{:spec_keyword, 1},
[
{:type, 33, :fun,
[
{:type, 33, :product,
[
{:type, 33, :list,
[
{:type, 33, :union,
[
{:type, 33, :tuple, [{:atom, 33, :a}, {:type, 33, :integer, []}]},
{:type, 33, :tuple, [{:atom, 33, :b}, {:type, 33, :integer, []}]}
]}
]}
]},
{:type, 33, :integer, []}
]}
]}} = result.spec_keyword
assert {:attribute, 36, :spec,
{{:spec_tuple, 1},
[
{:type, 36, :fun,
[
{:type, 36, :product,
[{:type, 36, :tuple, [{:atom, 36, :ok}, {:type, 36, :integer, []}]}]},
{:type, 36, :tuple, :any}
]}
]}} = result.spec_tuple
assert {:attribute, 39, :spec,
{{:spec_bitstring, 1},
[
{:type, 39, :fun,
[
{:type, 39, :product,
[{:type, 39, :binary, [{:integer, 39, 48}, {:integer, 39, 8}]}]},
{:type, 39, :binary, [{:integer, 39, 0}, {:integer, 39, 0}]}
]}
]}} = result.spec_bitstring
end
# Regression guard: compiler-generated clauses (here an `__impl__/1` clause)
# carry `location: 0` and have no matching tokens — run_mappers/2 must still
# return them instead of crashing.
test "clauses without a line" do
  forms = [
    {:function, 8, :__impl__, 1,
     [
       {:clause, [generated: true, location: 0],
        [{:atom, [generated: true, location: 0], :for}], [],
        [{:atom, [generated: true, location: 0], TypedSchemaTest}]}
     ]}
  ]

  # Empty token list on purpose: there is nothing to map these clauses to.
  assert [_] = AstSpecifier.run_mappers(forms, [])
end
# Three modules compiled from one source file: token mapping must pick the
# lines belonging to each module's own `name/0` (3/4, 9/10, 14/15), not the
# first textual match in the shared file.
test "nested modules" do
  {tokensA, astA} = load("Elixir.NestedModules.ModuleA.beam", "nested_modules.ex")
  {tokensB, astB} = load("Elixir.NestedModules.ModuleB.beam", "nested_modules.ex")
  {tokens, ast} = load("Elixir.NestedModules.beam", "nested_modules.ex")

  assert {:function, 3, :name, 0, [{:clause, 3, [], [], [{:atom, 4, :module_a}]}]} =
           List.last(AstSpecifier.run_mappers(astA, tokensA))

  assert {:function, 9, :name, 0, [{:clause, 9, [], [], [{:atom, 10, :module_b}]}]} =
           List.last(AstSpecifier.run_mappers(astB, tokensB))

  assert {:function, 14, :name, 0, [{:clause, 14, [], [], [{:atom, 15, :module}]}]} =
           List.last(AstSpecifier.run_mappers(ast, tokens))
end
# Helpers
# Keeps only the `:attribute` forms whose attribute kind equals `type`
# (e.g. `:spec`, `:callback`), preserving their original order.
def filter_attributes(ast, type) do
  for {:attribute, _anno, ^type, _value} = attr <- ast, do: attr
end
# Indexes spec attributes by function name so tests can look a spec up as
# `result.fun_name`. The arity is dropped from the key.
def make_spec_map(specs) do
  Map.new(specs, fn {:attribute, _anno, _kind, {{name, _arity}, _types}} = attr ->
    {name, attr}
  end)
end
end
| 39.08209 | 99 | 0.331583 |
9ebf5ab10395ee8a8e2db358f733f2c9656a688d | 626 | exs | Elixir | test/dgraph_ex/core/alter_test.exs | sunny-g/dgraph_ex | fab9f1fd684538758ed211537c3b0869233e7ce4 | [
"MIT"
] | 1 | 2019-05-01T15:18:41.000Z | 2019-05-01T15:18:41.000Z | test/dgraph_ex/core/alter_test.exs | sunny-g/dgraph_ex | fab9f1fd684538758ed211537c3b0869233e7ce4 | [
"MIT"
] | null | null | null | test/dgraph_ex/core/alter_test.exs | sunny-g/dgraph_ex | fab9f1fd684538758ed211537c3b0869233e7ce4 | [
"MIT"
] | 1 | 2019-05-01T15:18:48.000Z | 2019-05-01T15:18:48.000Z | defmodule DgraphEx.Core.AlterTest do
use ExUnit.Case, async: true
doctest DgraphEx.Core.Alter
alias DgraphEx.Core.{Alter, Field}
# Two indexed string fields on the same subject should render as one schema
# alteration line per predicate.
test "render/1 renders an alter struct correctly" do
  field = fn predicate, object ->
    %Field{
      index: true,
      subject: "123",
      predicate: predicate,
      object: object,
      type: :string
    }
  end

  rendered =
    [field.("loves", "cooking"), field.("hates", "mean birds")]
    |> Alter.new()
    |> Alter.render()

  assert rendered == "loves: string @index(string) .\nhates: string @index(string) .\n"
end
end
| 21.586207 | 98 | 0.57508 |
9ebf5bd37996e9d45cb6845671efe1e8dc049df9 | 1,935 | ex | Elixir | clients/games/lib/google_api/games/v1/model/player_level.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/games/lib/google_api/games/v1/model/player_level.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/games/lib/google_api/games/v1/model/player_level.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Games.V1.Model.PlayerLevel do
  @moduledoc """
  1P/3P metadata about a user's level.
  ## Attributes
  * `kind` (*type:* `String.t`, *default:* `nil`) - Uniquely identifies the type of this resource. Value is always the fixed string `games#playerLevel`.
  * `level` (*type:* `integer()`, *default:* `nil`) - The level for the user.
  * `maxExperiencePoints` (*type:* `String.t`, *default:* `nil`) - The maximum experience points for this level.
  * `minExperiencePoints` (*type:* `String.t`, *default:* `nil`) - The minimum experience points for this level.
  """
  use GoogleApi.Gax.ModelBase
  # Struct shape mirrors the JSON resource documented in the moduledoc above.
  @type t :: %__MODULE__{
  :kind => String.t(),
  :level => integer(),
  :maxExperiencePoints => String.t(),
  :minExperiencePoints => String.t()
  }
  # `field/1` presumably comes from `use GoogleApi.Gax.ModelBase` above;
  # it registers each attribute for (de)serialization — confirm in ModelBase.
  field(:kind)
  field(:level)
  field(:maxExperiencePoints)
  field(:minExperiencePoints)
end
defimpl Poison.Decoder, for: GoogleApi.Games.V1.Model.PlayerLevel do
  # Decoding is delegated straight back to the generated model module.
  def decode(value, options),
    do: GoogleApi.Games.V1.Model.PlayerLevel.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Games.V1.Model.PlayerLevel do
  # Encoding is handled generically by the shared ModelBase helper.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34.553571 | 154 | 0.704393 |
9ebf6658627e080615d326bcf45fa9455144366b | 964 | ex | Elixir | Next_Level_Week_4/rocketpay/lib/rocketpay/users/create.ex | amaziero/next-level-weeek-1 | 2269252f6a7294a82a28aade4404770fb3c438e8 | [
"MIT"
] | null | null | null | Next_Level_Week_4/rocketpay/lib/rocketpay/users/create.ex | amaziero/next-level-weeek-1 | 2269252f6a7294a82a28aade4404770fb3c438e8 | [
"MIT"
] | 2 | 2020-07-19T20:12:50.000Z | 2021-02-23T00:47:46.000Z | Next_Level_Week_4/rocketpay/lib/rocketpay/users/create.ex | amaziero/next-level-weeek-1 | 2269252f6a7294a82a28aade4404770fb3c438e8 | [
"MIT"
] | null | null | null | defmodule Rocketpay.Users.Create do
alias Ecto.Multi
alias Rocketpay.{Account, Repo, User}
# Creates a user plus its account in one database transaction.
# Returns `{:ok, user}` (account preloaded) or `{:error, reason}`.
def call(params) do
  multi =
    Multi.new()
    |> Multi.insert(:create_user, User.changeset(params))
    |> Multi.run(:create_account, fn repo, %{create_user: user} ->
      insert_account(repo, user)
    end)
    |> Multi.run(:preload_data, fn repo, %{create_user: user} ->
      preload_data(repo, user)
    end)

  run_transaction(multi)
end
# Persists the account row for the freshly created user, using the repo
# handed in by Ecto.Multi.run.
defp insert_account(repo, user) do
  changeset = account_changeset(user.id)
  repo.insert(changeset)
end
# Changeset for a brand-new account: owned by `user_id`, starting balance "0.00".
defp account_changeset(user_id) do
  Account.changeset(%{user_id: user_id, balance: "0.00"})
end
# Executes the multi, collapsing Ecto's four-element transaction error into
# a plain `{:ok, user} | {:error, reason}` tuple.
defp run_transaction(multi) do
  case Repo.transaction(multi) do
    {:ok, %{preload_data: user}} -> {:ok, user}
    {:error, _failed_step, reason, _changes_so_far} -> {:error, reason}
  end
end
# Loads the user's account association, wrapped in the `{:ok, _}` shape that
# Ecto.Multi.run expects.
defp preload_data(repo, user), do: {:ok, repo.preload(user, :account)}
end
| 27.542857 | 99 | 0.637967 |
9ebf668e00a5780bec09ff7c3ce54ba60fe15ed5 | 532 | ex | Elixir | lib/planga_web/views/conversation_view.ex | ResiliaDev/Planga | b21d290dd7c2c7fa30571d0a5124d63bd09c0c9e | [
"MIT"
] | 37 | 2018-07-13T14:08:16.000Z | 2021-04-09T15:00:22.000Z | lib/planga_web/views/conversation_view.ex | ResiliaDev/Planga | b21d290dd7c2c7fa30571d0a5124d63bd09c0c9e | [
"MIT"
] | 9 | 2018-07-16T15:24:39.000Z | 2021-09-01T14:21:20.000Z | lib/planga_web/views/conversation_view.ex | ResiliaDev/Planga | b21d290dd7c2c7fa30571d0a5124d63bd09c0c9e | [
"MIT"
] | 3 | 2018-10-05T20:19:25.000Z | 2019-12-05T00:30:01.000Z | defmodule PlangaWeb.ConversationView do
use PlangaWeb, :view
alias PlangaWeb.ConversationView
# JSON views: a list wraps each conversation, a single conversation wraps one,
# and the leaf template exposes only the id and remote_id fields.
def render("index.json", %{conversations: conversations}),
  do: %{data: render_many(conversations, ConversationView, "conversation.json")}

def render("show.json", %{conversation: conversation}),
  do: %{data: render_one(conversation, ConversationView, "conversation.json")}

def render("conversation.json", %{conversation: conversation}),
  do: %{id: conversation.id, remote_id: conversation.remote_id}
end
| 31.294118 | 78 | 0.74812 |
9ebf85140728419a2c9e37a3cd5d15b161277bf8 | 110 | ex | Elixir | lib/jeopardy/fsm/final_jeopardy/game_over.ex | ryoung786/jeopardy | 5558fc49013c5a22e556a0040cbc116aa8f63912 | [
"MIT"
] | null | null | null | lib/jeopardy/fsm/final_jeopardy/game_over.ex | ryoung786/jeopardy | 5558fc49013c5a22e556a0040cbc116aa8f63912 | [
"MIT"
] | 32 | 2020-06-08T14:50:46.000Z | 2021-01-01T05:40:44.000Z | lib/jeopardy/fsm/final_jeopardy/game_over.ex | ryoung786/jeopardy | 5558fc49013c5a22e556a0040cbc116aa8f63912 | [
"MIT"
] | null | null | null | defmodule Jeopardy.FSM.FinalJeopardy.GameOver do
use Jeopardy.FSM
def handle(_, _, state), do: state
end
| 18.333333 | 48 | 0.754545 |
9ebf8bf137fc0fa402fe7ec56573b76f12f97ce9 | 64,570 | exs | Elixir | lib/elixir/test/elixir/kernel/expansion_test.exs | andrewtimberlake/elixir | a1c4ffc897f9407fe7e739e20e697805fbbff810 | [
"Apache-2.0"
] | 1 | 2019-10-11T01:36:26.000Z | 2019-10-11T01:36:26.000Z | lib/elixir/test/elixir/kernel/expansion_test.exs | andrewtimberlake/elixir | a1c4ffc897f9407fe7e739e20e697805fbbff810 | [
"Apache-2.0"
] | 1 | 2015-06-09T15:52:43.000Z | 2015-06-09T15:52:43.000Z | lib/elixir/test/elixir/kernel/expansion_test.exs | andrewtimberlake/elixir | a1c4ffc897f9407fe7e739e20e697805fbbff810 | [
"Apache-2.0"
] | null | null | null | Code.require_file("../test_helper.exs", __DIR__)
defmodule Kernel.ExpansionTarget do
defmacro seventeen, do: 17
defmacro bar, do: "bar"
end
defmodule Kernel.ExpansionTest do
use ExUnit.Case, async: false
describe "__block__" do
test "expands to nil when empty" do
assert expand(quote(do: unquote(:__block__)())) == nil
end
test "expands to argument when arity is 1" do
assert expand(quote(do: unquote(:__block__)(1))) == 1
end
test "is recursive to argument when arity is 1" do
expanded =
quote do
_ = 1
2
end
assert expand(quote(do: unquote(:__block__)(_ = 1, unquote(:__block__)(2)))) == expanded
end
test "accumulates vars" do
before_expansion =
quote do
a = 1
a
end
after_expansion =
quote do
a = 1
a
end
assert expand(before_expansion) == after_expansion
end
end
  # Covers the alias/2 special form: successful aliasing mutates the
  # environment's alias list, and malformed names/options raise CompileError.
  describe "alias" do
    test "expand args, defines alias and returns itself" do
      alias true, as: True

      input = quote(do: alias(:hello, as: World, warn: True))
      {output, env} = expand_env(input, __ENV__)
      assert output == :hello
      assert env.aliases == [{:"Elixir.True", true}, {:"Elixir.World", :hello}]
    end

    test "invalid alias" do
      message =
        ~r"invalid value for option :as, expected a simple alias, got nested alias: Sample.Lists"

      assert_raise CompileError, message, fn ->
        expand(quote(do: alias(:lists, as: Sample.Lists)))
      end

      message = ~r"invalid argument for alias, expected a compile time atom or alias, got: 1 \+ 2"

      assert_raise CompileError, message, fn ->
        expand(quote(do: alias(1 + 2)))
      end

      message = ~r"invalid value for option :as, expected an alias, got: :foobar"

      assert_raise CompileError, message, fn ->
        expand(quote(do: alias(:lists, as: :foobar)))
      end

      message = ~r"invalid value for option :as, expected an alias, got: :\"Elixir.foobar\""

      assert_raise CompileError, message, fn ->
        expand(quote(do: alias(:lists, as: :"Elixir.foobar")))
      end
    end

    test "invalid expansion" do
      assert_raise CompileError, ~r"invalid alias: \"foo\.Foo\"", fn ->
        code =
          quote do
            foo = :foo
            foo.Foo
          end

        expand(code)
      end
    end

    test "raises if :as is passed to multi-alias aliases" do
      assert_raise CompileError, ~r":as option is not supported by multi-alias call", fn ->
        expand(quote(do: alias(Foo.{Bar, Baz}, as: BarBaz)))
      end
    end

    test "invalid options" do
      assert_raise CompileError, ~r"unsupported option :ops given to alias", fn ->
        expand(quote(do: alias(Foo, ops: 1)))
      end
    end
  end
  # __aliases__ nodes resolve to atoms; resolution consults the environment's
  # alias table and follows chained aliases transitively.
  describe "__aliases__" do
    test "expands even if no alias" do
      assert expand(quote(do: World)) == :"Elixir.World"
      assert expand(quote(do: Elixir.World)) == :"Elixir.World"
    end

    test "expands with alias" do
      alias Hello, as: World
      assert expand_env(quote(do: World), __ENV__) |> elem(0) == :"Elixir.Hello"
    end

    test "expands with alias is recursive" do
      alias Source, as: Hello
      alias Hello, as: World
      assert expand_env(quote(do: World), __ENV__) |> elem(0) == :"Elixir.Source"
    end
  end
  # Covers import/2 option validation: unknown functions, malformed
  # :only/:except values, conflicting options, and non-compile-time modules
  # all raise CompileError.
  describe "import" do
    test "raises on invalid macro" do
      message = ~r"cannot import Kernel.invalid/1 because it is undefined or private"

      assert_raise CompileError, message, fn ->
        expand(quote(do: import(Kernel, only: [invalid: 1])))
      end
    end

    test "raises on invalid options" do
      message = ~r"invalid :only option for import, expected a keyword list with integer values"

      assert_raise CompileError, message, fn ->
        expand(quote(do: import(Kernel, only: [invalid: nil])))
      end

      message = ~r"invalid :except option for import, expected a keyword list with integer values"

      assert_raise CompileError, message, fn ->
        expand(quote(do: import(Kernel, except: [invalid: nil])))
      end

      message = ~r/invalid options for import, expected a keyword list, got: "invalid_options"/

      assert_raise CompileError, message, fn ->
        expand(quote(do: import(Kernel, "invalid_options")))
      end
    end

    test "raises on conflicting options" do
      message =
        ~r":only and :except can only be given together to import when :only is either :functions or :macros"

      assert_raise CompileError, message, fn ->
        expand(quote(do: import(Kernel, only: [], except: [])))
      end
    end

    test "invalid import option" do
      assert_raise CompileError, ~r"unsupported option :ops given to import", fn ->
        expand(quote(do: import(:lists, ops: 1)))
      end
    end

    test "raises for non-compile-time module" do
      assert_raise CompileError, ~r"invalid argument for import, .*, got: {:a, :tuple}", fn ->
        expand(quote(do: import({:a, :tuple})))
      end
    end
  end
  # require/2 must receive a compile-time module and only known options.
  describe "require" do
    test "raises for non-compile-time module" do
      assert_raise CompileError, ~r"invalid argument for require, .*, got: {:a, :tuple}", fn ->
        expand(quote(do: require({:a, :tuple})))
      end
    end

    test "invalid options" do
      assert_raise CompileError, ~r"unsupported option :ops given to require", fn ->
        expand(quote(do: require(Foo, ops: 1)))
      end
    end
  end
  # The match operator: its left side is expanded in :match context, it binds
  # new variables into the env, and `_` never creates a binding.
  describe "=" do
    test "sets context to match" do
      assert expand(quote(do: __ENV__.context = :match)) == quote(do: :match = :match)
    end

    test "defines vars" do
      {output, env} = expand_env(quote(do: a = 1), __ENV__)
      assert output == quote(do: a = 1)
      assert Macro.Env.has_var?(env, {:a, __MODULE__})
    end

    test "does not define _" do
      {output, env} = expand_env(quote(do: _ = 1), __ENV__)
      assert output == quote(do: _ = 1)
      assert Macro.Env.vars(env) == []
    end
  end
  # Compile-time environment pseudo-variables: __MODULE__, __DIR__, __ENV__
  # and field access on __ENV__ (known fields inline; unknown fields stay as
  # a call on the escaped env).
  describe "environment macros" do
    test "__MODULE__" do
      assert expand(quote(do: __MODULE__)) == __MODULE__
    end

    test "__DIR__" do
      assert expand(quote(do: __DIR__)) == __DIR__
    end

    test "__ENV__" do
      env = %{__ENV__ | line: 0}
      assert expand_env(quote(do: __ENV__), env) == {Macro.escape(env), env}
    end

    test "__ENV__.accessor" do
      env = %{__ENV__ | line: 0}
      assert expand_env(quote(do: __ENV__.file), env) == {__ENV__.file, env}

      assert expand_env(quote(do: __ENV__.unknown), env) ==
               {quote(do: unquote(Macro.escape(env)).unknown), env}
    end
  end
  # Bare identifiers: unknown names become local calls, var!/1 demands an
  # existing binding, and `_` is illegal outside matches.
  describe "vars" do
    test "expand to local call" do
      {output, env} = expand_env(quote(do: a), __ENV__)
      assert output == quote(do: a())
      assert Macro.Env.vars(env) == []
    end

    test "forces variable to exist" do
      code =
        quote do
          var!(a) = 1
          var!(a)
        end

      assert expand(code)

      message = ~r"expected \"a\" to expand to an existing variable or be part of a match"

      assert_raise CompileError, message, fn ->
        expand(quote(do: var!(a)))
      end

      message =
        ~r"expected \"a\" \(context Unknown\) to expand to an existing variable or be part of a match"

      assert_raise CompileError, message, fn ->
        expand(quote(do: var!(a, Unknown)))
      end
    end

    test "raises for _ used outside of a match" do
      assert_raise CompileError, ~r"invalid use of _", fn ->
        expand(quote(do: {1, 2, _}))
      end
    end
  end
  # The pin operator: only valid in matches, only on existing variables.
  describe "^" do
    test "expands args" do
      before_expansion =
        quote do
          after_expansion = 1
          ^after_expansion = 1
        end

      after_expansion =
        quote do
          after_expansion = 1
          ^after_expansion = 1
        end

      assert expand(before_expansion) == after_expansion
    end

    test "raises outside match" do
      assert_raise CompileError, ~r"cannot use \^a outside of match clauses", fn ->
        expand(quote(do: ^a))
      end
    end

    test "raises without var" do
      message =
        ~r"invalid argument for unary operator \^, expected an existing variable, got: \^1"

      assert_raise CompileError, message, fn ->
        expand(quote(do: ^1 = 1))
      end
    end

    test "raises when the var is undefined" do
      assert_raise CompileError, ~r"undefined variable \^foo", fn ->
        expand(quote(do: ^foo = :foo))
      end
    end
  end
  # Local calls: operator locals rewrite to remote calls, locals are rejected
  # inside matches and guards, and imported macros expand in place.
  describe "locals" do
    test "expands to remote calls" do
      assert {{:., _, [Kernel, :=~]}, _, [{:a, _, []}, {:b, _, []}]} = expand(quote(do: a =~ b))
    end

    test "in matches" do
      message = ~r"cannot find or invoke local foo/1 inside match. .+ Called as: foo\(:bar\)"

      assert_raise CompileError, message, fn ->
        expand(quote(do: foo(:bar) = :bar))
      end
    end

    test "in guards" do
      code = quote(do: fn pid when :erlang.==(pid, self) -> pid end)
      expanded_code = quote(do: fn pid when :erlang.==(pid, :erlang.self()) -> pid end)
      assert clean_meta(expand(code), [:import, :context]) == expanded_code

      message = ~r"cannot find or invoke local foo/1"

      assert_raise CompileError, message, fn ->
        expand(quote(do: fn arg when foo(arg) -> arg end))
      end
    end

    test "custom imports" do
      before_expansion =
        quote do
          import Kernel.ExpansionTarget
          seventeen()
        end

      after_expansion =
        quote do
          :"Elixir.Kernel.ExpansionTarget"
          17
        end

      assert expand(before_expansion) == after_expansion
    end
  end
  # Tuple literals expand each element in order (bare names become calls).
  describe "tuples" do
    test "expanded as arguments" do
      assert expand(quote(do: {after_expansion = 1, a})) == quote(do: {after_expansion = 1, a()})

      assert expand(quote(do: {b, after_expansion = 1, a})) ==
               quote(do: {b(), after_expansion = 1, a()})
    end
  end
  # Map literals: argument expansion, restrictions on variables and pins used
  # as keys inside patterns, and the key-value shape requirement.
  describe "maps" do
    test "expanded as arguments" do
      assert expand(quote(do: %{a: after_expansion = 1, b: a})) ==
               quote(do: %{a: after_expansion = 1, b: a()})
    end

    test "with variables on keys" do
      ast =
        quote do
          %{(x = 1) => 1}
        end

      assert expand(ast) == ast

      ast =
        quote do
          x = 1
          %{%{^x => 1} => 2} = y()
        end

      assert expand(ast) == ast

      assert_raise CompileError,
                   ~r"cannot use pin operator \^x inside a data structure as a map key in a pattern",
                   fn ->
                     expand(
                       quote do
                         x = 1
                         %{{^x} => 1} = %{}
                       end
                     )
                   end

      assert_raise CompileError, ~r"cannot use variable x as map key inside a pattern", fn ->
        expand(quote(do: %{x => 1} = %{}))
      end

      assert_raise CompileError, ~r"undefined variable \^x", fn ->
        expand(quote(do: {x, %{^x => 1}} = %{}))
      end
    end

    test "expects key-value pairs" do
      assert_raise CompileError, ~r"expected key-value pairs in a map, got: :foo", fn ->
        expand(quote(do: unquote({:%{}, [], [:foo]})))
      end
    end
  end
  # Struct fixture used by the "structs" describe block; both fields carry
  # defaults so %User{} is valid with no arguments.
  defmodule User do
    defstruct name: "", age: 0
  end
  # Struct literal expansion: defaults are merged in, the struct name must be
  # compile-time (or a variable in matches), update syntax is banned in
  # matches, and pinned keys must exist on the struct.
  describe "structs" do
    test "expanded as arguments" do
      assert expand(quote(do: %User{})) ==
               quote(do: %:"Elixir.Kernel.ExpansionTest.User"{age: 0, name: ""})

      assert expand(quote(do: %User{name: "john doe"})) ==
               quote(do: %:"Elixir.Kernel.ExpansionTest.User"{age: 0, name: "john doe"})
    end

    test "expects atoms" do
      expand(quote(do: %unknown{a: 1} = x))

      message = ~r"expected struct name to be a compile time atom or alias"

      assert_raise CompileError, message, fn ->
        expand(quote(do: %unknown{a: 1}))
      end

      message = ~r"expected struct name to be a compile time atom or alias"

      assert_raise CompileError, message, fn ->
        expand(quote(do: %unquote(1){a: 1}))
      end

      message = ~r"expected struct name in a match to be a compile time atom, alias or a variable"

      assert_raise CompileError, message, fn ->
        expand(quote(do: %unquote(1){a: 1} = x))
      end
    end

    test "update syntax" do
      expand(quote(do: %{%{a: 0} | a: 1}))

      assert_raise CompileError, ~r"cannot use map/struct update syntax in match", fn ->
        expand(quote(do: %{%{a: 0} | a: 1} = %{}))
      end
    end

    test "dynamic syntax expands to itself" do
      assert expand(quote(do: %x{} = 1)) == quote(do: %x{} = 1)
    end

    test "unknown ^keys in structs" do
      message = ~r"unknown key \^my_key for struct Kernel\.ExpansionTest\.User"

      assert_raise CompileError, message, fn ->
        code =
          quote do
            my_key = :my_key
            %User{^my_key => :my_value} = %{}
          end

        expand(code)
      end
    end
  end
  # quote/2: expansion yields raw three-element AST tuples, and the :context
  # option, :do block and option list are all validated at compile time.
  describe "quote" do
    test "expanded to raw forms" do
      assert expand(quote(do: quote(do: hello))) == {:{}, [], [:hello, [], __MODULE__]}
    end

    test "raises if the :context option is nil or not a compile-time module" do
      assert_raise CompileError, ~r"invalid :context for quote, .*, got: :erlang\.self\(\)", fn ->
        expand(quote(do: quote(context: self(), do: :ok)))
      end

      assert_raise CompileError, ~r"invalid :context for quote, .*, got: nil", fn ->
        expand(quote(do: quote(context: nil, do: :ok)))
      end
    end

    test "raises for missing do" do
      assert_raise CompileError, ~r"missing :do option in \"quote\"", fn ->
        expand(quote(do: quote(context: Foo)))
      end
    end

    test "raises for invalid arguments" do
      assert_raise CompileError, ~r"invalid arguments for \"quote\"", fn ->
        expand(quote(do: quote(1 + 1)))
      end
    end

    test "raises unless its options are a keyword list" do
      assert_raise CompileError, ~r"invalid options for quote, expected a keyword list", fn ->
        expand(quote(do: quote(:foo, do: :foo)))
      end
    end
  end
  # fun.(args) calls: base and args are expanded; calling an atom is invalid.
  describe "anonymous calls" do
    test "expands base and args" do
      assert expand(quote(do: a.(b))) == quote(do: a().(b()))
    end

    test "raises on atom base" do
      assert_raise CompileError, ~r"invalid function call :foo.()", fn ->
        expand(quote(do: :foo.(a)))
      end
    end
  end
  # Remote calls: Kernel functions rewrite to :erlang BIFs, remote macros need
  # a prior require, and remote calls are restricted inside matches/guards
  # (with special allowances for ++, unary +/- and literal list concat).
  describe "remotes" do
    test "expands to Erlang" do
      assert expand(quote(do: Kernel.is_atom(a))) == quote(do: :erlang.is_atom(a()))
    end

    test "expands macros" do
      assert expand(quote(do: Kernel.ExpansionTest.thirteen())) == 13
    end

    test "expands receiver and args" do
      assert expand(quote(do: a.is_atom(b))) == quote(do: a().is_atom(b()))

      assert expand(quote(do: (after_expansion = :foo).is_atom(a))) ==
               quote(do: (after_expansion = :foo).is_atom(a()))
    end

    test "modules must be required for macros" do
      before_expansion =
        quote do
          require Kernel.ExpansionTarget
          Kernel.ExpansionTarget.seventeen()
        end

      after_expansion =
        quote do
          :"Elixir.Kernel.ExpansionTarget"
          17
        end

      assert expand(before_expansion) == after_expansion
    end

    test "raises when not required" do
      msg =
        ~r"you must require Kernel\.ExpansionTarget before invoking the macro Kernel\.ExpansionTarget\.seventeen/0"

      assert_raise CompileError, msg, fn ->
        expand(quote(do: Kernel.ExpansionTarget.seventeen()))
      end
    end

    test "in matches" do
      message = ~r"cannot invoke remote function Hello.fun_that_does_not_exist/0 inside a match"

      assert_raise CompileError, message, fn ->
        expand(quote(do: Hello.fun_that_does_not_exist() = :foo))
      end

      message = ~r"cannot invoke remote function :erlang.make_ref/0 inside a match"

      assert_raise CompileError, message, fn -> expand(quote(do: make_ref() = :foo)) end

      message = ~r"invalid argument for \+\+ operator inside a match"

      assert_raise CompileError, message, fn ->
        expand(quote(do: "a" ++ "b" = "ab"))
      end

      assert_raise CompileError, message, fn ->
        expand(quote(do: [1 | 2] ++ [3] = [1, 2, 3]))
      end

      assert_raise CompileError, message, fn ->
        expand(quote(do: [1] ++ 2 ++ [3] = [1, 2, 3]))
      end

      assert {:=, _, [-1, {{:., [], [:erlang, :-]}, _, [1]}]} = expand(quote(do: -1 = -1))
      assert {:=, _, [1, {{:., [], [:erlang, :+]}, _, [1]}]} = expand(quote(do: +1 = +1))

      assert {:=, _, [[{:|, _, [1, [{:|, _, [2, 3]}]]}], [1, 2, 3]]} =
               expand(quote(do: [1] ++ [2] ++ 3 = [1, 2, 3]))
    end

    test "in guards" do
      message =
        ~r"cannot invoke remote function Hello.something_that_does_not_exist/1 inside guard"

      assert_raise CompileError, message, fn ->
        expand(quote(do: fn arg when Hello.something_that_does_not_exist(arg) -> arg end))
      end

      message = ~r"cannot invoke remote function :erlang.make_ref/0 inside guard"

      assert_raise CompileError, message, fn ->
        expand(quote(do: fn arg when make_ref() -> arg end))
      end
    end

    test "in guards with bitstrings" do
      message = ~r"cannot invoke remote function String.Chars.to_string/1 inside guards"

      assert_raise CompileError, message, fn ->
        expand(quote(do: fn arg when "#{arg}foo" == "argfoo" -> arg end))
      end

      assert_raise CompileError, message, fn ->
        expand(
          quote do
            fn arg when <<:"Elixir.Kernel".to_string(arg)::binary, "foo">> == "argfoo" ->
              arg
            end
          end
        )
      end
    end
  end
  # for-comprehensions: variable scoping (generator/filter/option vars do not
  # leak), generator requirements, and validation of :into/:uniq/:reduce and
  # unknown options.
  describe "comprehensions" do
    test "variables do not leak with enums" do
      before_expansion =
        quote do
          for(a <- b, do: c = 1)
          c
        end

      after_expansion =
        quote do
          for(a <- b(), do: c = 1)
          c()
        end

      assert expand(before_expansion) == after_expansion
    end

    test "variables do not leak with binaries" do
      before_expansion =
        quote do
          for(<<a <- b>>, do: c = 1)
          c
        end

      after_expansion =
        quote do
          for(<<(<<a::integer()>> <- b())>>, do: c = 1)
          c()
        end

      assert expand(before_expansion) |> clean_meta([:alignment]) == after_expansion
    end

    test "variables inside filters are available in blocks" do
      assert expand(quote(do: for(a <- b, c = a, do: c))) ==
               quote(do: for(a <- b(), c = a, do: c))
    end

    test "variables inside options do not leak" do
      before_expansion =
        quote do
          for(a <- c = b, into: [], do: 1)
          c
        end

      after_expansion =
        quote do
          for(a <- c = b(), do: 1, into: [])
          c()
        end

      assert expand(before_expansion) == after_expansion

      before_expansion =
        quote do
          for(a <- b, into: c = [], do: 1)
          c
        end

      after_expansion =
        quote do
          for(a <- b(), do: 1, into: c = [])
          c()
        end

      assert expand(before_expansion) == after_expansion
    end

    test "must start with generators" do
      assert_raise CompileError, ~r"for comprehensions must start with a generator", fn ->
        expand(quote(do: for(is_atom(:foo), do: :foo)))
      end

      assert_raise CompileError, ~r"for comprehensions must start with a generator", fn ->
        expand(quote(do: for(do: :foo)))
      end
    end

    test "requires size on binary generators" do
      message = ~r"a binary field without size is only allowed at the end of a binary pattern"

      assert_raise CompileError, message, fn ->
        expand(quote(do: for(<<x::binary <- "123">>, do: x)))
      end
    end

    test "require do option" do
      assert_raise CompileError, ~r"missing :do option in \"for\"", fn ->
        expand(quote(do: for(_ <- 1..2)))
      end
    end

    test "uniq option is boolean" do
      message = ~r":uniq option for comprehensions only accepts a boolean, got: x"

      assert_raise CompileError, message, fn ->
        expand(quote(do: for(x <- 1..2, uniq: x, do: x)))
      end
    end

    test "raise error on invalid reduce" do
      assert_raise CompileError,
                   ~r"cannot use :reduce alongside :into/:uniq in comprehension",
                   fn ->
                     expand(quote(do: for(x <- 1..3, reduce: %{}, into: %{}, do: (acc -> acc))))
                   end

      assert_raise CompileError,
                   ~r"the do block was written using acc -> expr clauses but the :reduce option was not given",
                   fn -> expand(quote(do: for(x <- 1..3, do: (acc -> acc)))) end

      assert_raise CompileError,
                   ~r"when using :reduce with comprehensions, the do block must be written using acc -> expr clauses",
                   fn -> expand(quote(do: for(x <- 1..3, reduce: %{}, do: x))) end

      assert_raise CompileError,
                   ~r"when using :reduce with comprehensions, the do block must be written using acc -> expr clauses",
                   fn -> expand(quote(do: for(x <- 1..3, reduce: %{}, do: (acc, x -> x)))) end
    end

    test "raise error for unknown options" do
      assert_raise CompileError, ~r"unsupported option :else given to for", fn ->
        expand(quote(do: for(_ <- 1..2, do: 1, else: 1)))
      end

      assert_raise CompileError, ~r"unsupported option :other given to for", fn ->
        expand(quote(do: for(_ <- 1..2, do: 1, other: 1)))
      end
    end
  end
  # with/1: clause variables are visible in :do but never leak out, :else
  # requires -> clauses, and unknown options raise.
  describe "with" do
    test "variables do not leak" do
      before_expansion =
        quote do
          with({foo} <- {bar}, do: baz = :ok)
          baz
        end

      after_expansion =
        quote do
          with({foo} <- {bar()}, do: baz = :ok)
          baz()
        end

      assert expand(before_expansion) == after_expansion
    end

    test "variables are available in do option" do
      before_expansion =
        quote do
          with({foo} <- {bar}, do: baz = foo)
          baz
        end

      after_expansion =
        quote do
          with({foo} <- {bar()}, do: baz = foo)
          baz()
        end

      assert expand(before_expansion) == after_expansion
    end

    test "variables inside else do not leak" do
      before_expansion =
        quote do
          with({foo} <- {bar}, do: :ok, else: (baz -> baz))
          baz
        end

      after_expansion =
        quote do
          with({foo} <- {bar()}, do: :ok, else: (baz -> baz))
          baz()
        end

      assert expand(before_expansion) == after_expansion
    end

    test "fails if \"do\" is missing" do
      assert_raise CompileError, ~r"missing :do option in \"with\"", fn ->
        expand(quote(do: with(_ <- true, [])))
      end
    end

    test "fails on invalid else option" do
      assert_raise CompileError, ~r"expected -> clauses for :else in \"with\"", fn ->
        expand(quote(do: with(_ <- true, do: :ok, else: [:error])))
      end

      assert_raise CompileError, ~r"expected -> clauses for :else in \"with\"", fn ->
        expand(quote(do: with(_ <- true, do: :ok, else: :error)))
      end
    end

    test "fails for invalid options" do
      # Only the required "do" is present alongside the unexpected option.
      assert_raise CompileError, ~r"unexpected option :foo in \"with\"", fn ->
        expand(quote(do: with(_ <- true, foo: :bar, do: :ok)))
      end

      # More options are present alongside the unexpected option.
      assert_raise CompileError, ~r"unexpected option :foo in \"with\"", fn ->
        expand(quote(do: with(_ <- true, do: :ok, else: (_ -> :ok), foo: :bar)))
      end
    end
  end
  # Capture operator: &local/arity and &Mod.fun/arity forms, placeholder
  # numbering rules (&1..&255, contiguous), and the many invalid shapes
  # (blocks, nesting, captures with no placeholders).
  describe "&" do
    test "keeps locals" do
      assert expand(quote(do: &unknown/2)) == {:&, [], [{:/, [], [{:unknown, [], nil}, 2]}]}
      assert expand(quote(do: &unknown(&1, &2))) == {:&, [], [{:/, [], [{:unknown, [], nil}, 2]}]}
    end

    test "expands remotes" do
      assert expand(quote(do: &List.flatten/2)) ==
               quote(do: &:"Elixir.List".flatten/2) |> clean_meta([:import, :context])

      assert expand(quote(do: &Kernel.is_atom/1)) ==
               quote(do: &:erlang.is_atom/1) |> clean_meta([:import, :context])
    end

    test "expands macros" do
      before_expansion =
        quote do
          require Kernel.ExpansionTarget
          &Kernel.ExpansionTarget.seventeen/0
        end

      after_expansion =
        quote do
          :"Elixir.Kernel.ExpansionTarget"
          fn -> 17 end
        end

      assert expand(before_expansion) == after_expansion
    end

    test "fails on non-continuous" do
      assert_raise CompileError, ~r"capture &0 is not allowed", fn ->
        expand(quote(do: &foo(&0)))
      end

      assert_raise CompileError, ~r"capture &2 cannot be defined without &1", fn ->
        expand(quote(do: & &2))
      end

      assert_raise CompileError, ~r"capture &255 cannot be defined without &1", fn ->
        expand(quote(do: & &255))
      end
    end

    test "fails on block" do
      message = ~r"invalid args for &, block expressions are not allowed, got: \(\n 1\n 2\n\)"

      assert_raise CompileError, message, fn ->
        code =
          quote do
            &(
              1
              2
            )
          end

        expand(code)
      end
    end

    test "fails on other types" do
      message =
        ~r"invalid args for &, expected an expression in the format of &Mod.fun/arity, &local/arity or a capture containing at least one argument as &1, got: :foo"

      assert_raise CompileError, message, fn ->
        expand(quote(do: &:foo))
      end
    end

    test "fails on invalid arity" do
      message = ~r"invalid arity for &, expected a number between 0 and 255, got: 256"

      assert_raise CompileError, message, fn ->
        expand(quote(do: &Mod.fun/256))
      end
    end

    test "fails when no captures" do
      message =
        ~r"invalid args for &, expected an expression in the format of &Mod.fun/arity, &local/arity or a capture containing at least one argument as &1, got: foo()"

      assert_raise CompileError, message, fn ->
        expand(quote(do: &foo()))
      end
    end

    test "fails on nested capture" do
      assert_raise CompileError, ~r"nested captures via & are not allowed: &\(&1\)", fn ->
        expand(quote(do: &(& &1)))
      end
    end

    test "fails on integers" do
      assert_raise CompileError, ~r"unhandled &1 outside of a capture", fn ->
        expand(quote(do: &1))
      end
    end
  end
  # fn expressions: per-clause expansion and lexical isolation, guard context
  # (:guard), no variable leakage, and the uniform-arity requirement.
  describe "fn" do
    test "expands each clause" do
      before_expansion =
        quote do
          fn
            x -> x
            _ -> x
          end
        end

      after_expansion =
        quote do
          fn
            x -> x
            _ -> x()
          end
        end

      assert expand(before_expansion) == after_expansion
    end

    test "does not share lexical scope between clauses" do
      before_expansion =
        quote do
          fn
            1 -> import List
            2 -> flatten([1, 2, 3])
          end
        end

      after_expansion =
        quote do
          fn
            1 -> :"Elixir.List"
            2 -> flatten([1, 2, 3])
          end
        end

      assert expand(before_expansion) == after_expansion
    end

    test "expands guards" do
      assert expand(quote(do: fn x when x when __ENV__.context -> true end)) ==
               quote(do: fn x when x when :guard -> true end)
    end

    test "does not leak vars" do
      before_expansion =
        quote do
          fn x -> x end
          x
        end

      after_expansion =
        quote do
          fn x -> x end
          x()
        end

      assert expand(before_expansion) == after_expansion
    end

    test "raises on mixed arities" do
      message = ~r"cannot mix clauses with different arities in anonymous functions"

      assert_raise CompileError, message, fn ->
        code =
          quote do
            fn
              x -> x
              x, y -> x + y
            end
          end

        expand(code)
      end
    end
  end
  # cond/1: clause expansion and scoping (head and body vars never leak),
  # exactly one :do with -> clauses of arity one, and the special rejection
  # of a trailing `_ ->` clause.
  describe "cond" do
    test "expands each clause" do
      before_expansion =
        quote do
          cond do
            x = 1 -> x
            true -> x
          end
        end

      after_expansion =
        quote do
          cond do
            x = 1 -> x
            true -> x()
          end
        end

      assert expand(before_expansion) == after_expansion
    end

    test "does not share lexical scope between clauses" do
      before_expansion =
        quote do
          cond do
            1 -> import List
            2 -> flatten([1, 2, 3])
          end
        end

      after_expansion =
        quote do
          cond do
            1 -> :"Elixir.List"
            2 -> flatten([1, 2, 3])
          end
        end

      assert expand(before_expansion) == after_expansion
    end

    test "does not leaks vars on head" do
      before_expansion =
        quote do
          cond do
            x = 1 -> x
            y = 2 -> y
          end

          :erlang.+(x, y)
        end

      after_expansion =
        quote do
          cond do
            x = 1 -> x
            y = 2 -> y
          end

          :erlang.+(x(), y())
        end

      assert expand(before_expansion) == after_expansion
    end

    test "does not leak vars" do
      before_expansion =
        quote do
          cond do
            1 -> x = 1
            2 -> y = 2
          end

          :erlang.+(x, y)
        end

      after_expansion =
        quote do
          cond do
            1 -> x = 1
            2 -> y = 2
          end

          :erlang.+(x(), y())
        end

      assert expand(before_expansion) == after_expansion
    end

    test "expects exactly one do" do
      assert_raise CompileError, ~r"missing :do option in \"cond\"", fn ->
        expand(quote(do: cond([])))
      end

      assert_raise CompileError, ~r"duplicated :do clauses given for \"cond\"", fn ->
        expand(quote(do: cond(do: (x -> x), do: (y -> y))))
      end
    end

    test "expects clauses" do
      assert_raise CompileError, ~r"expected -> clauses for :do in \"cond\"", fn ->
        expand(quote(do: cond(do: :ok)))
      end

      assert_raise CompileError, ~r"expected -> clauses for :do in \"cond\"", fn ->
        expand(quote(do: cond(do: [:not, :clauses])))
      end
    end

    test "expects one argument in clauses" do
      assert_raise CompileError, ~r"expected one arg for :do clauses \(->\) in \"cond\"", fn ->
        code =
          quote do
            cond do
              _, _ -> :ok
            end
          end

        expand(code)
      end
    end

    test "raises for invalid arguments" do
      assert_raise CompileError, ~r"invalid arguments for \"cond\"", fn ->
        expand(quote(do: cond(:foo)))
      end
    end

    test "raises with invalid options" do
      assert_raise CompileError, ~r"unexpected option :foo in \"cond\"", fn ->
        expand(quote(do: cond(do: (1 -> 1), foo: :bar)))
      end
    end

    test "raises for _ in clauses" do
      message = ~r"invalid use of _ inside \"cond\"\. If you want the last clause"

      assert_raise CompileError, message, fn ->
        code =
          quote do
            cond do
              x -> x
              _ -> :raise
            end
          end

        expand(code)
      end
    end
  end
  # case/2: subject and clause expansion, clause-local lexical scope, guard
  # context, no variable leakage, and option/clause-shape validation.
  describe "case" do
    test "expands each clause" do
      before_expansion =
        quote do
          case w do
            x -> x
            _ -> x
          end
        end

      after_expansion =
        quote do
          case w() do
            x -> x
            _ -> x()
          end
        end

      assert expand(before_expansion) == after_expansion
    end

    test "does not share lexical scope between clauses" do
      before_expansion =
        quote do
          case w do
            1 -> import List
            2 -> flatten([1, 2, 3])
          end
        end

      after_expansion =
        quote do
          case w() do
            1 -> :"Elixir.List"
            2 -> flatten([1, 2, 3])
          end
        end

      assert expand(before_expansion) == after_expansion
    end

    test "expands guards" do
      before_expansion =
        quote do
          case w do
            x when x when __ENV__.context -> true
          end
        end

      after_expansion =
        quote do
          case w() do
            x when x when :guard -> true
          end
        end

      assert expand(before_expansion) == after_expansion
    end

    test "does not leaks vars on head" do
      before_expansion =
        quote do
          case w do
            x -> x
            y -> y
          end

          :erlang.+(x, y)
        end

      after_expansion =
        quote do
          case w() do
            x -> x
            y -> y
          end

          :erlang.+(x(), y())
        end

      assert expand(before_expansion) == after_expansion
    end

    test "does not leak vars" do
      before_expansion =
        quote do
          case w do
            x -> x = x
            y -> y = y
          end

          :erlang.+(x, y)
        end

      after_expansion =
        quote do
          case w() do
            x -> x = x
            y -> y = y
          end

          :erlang.+(x(), y())
        end

      assert expand(before_expansion) == after_expansion
    end

    test "expects exactly one do" do
      assert_raise CompileError, ~r"missing :do option in \"case\"", fn ->
        expand(quote(do: case(e, [])))
      end

      assert_raise CompileError, ~r"duplicated :do clauses given for \"case\"", fn ->
        expand(quote(do: case(e, do: (x -> x), do: (y -> y))))
      end
    end

    test "expects clauses" do
      assert_raise CompileError, ~r"expected -> clauses for :do in \"case\"", fn ->
        code =
          quote do
            case e do
              x
            end
          end

        expand(code)
      end

      assert_raise CompileError, ~r"expected -> clauses for :do in \"case\"", fn ->
        code =
          quote do
            case e do
              [:not, :clauses]
            end
          end

        expand(code)
      end
    end

    test "expects exactly one argument in clauses" do
      assert_raise CompileError, ~r"expected one arg for :do clauses \(->\) in \"case\"", fn ->
        code =
          quote do
            case e do
              _, _ -> :ok
            end
          end

        expand(code)
      end
    end

    test "fails with invalid arguments" do
      assert_raise CompileError, ~r"invalid arguments for \"case\"", fn ->
        expand(quote(do: case(:foo, :bar)))
      end
    end

    test "fails for invalid options" do
      assert_raise CompileError, ~r"unexpected option :foo in \"case\"", fn ->
        expand(quote(do: case(e, do: (x -> x), foo: :bar)))
      end
    end
  end
  # receive/1: clause expansion and scoping mirroring case/cond, plus the
  # :after option (single clause, one arg) and :do/:after presence rules.
  describe "receive" do
    test "expands each clause" do
      before_expansion =
        quote do
          receive do
            x -> x
            _ -> x
          end
        end

      after_expansion =
        quote do
          receive do
            x -> x
            _ -> x()
          end
        end

      assert expand(before_expansion) == after_expansion
    end

    test "does not share lexical scope between clauses" do
      before_expansion =
        quote do
          receive do
            1 -> import List
            2 -> flatten([1, 2, 3])
          end
        end

      after_expansion =
        quote do
          receive do
            1 -> :"Elixir.List"
            2 -> flatten([1, 2, 3])
          end
        end

      assert expand(before_expansion) == after_expansion
    end

    test "expands guards" do
      before_expansion =
        quote do
          receive do
            x when x when __ENV__.context -> true
          end
        end

      after_expansion =
        quote do
          receive do
            x when x when :guard -> true
          end
        end

      assert expand(before_expansion) == after_expansion
    end

    test "does not leaks clause vars" do
      before_expansion =
        quote do
          receive do
            x -> x
            y -> y
          end

          :erlang.+(x, y)
        end

      after_expansion =
        quote do
          receive do
            x -> x
            y -> y
          end

          :erlang.+(x(), y())
        end

      assert expand(before_expansion) == after_expansion
    end

    test "does not leak vars" do
      before_expansion =
        quote do
          receive do
            x -> x = x
            y -> y = y
          end

          :erlang.+(x, y)
        end

      after_expansion =
        quote do
          receive do
            x -> x = x
            y -> y = y
          end

          :erlang.+(x(), y())
        end

      assert expand(before_expansion) == after_expansion
    end

    test "does not leak vars on after" do
      before_expansion =
        quote do
          receive do
            x -> x = x
          after
            y ->
              y
              w = y
          end

          :erlang.+(x, w)
        end

      after_expansion =
        quote do
          receive do
            x -> x = x
          after
            y() ->
              y()
              w = y()
          end

          :erlang.+(x(), w())
        end

      assert expand(before_expansion) == after_expansion
    end

    test "expects exactly one do or after" do
      assert_raise CompileError, ~r"missing :do/:after option in \"receive\"", fn ->
        expand(quote(do: receive([])))
      end

      assert_raise CompileError, ~r"duplicated :do clauses given for \"receive\"", fn ->
        expand(quote(do: receive(do: (x -> x), do: (y -> y))))
      end

      assert_raise CompileError, ~r"duplicated :after clauses given for \"receive\"", fn ->
        code =
          quote do
            receive do
              x -> x
            after
              y -> y
            after
              z -> z
            end
          end

        expand(code)
      end
    end

    test "expects clauses" do
      assert_raise CompileError, ~r"expected -> clauses for :do in \"receive\"", fn ->
        code =
          quote do
            receive do
              x
            end
          end

        expand(code)
      end

      assert_raise CompileError, ~r"expected -> clauses for :do in \"receive\"", fn ->
        code =
          quote do
            receive do
              [:not, :clauses]
            end
          end

        expand(code)
      end
    end

    test "expects on argument for do/after clauses" do
      assert_raise CompileError, ~r"expected one arg for :do clauses \(->\) in \"receive\"", fn ->
        code =
          quote do
            receive do
              _, _ -> :ok
            end
          end

        expand(code)
      end

      message = ~r"expected one arg for :after clauses \(->\) in \"receive\""

      assert_raise CompileError, message, fn ->
        code =
          quote do
            receive do
              x -> x
            after
              _, _ -> :ok
            end
          end

        expand(code)
      end
    end

    test "expects a single clause for \"after\"" do
      assert_raise CompileError, ~r"expected a single -> clause for :after in \"receive\"", fn ->
        code =
          quote do
            receive do
              x -> x
            after
              1 -> y
              2 -> z
            end
          end

        expand(code)
      end
    end

    test "raises for invalid arguments" do
      assert_raise CompileError, ~r"invalid arguments for \"receive\"", fn ->
        expand(quote(do: receive(:foo)))
      end
    end

    test "raises with invalid options" do
      assert_raise CompileError, ~r"unexpected option :foo in \"receive\"", fn ->
        expand(quote(do: receive(do: (x -> x), foo: :bar)))
      end
    end
  end
  # Expansion of the `try` special form: each test compares the quoted AST
  # before expansion against the expected expanded AST (variables become
  # calls when unbound, `rescue` aliases are rewritten into `in` checks),
  # and the remaining tests pin the compile-time validation of options,
  # duplicate sections, and clause shapes.
  describe "try" do
    test "expands catch" do
      before_expansion =
        quote do
          try do
            x
          catch
            x, y -> z = :erlang.+(x, y)
          end

          z
        end

      after_expansion =
        quote do
          try do
            x()
          catch
            x, y -> z = :erlang.+(x, y)
          end

          z()
        end

      assert expand(before_expansion) == after_expansion
    end

    test "expands after" do
      before_expansion =
        quote do
          try do
            x
          after
            z = y
          end

          z
        end

      after_expansion =
        quote do
          try do
            x()
          after
            z = y()
          end

          z()
        end

      assert expand(before_expansion) == after_expansion
    end

    test "expands else" do
      before_expansion =
        quote do
          try do
            x
          catch
            _, _ -> :ok
          else
            z -> z
          end

          z
        end

      after_expansion =
        quote do
          try do
            x()
          catch
            _, _ -> :ok
          else
            z -> z
          end

          z()
        end

      assert expand(before_expansion) == after_expansion
    end

    test "expands rescue" do
      before_expansion =
        quote do
          try do
            x
          rescue
            x -> x
            Error -> x
          end

          x
        end

      # A bare alias in `rescue` is rewritten to an `in` check on the
      # exception module.
      after_expansion =
        quote do
          try do
            x()
          rescue
            x -> x
            unquote(:in)(_, [:"Elixir.Error"]) -> x()
          end

          x()
        end

      assert expand(before_expansion) == after_expansion
    end

    test "expects more than do" do
      assert_raise CompileError, ~r"missing :catch/:rescue/:after option in \"try\"", fn ->
        code =
          quote do
            try do
              x = y
            end

            x
          end

        expand(code)
      end
    end

    test "raises if do is missing" do
      assert_raise CompileError, ~r"missing :do option in \"try\"", fn ->
        expand(quote(do: try([])))
      end
    end

    # Each of :do/:rescue/:after/:else/:catch may appear at most once.
    test "expects at most one clause" do
      assert_raise CompileError, ~r"duplicated :do clauses given for \"try\"", fn ->
        expand(quote(do: try(do: e, do: f)))
      end

      assert_raise CompileError, ~r"duplicated :rescue clauses given for \"try\"", fn ->
        code =
          quote do
            try do
              e
            rescue
              x -> x
            rescue
              y -> y
            end
          end

        expand(code)
      end

      assert_raise CompileError, ~r"duplicated :after clauses given for \"try\"", fn ->
        code =
          quote do
            try do
              e
            after
              x = y
            after
              x = y
            end
          end

        expand(code)
      end

      assert_raise CompileError, ~r"duplicated :else clauses given for \"try\"", fn ->
        code =
          quote do
            try do
              e
            else
              x -> x
            else
              y -> y
            end
          end

        expand(code)
      end

      assert_raise CompileError, ~r"duplicated :catch clauses given for \"try\"", fn ->
        code =
          quote do
            try do
              e
            catch
              x -> x
            catch
              y -> y
            end
          end

        expand(code)
      end
    end

    test "raises with invalid arguments" do
      assert_raise CompileError, ~r"invalid arguments for \"try\"", fn ->
        expand(quote(do: try(:foo)))
      end
    end

    test "raises with invalid options" do
      assert_raise CompileError, ~r"unexpected option :foo in \"try\"", fn ->
        expand(quote(do: try(do: x, foo: :bar)))
      end
    end

    test "expects exactly one argument in rescue clauses" do
      assert_raise CompileError, ~r"expected one arg for :rescue clauses \(->\) in \"try\"", fn ->
        code =
          quote do
            try do
              x
            rescue
              _, _ -> :ok
            end
          end

        expand(code)
      end
    end

    test "expects an alias, a variable, or \"var in [alias]\" as the argument of rescue clauses" do
      assert_raise CompileError, ~r"invalid \"rescue\" clause\. The clause should match", fn ->
        code =
          quote do
            try do
              x
            rescue
              function(:call) -> :ok
            end
          end

        expand(code)
      end
    end

    test "expects one or two args for catch clauses" do
      message = ~r"expected one or two args for :catch clauses \(->\) in \"try\""

      assert_raise CompileError, message, fn ->
        code =
          quote do
            try do
              x
            catch
              _, _, _ -> :ok
            end
          end

        expand(code)
      end
    end

    # As with `receive`, each section must contain `->` clauses, not bare
    # expressions or plain lists.
    test "expects clauses for rescue, else, catch" do
      assert_raise CompileError, ~r"expected -> clauses for :rescue in \"try\"", fn ->
        code =
          quote do
            try do
              e
            rescue
              x
            end
          end

        expand(code)
      end

      assert_raise CompileError, ~r"expected -> clauses for :rescue in \"try\"", fn ->
        code =
          quote do
            try do
              e
            rescue
              [:not, :clauses]
            end
          end

        expand(code)
      end

      assert_raise CompileError, ~r"expected -> clauses for :catch in \"try\"", fn ->
        code =
          quote do
            try do
              e
            catch
              x
            end
          end

        expand(code)
      end

      assert_raise CompileError, ~r"expected -> clauses for :catch in \"try\"", fn ->
        code =
          quote do
            try do
              e
            catch
              [:not, :clauses]
            end
          end

        expand(code)
      end

      assert_raise CompileError, ~r"expected -> clauses for :else in \"try\"", fn ->
        code =
          quote do
            try do
              e
            catch
              _ -> :ok
            else
              x
            end
          end

        expand(code)
      end

      assert_raise CompileError, ~r"expected -> clauses for :else in \"try\"", fn ->
        code =
          quote do
            try do
              e
            catch
              _ -> :ok
            else
              [:not, :clauses]
            end
          end

        expand(code)
      end
    end
  end
  # Expansion of bitstring (`<<>>`) syntax: default type/size/unit inference,
  # specifier normalization, binary-concatenation (`<>`) inlining, and the
  # compile-time validation of sizes, units, specifiers, and match positions.
  # `clean_meta([:alignment])` strips expansion-added alignment metadata so
  # ASTs can be compared structurally.
  describe "bitstrings" do
    test "parallel match" do
      assert expand(quote(do: <<foo>> = <<bar>>)) |> clean_meta([:alignment]) ==
               quote(do: <<foo::integer()>> = <<bar()::integer()>>)

      assert expand(quote(do: <<foo>> = baz = <<bar>>)) |> clean_meta([:alignment]) ==
               quote(do: <<foo::integer()>> = baz = <<bar()::integer()>>)

      assert expand(quote(do: <<foo>> = {<<baz>>} = bar())) |> clean_meta([:alignment]) ==
               quote(do: <<foo::integer()>> = {<<baz::integer()>>} = bar())

      # Two binary patterns cannot be matched against each other in the
      # same `=` chain, at any nesting depth.
      message = ~r"binary patterns cannot be matched in parallel using \"=\""

      assert_raise CompileError, message, fn ->
        expand(quote(do: <<foo>> = <<baz>> = bar()))
      end

      assert_raise CompileError, message, fn ->
        expand(quote(do: <<foo>> = qux = <<baz>> = bar()))
      end

      assert_raise CompileError, message, fn ->
        expand(quote(do: {<<foo>>} = {qux} = {<<baz>>} = bar()))
      end

      assert expand(quote(do: {:foo, <<foo>>} = {<<baz>>, :baz} = bar()))

      # two-element tuples are special cased
      assert_raise CompileError, message, fn ->
        expand(quote(do: {:foo, <<foo>>} = {:foo, <<baz>>} = bar()))
      end

      assert_raise CompileError, message, fn ->
        expand(quote(do: %{foo: <<foo>>} = %{baz: <<qux>>, foo: <<baz>>} = bar()))
      end

      assert expand(quote(do: %{foo: <<foo>>} = %{baz: <<baz>>} = bar()))

      assert_raise CompileError, message, fn ->
        expand(quote(do: %_{foo: <<foo>>} = %_{foo: <<baz>>} = bar()))
      end

      assert expand(quote(do: %_{foo: <<foo>>} = %_{baz: <<baz>>} = bar()))

      assert_raise CompileError, message, fn ->
        expand(quote(do: %_{foo: <<foo>>} = %{foo: <<baz>>} = bar()))
      end

      assert expand(quote(do: %_{foo: <<foo>>} = %{baz: <<baz>>} = bar()))

      # The same restriction applies inside `case` patterns.
      assert_raise CompileError, message, fn ->
        code =
          quote do
            case bar() do
              <<foo>> = <<baz>> -> nil
            end
          end

        expand(code)
      end

      assert_raise CompileError, message, fn ->
        code =
          quote do
            case bar() do
              <<foo>> = qux = <<baz>> -> nil
            end
          end

        expand(code)
      end

      assert_raise CompileError, message, fn ->
        code =
          quote do
            case bar() do
              [<<foo>>] = [<<baz>>] -> nil
            end
          end

        expand(code)
      end
    end

    test "nested match" do
      assert expand(quote(do: <<foo = bar>>)) |> clean_meta([:alignment]) ==
               quote(do: <<foo = bar()::integer()>>)

      assert expand(quote(do: <<?-, <<_, _::binary>> = rest()::binary>>))
             |> clean_meta([:alignment]) ==
               quote(do: <<45::integer(), <<_::integer(), _::binary()>> = rest()::binary()>>)

      message = ~r"cannot pattern match inside a bitstring that is already in match"

      assert_raise CompileError, message, fn ->
        expand(quote(do: <<bar = baz>> = foo()))
      end

      assert_raise CompileError, message, fn ->
        expand(quote(do: <<?-, <<_, _::binary>> = rest::binary>> = foo()))
      end
    end

    test "inlines binaries inside interpolation" do
      import Kernel.ExpansionTarget

      assert expand(quote(do: "foo#{bar()}" = "foobar")) |> clean_meta([:alignment]) ==
               quote(do: <<"foo"::binary(), "bar"::binary()>> = "foobar")
    end

    test "inlines binaries inside interpolation is isomorphic after manual expansion" do
      import Kernel.ExpansionTarget

      quoted = Macro.prewalk(quote(do: "foo#{bar()}" = "foobar"), &Macro.expand(&1, __ENV__))

      assert expand(quoted) |> clean_meta([:alignment]) ==
               quote(do: <<"foo"::binary(), "bar"::binary()>> = "foobar")
    end

    test "expands size * unit" do
      import Kernel, except: [-: 2]
      import Kernel.ExpansionTarget

      assert expand(quote(do: <<x::13>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::integer()-size(13)>>)

      assert expand(quote(do: <<x::13*6>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::integer()-unit(6)-size(13)>>)

      assert expand(quote(do: <<x::_*6-binary>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::binary()-unit(6)>>)

      assert expand(quote(do: <<x::13*6-binary>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::binary()-unit(6)-size(13)>>)

      assert expand(quote(do: <<x::binary-(13 * 6)-binary>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::binary()-unit(6)-size(13)>>)

      # Size/unit positions may come from macros too.
      assert expand(quote(do: <<x::seventeen()>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::integer()-size(17)>>)

      assert expand(quote(do: <<x::seventeen()*2>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::integer()-unit(2)-size(17)>>)

      assert expand(quote(do: <<x::seventeen()*seventeen()>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::integer()-unit(17)-size(17)>>)

      assert expand(quote(do: <<x::_*seventeen()-binary>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::binary()-unit(17)>>)
    end

    test "expands binary/bitstring specifiers" do
      import Kernel, except: [-: 2]

      assert expand(quote(do: <<x::binary>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::binary()>>)

      assert expand(quote(do: <<x::bytes>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::binary()>>)

      assert expand(quote(do: <<x::bitstring>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::bitstring()>>)

      assert expand(quote(do: <<x::bits>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::bitstring()>>)

      # Endianness is meaningless for whole binaries and is dropped.
      assert expand(quote(do: <<x::binary-little>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::binary()>>)

      message = ~r"signed and unsigned specifiers are supported only on integer and float type"

      assert_raise CompileError, message, fn ->
        expand(quote(do: <<x()::binary-signed>>))
      end
    end

    test "expands utf* specifiers" do
      import Kernel, except: [-: 2]

      assert expand(quote(do: <<x::utf8>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::utf8()>>)

      assert expand(quote(do: <<x::utf16>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::utf16()>>)

      assert expand(quote(do: <<x::utf32-little>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::utf32()-little()>>)

      message = ~r"signed and unsigned specifiers are supported only on integer and float type"

      assert_raise CompileError, message, fn ->
        expand(quote(do: <<x()::utf8-signed>>))
      end

      assert_raise CompileError, ~r"size and unit are not supported on utf types", fn ->
        expand(quote(do: <<x()::utf8-size(32)>>))
      end
    end

    test "expands numbers specifiers" do
      import Kernel, except: [-: 2]

      assert expand(quote(do: <<x::integer>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::integer()>>)

      assert expand(quote(do: <<x::little>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::integer()-little()>>)

      assert expand(quote(do: <<x::signed>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::integer()-signed()>>)

      assert expand(quote(do: <<x::signed-native>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::integer()-native()-signed()>>)

      assert expand(quote(do: <<x::float-signed-native>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::float()-native()-signed()>>)

      message =
        ~r"integer and float types require a size specifier if the unit specifier is given"

      assert_raise CompileError, message, fn ->
        expand(quote(do: <<x::unit(8)>>))
      end
    end

    test "expands macro specifiers" do
      import Kernel, except: [-: 2]
      import Kernel.ExpansionTarget

      assert expand(quote(do: <<x::seventeen>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::integer()-size(17)>>)

      assert expand(quote(do: <<seventeen::seventeen, x::size(seventeen)>> = 1))
             |> clean_meta([:alignment]) ==
               quote(do: <<seventeen::integer()-size(17), x::integer()-size(seventeen)>> = 1)
    end

    test "expands macro in args" do
      import Kernel, except: [-: 2]

      before_expansion =
        quote do
          require Kernel.ExpansionTarget
          <<x::size(Kernel.ExpansionTarget.seventeen())>>
        end

      after_expansion =
        quote do
          :"Elixir.Kernel.ExpansionTarget"
          <<x()::integer()-size(17)>>
        end

      assert expand(before_expansion) |> clean_meta([:alignment]) == after_expansion
    end

    test "supports dynamic size" do
      import Kernel, except: [-: 2]

      before_expansion =
        quote do
          var = 1
          <<x::size(var)-unit(8)>>
        end

      after_expansion =
        quote do
          var = 1
          <<x()::integer()-unit(8)-size(var)>>
        end

      assert expand(before_expansion) |> clean_meta([:alignment]) == after_expansion
    end

    # Helper macro: binds `offset` and uses it as a segment size, so the
    # following test exercises size expressions coming from macro-introduced
    # ("counter") variables.
    defmacro offset(size, binary) do
      quote do
        offset = unquote(size)
        <<_::size(offset)>> = unquote(binary)
      end
    end

    test "supports size from counters" do
      assert offset(8, <<0>>)
    end

    test "merges bitstrings" do
      import Kernel, except: [-: 2]

      assert expand(quote(do: <<x, <<y::signed-native>>, z>>)) |> clean_meta([:alignment]) ==
               quote(do: <<x()::integer(), y()::integer()-native()-signed(), z()::integer()>>)

      assert expand(quote(do: <<x, <<y::signed-native>>::bitstring, z>>))
             |> clean_meta([:alignment]) ==
               quote(do: <<x()::integer(), y()::integer()-native()-signed(), z()::integer()>>)
    end

    test "merges binaries" do
      import Kernel, except: [-: 2]

      assert expand(quote(do: "foo" <> x)) |> clean_meta([:alignment]) ==
               quote(do: <<"foo"::binary(), x()::binary()>>)

      assert expand(quote(do: "foo" <> <<x::size(4), y::size(4)>>)) |> clean_meta([:alignment]) ==
               quote(do: <<"foo"::binary(), x()::integer()-size(4), y()::integer()-size(4)>>)

      assert expand(quote(do: <<"foo", <<x::size(4), y::size(4)>>::binary>>))
             |> clean_meta([:alignment]) ==
               quote(do: <<"foo"::binary(), x()::integer()-size(4), y()::integer()-size(4)>>)
    end

    test "raises on unaligned binaries in match" do
      message = ~r"cannot verify size of binary expression in match"

      assert_raise CompileError, message, fn ->
        expand(quote(do: <<rest::bits>> <> _ = "foo"))
      end

      assert_raise CompileError, message, fn ->
        expand(quote(do: <<rest::size(3)>> <> _ = "foo"))
      end
    end

    test "raises on size or unit for literal bitstrings" do
      message = ~r"literal <<>> in bitstring supports only type specifiers"

      assert_raise CompileError, message, fn ->
        expand(quote(do: <<(<<"foo">>)::32>>))
      end
    end

    test "raises on size or unit for literal strings" do
      message = ~r"literal string in bitstring supports only endianness and type specifiers"

      assert_raise CompileError, message, fn ->
        expand(quote(do: <<"foo"::32>>))
      end
    end

    test "raises for invalid size * unit for floats" do
      message = ~r"float requires size\*unit to be 32 or 64 \(default\), got: 128"

      assert_raise CompileError, message, fn ->
        expand(quote(do: <<12.3::32*4>>))
      end

      message = ~r"float requires size\*unit to be 32 or 64 \(default\), got: 256"

      assert_raise CompileError, message, fn ->
        expand(quote(do: <<12.3::256>>))
      end
    end

    test "raises for invalid size" do
      message = ~r"size in bitstring expects an integer or a variable as argument, got: :oops"

      assert_raise CompileError, message, fn ->
        expand(quote(do: <<"foo"::size(:oops)>>))
      end

      # A variable used in size() must already be bound *before* the
      # bitstring segment; later or sibling bindings do not count.
      assert_raise CompileError, ~r/undefined variable "foo"/, fn ->
        code =
          quote do
            fn <<_::size(foo)>> -> :ok end
          end

        expand(code)
      end

      assert_raise CompileError, ~r/undefined variable "foo"/, fn ->
        code =
          quote do
            fn <<_::size(foo), foo::size(8)>> -> :ok end
          end

        expand(code)
      end

      assert_raise CompileError, ~r/undefined variable "foo" in bitstring segment/, fn ->
        code =
          quote do
            fn foo, <<_::size(foo)>> -> :ok end
          end

        expand(code)
      end

      message = ~r"size in bitstring expects an integer or a variable as argument, got: foo()"

      assert_raise CompileError, message, fn ->
        code =
          quote do
            fn <<_::size(foo())>> -> :ok end
          end

        expand(code)
      end
    end

    test "raises for invalid unit" do
      message = ~r"unit in bitstring expects an integer as argument, got: :oops"

      assert_raise CompileError, message, fn ->
        expand(quote(do: <<"foo"::size(8)-unit(:oops)>>))
      end
    end

    test "raises for unknown specifier" do
      assert_raise CompileError, ~r"unknown bitstring specifier: unknown()", fn ->
        expand(quote(do: <<1::unknown>>))
      end
    end

    test "raises for conflicting specifiers" do
      assert_raise CompileError, ~r"conflicting endianness specification for bit field", fn ->
        expand(quote(do: <<1::little-big>>))
      end

      assert_raise CompileError, ~r"conflicting unit specification for bit field", fn ->
        expand(quote(do: <<x::bitstring-unit(2)>>))
      end
    end

    test "raises for invalid literals" do
      assert_raise CompileError, ~r"invalid literal :foo in <<>>", fn ->
        expand(quote(do: <<:foo>>))
      end

      assert_raise CompileError, ~r"invalid literal \[\] in <<>>", fn ->
        expand(quote(do: <<[]::size(8)>>))
      end
    end

    test "raises on binary fields with size in matches" do
      assert expand(quote(do: <<x::binary-size(3), y::binary>> = "foobar"))

      message = ~r"a binary field without size is only allowed at the end of a binary pattern"

      assert_raise CompileError, message, fn ->
        expand(quote(do: <<x::binary, y::binary>> = "foobar"))
      end
    end
  end
  # `a -1` could be either `a - 1` (with variable `a`) or the call `a(-1)`;
  # when a variable of that name exists, the compiler rejects the ambiguity.
  describe "op ambiguity" do
    test "raises when a call is ambiguous" do
      message = ~r["a -1" looks like a function call but there is a variable named "a"]

      assert_raise CompileError, message, fn ->
        # We use string_to_quoted! here to avoid the formatter adding parentheses to "a -1".
        code =
          Code.string_to_quoted!("""
          a = 1
          a -1
          """)

        expand(code)
      end
    end
  end
  # Catch-all for malformed quoted expressions: invalid AST nodes, invalid
  # patterns/guards, calls on non-callable values, and stray `->` clauses
  # passed to constructs that cannot handle them.
  test "handles invalid expressions" do
    assert_raise CompileError, ~r"invalid quoted expression: {1, 2, 3}", fn ->
      expand(quote(do: unquote({1, 2, 3})))
    end

    assert_raise CompileError, ~r"invalid quoted expression: #Function<", fn ->
      expand(quote(do: unquote({:sample, fn -> nil end})))
    end

    assert_raise CompileError, ~r"invalid pattern in match", fn ->
      code =
        quote do
          x = & &1

          case true do
            x.(false) -> true
          end
        end

      expand(code)
    end

    assert_raise CompileError, ~r"invalid expression in guard", fn ->
      code =
        quote do
          x = & &1

          case true do
            true when x.(true) -> true
          end
        end

      expand(code)
    end

    assert_raise CompileError, ~r"invalid call foo\(1\)\(2\)", fn ->
      expand(quote(do: foo(1)(2)))
    end

    assert_raise CompileError, ~r"invalid call 1\.foo\(\)", fn ->
      expand(quote(do: 1.foo))
    end

    assert_raise CompileError, ~r"invalid call 0\.foo\(\)", fn ->
      expand(quote(do: __ENV__.line.foo))
    end

    assert_raise CompileError, ~r"unhandled operator ->", fn ->
      expand(quote(do: (foo -> bar)))
    end

    message = ~r/"wrong_fun" cannot handle clauses with the ->/

    assert_raise CompileError, message, fn ->
      code =
        quote do
          wrong_fun do
            _ -> :ok
          end
        end

      expand(code)
    end

    assert_raise CompileError, message, fn ->
      code =
        quote do
          wrong_fun do
            foo -> bar
          after
            :ok
          end
        end

      expand(code)
    end

    assert_raise CompileError, ~r/"length" cannot handle clauses with the ->/, fn ->
      code =
        quote do
          length do
            _ -> :ok
          end
        end

      expand(code)
    end
  end
## Helpers
defmacro thirteen do
13
end
defp clean_meta(expr, vars) do
cleaner = &Keyword.drop(&1, vars)
Macro.prewalk(expr, &Macro.update_meta(&1, cleaner))
end
defp expand(expr) do
expand_env(expr, __ENV__) |> elem(0)
end
  # Expands `expr` in `env` while swallowing any warnings written to stderr.
  # `capture_io` discards the return value of the function it runs, so the
  # expansion result is smuggled out through the process mailbox and received
  # right after. The :version metadata added by expansion is stripped before
  # returning `{expanded_ast, env}`.
  defp expand_env(expr, env) do
    ExUnit.CaptureIO.capture_io(:stderr, fn ->
      send(self(), {:expand_env, :elixir_expand.expand(expr, env)})
    end)

    receive do
      {:expand_env, {expr, env}} -> {clean_meta(expr, [:version]), env}
    end
  end
end
| 25.963008 | 164 | 0.52495 |
9ebfcc37db9adaa1cebab9f1349461f205f4c882 | 2,303 | ex | Elixir | lib/blog_web/controllers/ycy_controllers/ycy_real_estate_controller.ex | albertschr/blog_supported_blockchain | ee9578c8ebddd3e72ff5eaabefc2d724c3e2a9a1 | [
"MIT"
] | 1 | 2019-02-06T12:29:35.000Z | 2019-02-06T12:29:35.000Z | lib/blog_web/controllers/ycy_controllers/ycy_real_estate_controller.ex | albertschr/blog_supported_blockchain | ee9578c8ebddd3e72ff5eaabefc2d724c3e2a9a1 | [
"MIT"
] | null | null | null | lib/blog_web/controllers/ycy_controllers/ycy_real_estate_controller.ex | albertschr/blog_supported_blockchain | ee9578c8ebddd3e72ff5eaabefc2d724c3e2a9a1 | [
"MIT"
] | null | null | null | defmodule BlogWeb.YcyRealEstateController do
use BlogWeb, :controller
alias Blog.Repo
def show(conn, %{"name" => name}) do
try do
real_estate =
name
|> YcyRealEstate.get_real_estate()
|> StructTranslater.struct_to_map()
json(conn, real_estate)
rescue
_ ->
json(conn, %{"error" => "no_exist"})
end
end
def show(conn, _params) do
real_estates = YcyRealEstate.get_real_estates()
real_estates_map =
Enum.map(real_estates, fn real_estate ->
real_estate_preloaded = Repo.preload(real_estate, :ycy_user)
real_estate
|> Map.put(:owner, real_estate_preloaded.ycy_user.name)
|> StructTranslater.struct_to_map()
end)
json(conn, real_estates_map)
end
def buy(conn, %{
"api" => api_key,
"name" => name,
"buyer" => buyer_puid,
"group_id" => group_puid,
"amount" => amount
}) do
if Auth.auth?(api_key) do
real_estate = YcyRealEstate.get_real_estate(name)
buyer = YcyUser.get_user_by_puid(buyer_puid, group_puid)
IO.puts(inspect(buyer))
result =
if is_nil(real_estate) or is_nil(buyer) do
:no_exist
else
YcyRealEstate.buy_estate(buyer, amount, name)
end
handle_result(conn, result)
else
json(conn, %{"result" => "error", "reason" => "no_auth"})
end
end
def handle_result(conn, :success) do
json(conn, %{"result" => "success"})
end
def handle_result(conn, error) do
json(conn, %{"result" => "error", "reason" => error})
end
def update(conn, %{
"api" => api_key,
"user" => user_puid,
"group_id" => group_puid,
"name" => name,
"signature" => signature
}) do
if Auth.auth?(api_key) do
real_estate =
name
|> YcyRealEstate.get_real_estate()
|> Repo.preload(:ycy_user)
user = YcyUser.get_user_by_puid(user_puid, group_puid)
if real_estate.ycy_user == user do
YcyRealEstate.modify(signature, name)
json(conn, %{"result" => "success"})
else
json(conn, %{"result" => "error", "reason" => "not_owner"})
end
else
json(conn, %{"result" => "error", "reason" => "no_auth"})
end
end
end
| 24.763441 | 68 | 0.585323 |
9ebffcc640a9bf89f7f6c912bbd8966222c627bc | 11,443 | ex | Elixir | lib/uinta/plug.ex | KalvinHom/uinta | c02ea83b92305d4b009853232292c94bd26bba25 | [
"MIT"
] | null | null | null | lib/uinta/plug.ex | KalvinHom/uinta | c02ea83b92305d4b009853232292c94bd26bba25 | [
"MIT"
] | null | null | null | lib/uinta/plug.ex | KalvinHom/uinta | c02ea83b92305d4b009853232292c94bd26bba25 | [
"MIT"
] | null | null | null | if Code.ensure_loaded?(Plug) do
defmodule Uinta.Plug do
@moduledoc """
This plug combines the request and response logs into a single line. This
brings many benefits including:
- Removing the need to visually match up the request and response makes it
easier to read your logs and get a full picture of what has happened.
- Having a single line for both request and response halves the number of
request logs that your log aggregator will need to process and index, which
leads to saved costs
In addition to combining the log lines, it also gives you the ability to
output request logs in JSON format so that you can easily have your log
aggregator parse the fields. To do this, pass `json: true` in the options
when calling the plug.
You will also gain the ability to not log certain paths that are requested,
as long as those paths return a 200-level status code. This can be
particularly useful for things like not showing health checks in your logs
to cut down on noise. To do this, just pass `ignored_paths:
["/path_to_ignore"]` in the options.
Finally, GraphQL requests will replace `POST /graphql` with the GraphQL
operation type and name like `QUERY getUser` or `MUTATION createUser` if an
operation name is provided. This will give you more visibility into your
GraphQL requests without having to log out the entire request body or go
into debug mode. If desired, the GraphQL variables can be included in the
log line as well. The query can also be included if unnamed.
## Installation
Installation of the plug will depend on how your app currently logs requests.
Open `YourApp.Endpoint` and look for the following line:
```
plug Plug.Logger
```
If it exists in your endpoint, replace it with this (using the options you
want):
```
plug Uinta.Plug,
log: :info,
json: false,
include_variables: false,
ignored_paths: [],
filter_variables: [],
success_log_sampling_ratio: 1.0
```
If your endpoint didn't call `Plug.Logger`, add the above line above the line
that looks like this:
```
plug Plug.RequestId
```
Now you will also want to add the following anywhere in your main config file to
make sure that you aren't logging each request twice:
```
config :phoenix, logger: false
```
## Options
- `:log` - The log level at which this plug should log its request info.
Default is `:info`
- `:json` - Whether or not this plug should log in JSON format. Default is
`false`
- `:ignored_paths` - A list of paths that should not log requests. Default
is `[]`.
- `:include_variables` - Whether or not to include any GraphQL variables in
the log line when applicable. Default is `false`.
- `:filter_variables` - A list of variable names that should be filtered
out from the logs. By default `password`, `passwordConfirmation`,
`idToken`, and `refreshToken` will be filtered.
- `:include_unnamed_queries` - Whether or not to include the full query
body for queries with no name supplied
- `:success_log_sampling_ratio` - What percentage of successful requests
should be logged. Defaults to 1.0
"""
require Logger
alias Plug.Conn
@behaviour Plug
@default_filter ~w(password passwordConfirmation idToken refreshToken)
@default_sampling_ratio 1.0
@query_name_regex ~r/^(?:(?:query|mutation)\s+(\w+)(?:\(\s*\$\w+:\s+\[?\w+\]?!?(?:,?\s+\$\w+:\s+\[?\w+\]?!?)*\s*\))?\s*)?{/
@type format :: :json | :string
@type graphql_info :: %{type: String.t(), operation: String.t(), variables: String.t() | nil}
@type opts :: %{
level: Logger.level(),
format: format(),
include_unnamed_queries: boolean(),
include_variables: boolean(),
ignored_paths: list(String.t()),
filter_variables: list(String.t())
}
@impl Plug
def init(opts) do
format = if Keyword.get(opts, :json, false), do: :json, else: :string
%{
level: Keyword.get(opts, :log, :info),
format: format,
ignored_paths: Keyword.get(opts, :ignored_paths, []),
include_unnamed_queries: Keyword.get(opts, :include_unnamed_queries, false),
include_variables: Keyword.get(opts, :include_variables, false),
filter_variables: Keyword.get(opts, :filter_variables, @default_filter),
success_log_sampling_ratio:
Keyword.get(
opts,
:success_log_sampling_ratio,
@default_sampling_ratio
)
}
end
@impl Plug
def call(conn, opts) do
start = System.monotonic_time()
Conn.register_before_send(conn, fn conn ->
if should_log_request?(conn, opts) do
Logger.log(opts.level, fn ->
stop = System.monotonic_time()
diff = System.convert_time_unit(stop - start, :native, :microsecond)
graphql_info = graphql_info(conn, opts)
info = info(conn, graphql_info, diff, opts)
format_line(info, opts.format)
end)
end
conn
end)
end
@spec info(Plug.Conn.t(), graphql_info(), integer(), opts()) :: map()
defp info(conn, graphql_info, diff, opts) do
%{
connection_type: connection_type(conn),
method: method(conn, graphql_info),
path: conn.request_path,
operation_name: graphql_info[:operation],
query: query(graphql_info, opts),
status: Integer.to_string(conn.status),
timing: formatted_diff(diff),
duration_ms: diff / 1000,
client_ip: conn.remote_ip,
user_agent: get_first_value_for_header(conn, "user-agent"),
referer: get_first_value_for_header(conn, "referer"),
x_forwarded_for: get_first_value_for_header(conn, "x-forwarded-for"),
x_forwarded_proto: get_first_value_for_header(conn, "x-forwarded-proto"),
x_forwarded_port: get_first_value_for_header(conn, "x-forwarded-port"),
via: get_first_value_for_header(conn, "via"),
variables: variables(graphql_info)
}
end
@spec format_line(map(), format()) :: iodata()
defp format_line(info, :json) do
info =
info
|> Map.delete(:connection_type)
|> Enum.filter(fn {_, value} -> !is_nil(value) end)
|> Enum.into(%{})
case Jason.encode(info) do
{:ok, encoded} -> encoded
_ -> inspect(info)
end
end
defp format_line(info, :string) do
log = [info.method, ?\s, info.operation_name || info.path]
log = if is_nil(info.operation_name), do: log, else: [log, " (", info.path, ")"]
log = if is_nil(info.variables), do: log, else: [log, " with ", info.variables]
log = [log, " - ", info.connection_type, ?\s, info.status, " in ", info.timing]
if is_nil(info.query), do: log, else: [log, "\nQuery: ", info.query]
end
defp get_first_value_for_header(conn, name) do
conn
|> Plug.Conn.get_req_header(name)
|> List.first()
end
@spec method(Plug.Conn.t(), graphql_info()) :: String.t()
defp method(_, %{type: type}), do: type
defp method(conn, _), do: conn.method
@spec query(graphql_info(), opts()) :: String.t() | nil
defp query(_, %{include_unnamed_queries: false}), do: nil
defp query(%{query: query}, _), do: query
defp query(_, _), do: nil
@spec variables(graphql_info() | nil) :: String.t() | nil
defp variables(%{variables: variables}), do: variables
defp variables(_), do: nil
@spec graphql_info(Plug.Conn.t(), opts()) :: graphql_info() | nil
defp graphql_info(%{method: "POST", params: params}, opts) do
type =
params["query"]
|> Kernel.||("")
|> String.trim()
|> query_type()
if is_nil(type) do
nil
else
%{type: type}
|> put_operation_name(params)
|> put_query(params["query"], opts)
|> put_variables(params["variables"], opts)
end
end
defp graphql_info(_, _), do: nil
@spec put_operation_name(map(), map()) :: map()
defp put_operation_name(info, params) do
operation = operation_name(params)
Map.put(info, :operation, operation)
end
@spec put_query(map(), String.t(), opts()) :: map()
defp put_query(%{operation: "unnamed"} = info, query, %{include_unnamed_queries: true}),
do: Map.put(info, :query, query)
defp put_query(info, _query, _opts), do: info
@spec put_variables(map(), any(), opts()) :: map()
defp put_variables(info, _variables, %{include_variables: false}), do: info
defp put_variables(info, variables, _) when not is_map(variables), do: info
defp put_variables(info, variables, opts) do
filtered = filter_variables(variables, opts.filter_variables)
case Jason.encode(filtered) do
{:ok, encoded} -> Map.put(info, :variables, encoded)
_ -> info
end
end
@spec filter_variables(map(), list(String.t())) :: map()
defp filter_variables(variables, to_filter) do
variables
|> Enum.map(&filter(&1, to_filter))
|> Enum.into(%{})
end
@spec filter({String.t(), term()}, list(String.t())) :: {String.t(), term()}
defp filter({key, value}, to_filter) do
if key in to_filter do
{key, "[FILTERED]"}
else
{key, value}
end
end
@spec formatted_diff(integer()) :: list(String.t())
defp formatted_diff(diff) when diff > 1000 do
"#{diff |> div(1000) |> Integer.to_string()}ms"
end
defp formatted_diff(diff), do: "#{Integer.to_string(diff)}µs"
@spec connection_type(Plug.Conn.t()) :: String.t()
defp connection_type(%{state: :set_chunked}), do: "Chunked"
defp connection_type(_), do: "Sent"
@spec operation_name(String.t()) :: String.t() | nil
defp operation_name(%{"operationName" => name}), do: name
defp operation_name(%{"query" => query}) do
case Regex.run(@query_name_regex, query, capture: :all_but_first) do
[query_name] -> query_name
_ -> "unnamed"
end
end
defp operation_name(_), do: "unnamed"
@spec query_type(term()) :: String.t() | nil
defp query_type("query" <> _), do: "QUERY"
defp query_type("mutation" <> _), do: "MUTATION"
defp query_type("{" <> _), do: "QUERY"
defp query_type(_), do: nil
defp should_log_request?(conn, opts) do
IO.inspect("should_log_request?")
IO.inspect(conn.request_path not in opts.ignored_paths)
IO.inspect(conn.status >= 300)
IO.inspect((conn.request_path not in opts.ignored_paths &&
should_include_in_sample?(opts[:success_log_sampling_ratio])) ||
conn.status >= 300)
# Log successful request if path is not filtered based on the sampling pool
# or log all HTTP status >= 300 (usually errors)
(conn.request_path not in opts.ignored_paths &&
should_include_in_sample?(opts[:success_log_sampling_ratio])) ||
conn.status >= 300
end
defp should_include_in_sample?(ratio) do
random_float() <= ratio
end
# Returns a float (4 digit precision) between 0.0 and 1.0
#
# Alternative:
# :crypto.rand_uniform(1, 10_000) / 10_000
#
defp random_float do
:rand.uniform(10_000) / 10_000
end
end
end
| 34.675758 | 127 | 0.634449 |
9ec00667c158cae92c0758a93b78d3949323bdab | 941 | ex | Elixir | lib/challenge_gov/submissions/document.ex | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | [
"CC0-1.0"
] | 9 | 2020-02-26T20:24:38.000Z | 2022-03-22T21:14:52.000Z | lib/challenge_gov/submissions/document.ex | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | [
"CC0-1.0"
] | 15 | 2020-04-22T19:33:24.000Z | 2022-03-26T15:11:17.000Z | lib/challenge_gov/submissions/document.ex | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | [
"CC0-1.0"
] | 4 | 2020-04-27T22:58:57.000Z | 2022-01-14T13:42:09.000Z | defmodule ChallengeGov.Submissions.Document do
@moduledoc """
Submission document schema
"""
use Ecto.Schema
import Ecto.Changeset
alias ChallengeGov.Accounts.User
alias ChallengeGov.Submissions.Submission
@type t :: %__MODULE__{}
schema "submission_documents" do
belongs_to(:user, User)
belongs_to(:submission, Submission)
field(:filename, :string)
field(:key, Ecto.UUID)
field(:extension, :string)
field(:name, :string)
timestamps(type: :utc_datetime_usec)
end
def create_changeset(struct, file, key, name \\ "") do
struct
|> change()
|> put_change(:filename, file.filename)
|> put_change(:key, key)
|> put_change(:extension, file.extension)
|> put_change(:name, name)
end
def submission_changeset(struct, submission) do
struct
|> change()
|> put_change(:submission_id, submission.id)
|> foreign_key_constraint(:challenge_id)
end
end
| 21.883721 | 56 | 0.686504 |
9ec0206e05750172dcf414f2e1cc62563c2df0d6 | 721 | exs | Elixir | examples/example-phx-1_3/config/test.exs | devshane/thesis-phoenix | afe22a25542f91e15cfffb1e93ff8d833a64c25b | [
"MIT"
] | null | null | null | examples/example-phx-1_3/config/test.exs | devshane/thesis-phoenix | afe22a25542f91e15cfffb1e93ff8d833a64c25b | [
"MIT"
] | null | null | null | examples/example-phx-1_3/config/test.exs | devshane/thesis-phoenix | afe22a25542f91e15cfffb1e93ff8d833a64c25b | [
"MIT"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :example_phx, ExamplePhxWeb.Endpoint,
http: [port: 4001],
server: true
config :example_phx, :sql_sandbox, true
config :hound, driver: "chrome_driver"
# Print only warnings and errors during test
config :logger, level: :warn
# Configure your database
config :example_phx, ExamplePhx.Repo,
adapter: Ecto.Adapters.Postgres,
username: System.get_env("DATABASE_POSTGRESQL_USERNAME") || "postgres",
password: System.get_env("DATABASE_POSTGRESQL_PASSWORD") || "postgres",
database: "example_phx_test",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox,
ownership_timeout: 60_000
| 28.84 | 73 | 0.758669 |
9ec0406cfb4429e0ee6a3661d228a481cb2babbb | 3,199 | ex | Elixir | lib/codes/codes_k76.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_k76.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_k76.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_K76 do
alias IcdCode.ICDCode
def _K760 do
%ICDCode{full_code: "K760",
category_code: "K76",
short_code: "0",
full_name: "Fatty (change of) liver, not elsewhere classified",
short_name: "Fatty (change of) liver, not elsewhere classified",
category_name: "Fatty (change of) liver, not elsewhere classified"
}
end
def _K761 do
%ICDCode{full_code: "K761",
category_code: "K76",
short_code: "1",
full_name: "Chronic passive congestion of liver",
short_name: "Chronic passive congestion of liver",
category_name: "Chronic passive congestion of liver"
}
end
def _K762 do
%ICDCode{full_code: "K762",
category_code: "K76",
short_code: "2",
full_name: "Central hemorrhagic necrosis of liver",
short_name: "Central hemorrhagic necrosis of liver",
category_name: "Central hemorrhagic necrosis of liver"
}
end
def _K763 do
%ICDCode{full_code: "K763",
category_code: "K76",
short_code: "3",
full_name: "Infarction of liver",
short_name: "Infarction of liver",
category_name: "Infarction of liver"
}
end
def _K764 do
%ICDCode{full_code: "K764",
category_code: "K76",
short_code: "4",
full_name: "Peliosis hepatis",
short_name: "Peliosis hepatis",
category_name: "Peliosis hepatis"
}
end
def _K765 do
%ICDCode{full_code: "K765",
category_code: "K76",
short_code: "5",
full_name: "Hepatic veno-occlusive disease",
short_name: "Hepatic veno-occlusive disease",
category_name: "Hepatic veno-occlusive disease"
}
end
def _K766 do
%ICDCode{full_code: "K766",
category_code: "K76",
short_code: "6",
full_name: "Portal hypertension",
short_name: "Portal hypertension",
category_name: "Portal hypertension"
}
end
def _K767 do
%ICDCode{full_code: "K767",
category_code: "K76",
short_code: "7",
full_name: "Hepatorenal syndrome",
short_name: "Hepatorenal syndrome",
category_name: "Hepatorenal syndrome"
}
end
def _K7681 do
%ICDCode{full_code: "K7681",
category_code: "K76",
short_code: "81",
full_name: "Hepatopulmonary syndrome",
short_name: "Hepatopulmonary syndrome",
category_name: "Hepatopulmonary syndrome"
}
end
def _K7689 do
%ICDCode{full_code: "K7689",
category_code: "K76",
short_code: "89",
full_name: "Other specified diseases of liver",
short_name: "Other specified diseases of liver",
category_name: "Other specified diseases of liver"
}
end
def _K769 do
%ICDCode{full_code: "K769",
category_code: "K76",
short_code: "9",
full_name: "Liver disease, unspecified",
short_name: "Liver disease, unspecified",
category_name: "Liver disease, unspecified"
}
end
end
| 30.179245 | 76 | 0.593623 |
9ec047e81c650719d48050597944170a02a49195 | 1,732 | exs | Elixir | apps/interface/config/dev.exs | jeremy-miller/life-elixir | 5c9396b187279110dc8da38a5070023d1ed02955 | [
"MIT"
] | null | null | null | apps/interface/config/dev.exs | jeremy-miller/life-elixir | 5c9396b187279110dc8da38a5070023d1ed02955 | [
"MIT"
] | null | null | null | apps/interface/config/dev.exs | jeremy-miller/life-elixir | 5c9396b187279110dc8da38a5070023d1ed02955 | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :interface, InterfaceWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin",
cd: Path.expand("../assets", __DIR__)]]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# command from your terminal:
#
# openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout priv/server.key -out priv/server.pem
#
# The `http:` config above can be replaced with:
#
# https: [port: 4000, keyfile: "priv/server.key", certfile: "priv/server.pem"],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :interface, InterfaceWeb.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{lib/interface_web/views/.*(ex)$},
~r{lib/interface_web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
| 34.64 | 170 | 0.702656 |
9ec05433dd04039b5be09398ff6c6a684709ba6c | 376 | ex | Elixir | lib/quasar/web/views/error_view.ex | okbreathe/quasar | 58449a190aefde36aa83e5b1f3116f458ced7c09 | [
"Apache-2.0"
] | 11 | 2017-07-10T10:13:42.000Z | 2021-12-19T16:46:20.000Z | lib/quasar/web/views/error_view.ex | okbreathe/quasar | 58449a190aefde36aa83e5b1f3116f458ced7c09 | [
"Apache-2.0"
] | null | null | null | lib/quasar/web/views/error_view.ex | okbreathe/quasar | 58449a190aefde36aa83e5b1f3116f458ced7c09 | [
"Apache-2.0"
] | 3 | 2017-07-18T20:03:34.000Z | 2019-07-28T13:32:49.000Z | defmodule Quasar.Web.ErrorView do
use Quasar.Web, :view
def render("404.html", _assigns) do
"Page not found"
end
def render("500.html", _assigns) do
"Internal server error"
end
# In case no render clause matches or no
# template is found, let's render it as 500
def template_not_found(_template, assigns) do
render "500.html", assigns
end
end
| 20.888889 | 47 | 0.696809 |
9ec055d367b1bb3cab1b522dcde6ae80e877a996 | 107 | ex | Elixir | lib/hologram/compiler/call_graph_builder.ex | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 40 | 2022-01-19T20:27:36.000Z | 2022-03-31T18:17:41.000Z | lib/hologram/compiler/call_graph_builder.ex | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 42 | 2022-02-03T22:52:43.000Z | 2022-03-26T20:57:32.000Z | lib/hologram/compiler/call_graph_builder.ex | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 3 | 2022-02-10T04:00:37.000Z | 2022-03-08T22:07:45.000Z | defprotocol Hologram.Compiler.CallGraphBuilder do
def build(ir, module_defs, templates, from_vertex)
end
| 26.75 | 52 | 0.831776 |
9ec05ab0d887cdeb9a2ca9fe8deccd0bd0c67281 | 17,376 | ex | Elixir | lib/ex_unit/lib/ex_unit/formatter.ex | samgaw/elixir | 92e8263102d95281a5aa7850b747636805f13fc9 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit/formatter.ex | samgaw/elixir | 92e8263102d95281a5aa7850b747636805f13fc9 | [
"Apache-2.0"
] | 1 | 2021-10-21T08:22:30.000Z | 2021-10-21T08:22:30.000Z | lib/ex_unit/lib/ex_unit/formatter.ex | samgaw/elixir | 92e8263102d95281a5aa7850b747636805f13fc9 | [
"Apache-2.0"
] | null | null | null | defmodule ExUnit.Formatter do
@moduledoc """
Helper functions for formatting and the formatting protocols.
Formatters are `GenServer`s specified during ExUnit configuration
that receive a series of events as casts.
The following events are possible:
* `{:suite_started, opts}` -
the suite has started with the specified options to the runner.
* `{:suite_finished, times_us}` -
the suite has finished. Returns several measurements in microseconds
for running the suite. See `t:times_us` for more information.
* `{:module_started, test_module}` -
a test module has started. See `ExUnit.TestModule` for details.
* `{:module_finished, test_module}` -
a test module has finished. See `ExUnit.TestModule` for details.
* `{:test_started, test}` -
a test has started. See `ExUnit.Test` for details.
* `{:test_finished, test}` -
a test has finished. See `ExUnit.Test` for details.
* `{:sigquit, [test | test_module]}` -
the VM is going to shutdown. It receives the test cases (or test
module in case of `setup_all`) still running.
The formatter will also receive the following events but they are deprecated
and should be ignored:
* `{:case_started, test_module}` -
a test module has started. See `ExUnit.TestModule` for details.
* `{:case_finished, test_module}` -
a test module has finished. See `ExUnit.TestModule` for details.
The full ExUnit configuration is passed as the argument to `c:GenServer.init/1` callback when the
formatters are started. If you need to do runtime configuration of a
formatter, you can add any configuration needed by using `ExUnit.configure/1`
or `ExUnit.start/1`, and this will then be included in the options passed to
the `c:GenServer.init/1` callback.
"""
@type id :: term
@type test :: ExUnit.Test.t()
@typedoc """
The times spent on several parts of the test suite.
The following properties can be computed:
sync = run - (async || 0)
total = run + (load || 0)
`async` is nil when there are no async tests.
`load` is nil when the test suite is running and loading
tests concurrently.
"""
@type times_us :: %{
run: pos_integer,
async: pos_integer | nil,
load: pos_integer | nil
}
import Exception, only: [format_stacktrace_entry: 1, format_file_line: 3]
alias ExUnit.Diff
alias Inspect.Algebra
@counter_padding " "
@mailbox_label_padding @counter_padding <> " "
@formatter_exceptions [ExUnit.AssertionError, FunctionClauseError]
@no_value ExUnit.AssertionError.no_value()
@doc """
Formats time taken running the test suite.
## Examples
iex> format_times(%{run: 10000, async: nil, load: nil})
"Finished in 0.01 seconds (0.00s async, 0.01s sync)"
iex> format_times(%{run: 10000, async: nil, load: 20000})
"Finished in 0.03 seconds (0.02s on load, 0.00s async, 0.01s sync)"
iex> format_times(%{run: 10000, async: nil, load: 200_000})
"Finished in 0.2 seconds (0.2s on load, 0.00s async, 0.01s sync)"
iex> format_times(%{run: 100_000, async: 50000, load: 200_000})
"Finished in 0.3 seconds (0.2s on load, 0.05s async, 0.05s sync)"
"""
@spec format_times(times_us) :: String.t()
def format_times(times) do
run_us = normalize_us(times.run)
load_us = normalize_us(times.load)
async_us = normalize_us(times.async)
sync_us = run_us - async_us
total_us = run_us + load_us
maybe_load =
if times.load do
"#{format_us(load_us)}s on load, "
else
""
end
"Finished in #{format_us(total_us)} seconds " <>
"(#{maybe_load}#{format_us(async_us)}s async, #{format_us(sync_us)}s sync)"
end
defp normalize_us(nil), do: 0
defp normalize_us(us), do: div(us, 10000)
defp format_us(us) do
if us < 10 do
"0.0#{us}"
else
us = div(us, 10)
"#{div(us, 10)}.#{rem(us, 10)}"
end
end
# Deprecate me on Elixir v1.16
@doc false
def format_time(run, load) do
format_times(%{run: run, load: load, async: nil})
end
@doc """
Formats filters used to constrain cases to be run.
## Examples
iex> format_filters([run: true, slow: false], :include)
"Including tags: [run: true, slow: false]"
"""
@spec format_filters(keyword, atom) :: String.t()
def format_filters(filters, type) do
case type do
:exclude -> "Excluding tags: #{inspect(filters)}"
:include -> "Including tags: #{inspect(filters)}"
end
end
@doc """
Receives a test and formats its failure.
"""
def format_test_failure(test, failures, counter, width, formatter) do
%ExUnit.Test{name: name, module: module, tags: tags} = test
test_info(with_counter(counter, "#{name} (#{inspect(module)})"), formatter) <>
test_location(with_location(tags), formatter) <>
Enum.map_join(Enum.with_index(failures), "", fn {{kind, reason, stack}, index} ->
{text, stack} = format_kind_reason(test, kind, reason, stack, width, formatter)
failure_header(failures, index) <>
text <> format_stacktrace(stack, module, name, formatter)
end)
end
@doc false
def format_assertion_error(%ExUnit.AssertionError{} = struct) do
format_exception(%{}, struct, [], :infinity, fn _, msg -> msg end, "") |> elem(0)
end
defp format_exception(test, %ExUnit.AssertionError{} = struct, stack, width, formatter, pad) do
label_padding_size = if has_value?(struct.right), do: 7, else: 6
padding_size = label_padding_size + byte_size(@counter_padding)
code_multiline =
if struct.doctest != @no_value,
do: &pad_multiline(&1, padding_size),
else: &code_multiline(&1, padding_size)
formatted =
[
note: if_value(struct.message, &format_message(&1, formatter)),
doctest: if_value(struct.doctest, &pad_multiline(&1, 2 + byte_size(@counter_padding))),
code: if_value(struct.expr, code_multiline),
code: unless_value(struct.expr, fn -> get_code(test, stack) || @no_value end),
arguments: if_value(struct.args, &format_args(&1, width))
]
|> Kernel.++(format_context(struct, formatter, padding_size, width))
|> format_meta(formatter, pad, label_padding_size)
|> IO.iodata_to_binary()
{formatted, stack}
end
defp format_exception(test, %FunctionClauseError{} = struct, stack, _width, formatter, _pad) do
{blamed, stack} = Exception.blame(:error, struct, stack)
banner = Exception.format_banner(:error, struct)
blamed = FunctionClauseError.blame(blamed, &inspect/1, &blame_match(&1, formatter))
message = error_info(banner, formatter) <> "\n" <> pad(String.trim_leading(blamed, "\n"))
{message <> format_code(test, stack, formatter), stack}
end
@doc false
@deprecated "Use ExUnit.Formatter.format_test_all_failure/5 instead"
def format_test_case_failure(test_case, failures, counter, width, formatter) do
format_test_all_failure(test_case, failures, counter, width, formatter)
end
@doc """
Receives a test module and formats its failure.
"""
def format_test_all_failure(test_module, failures, counter, width, formatter) do
name = test_module.name
test_module_info(with_counter(counter, "#{inspect(name)}: "), formatter) <>
Enum.map_join(Enum.with_index(failures), "", fn {{kind, reason, stack}, index} ->
{text, stack} = format_kind_reason(test_module, kind, reason, stack, width, formatter)
failure_header(failures, index) <> text <> format_stacktrace(stack, name, nil, formatter)
end)
end
defp format_kind_reason(test, :error, %mod{} = struct, stack, width, formatter)
when mod in @formatter_exceptions do
format_exception(test, struct, stack, width, formatter, @counter_padding)
end
defp format_kind_reason(test, kind, reason, stack, width, formatter) do
case linked_or_trapped_exit(kind, reason) do
{header, wrapped_reason, wrapped_stack} ->
struct = Exception.normalize(:error, wrapped_reason, wrapped_stack)
{formatted_reason, wrapped_stack} =
format_exception(test, struct, wrapped_stack, width, formatter, @counter_padding)
formatted_stack = format_stacktrace(wrapped_stack, test.module, test.name, formatter)
{error_info(header, formatter) <> pad(formatted_reason <> formatted_stack), stack}
:error ->
{reason, stack} = Exception.blame(kind, reason, stack)
message = error_info(Exception.format_banner(kind, reason), formatter)
{message <> format_code(test, stack, formatter), stack}
end
end
defp linked_or_trapped_exit({:EXIT, pid}, {reason, [_ | _] = stack})
when reason.__struct__ in @formatter_exceptions
when reason == :function_clause do
{"** (EXIT from #{inspect(pid)}) an exception was raised:\n", reason, stack}
end
defp linked_or_trapped_exit(:exit, {{reason, [_ | _] = stack}, {mod, fun, args}})
when is_atom(mod) and is_atom(fun) and is_list(args) and
reason.__struct__ in @formatter_exceptions
when is_atom(mod) and is_atom(fun) and is_list(args) and reason == :function_clause do
{
"** (exit) exited in: #{Exception.format_mfa(mod, fun, args)}\n ** (EXIT) an exception was raised:",
reason,
stack
}
end
defp linked_or_trapped_exit(_kind, _reason), do: :error
defp format_code(test, stack, formatter) do
if snippet = get_code(test, stack) do
" " <> formatter.(:extra_info, "code: ") <> snippet <> "\n"
else
""
end
end
defp get_code(%{module: module, name: name}, stack) do
info =
Enum.find_value(stack, fn
{^module, ^name, _, info} -> info
_ -> nil
end)
file = info[:file]
line = info[:line]
if line > 0 && file && File.exists?(file) do
file |> File.stream!() |> Enum.at(line - 1) |> String.trim()
end
rescue
_ -> nil
end
defp get_code(%{}, _) do
nil
end
defp blame_match(%{match?: true, node: node}, _formatter),
do: Macro.to_string(node)
defp blame_match(%{match?: false, node: node}, formatter),
do: formatter.(:blame_diff, Macro.to_string(node))
defp format_meta(fields, formatter, padding, padding_size) do
for {label, value} <- fields, has_value?(value) do
[padding, format_label(label, formatter, padding_size), value, "\n"]
end
end
defp if_value(value, fun) do
if has_value?(value) do
fun.(value)
else
value
end
end
defp unless_value(value, fun) do
if has_value?(value) do
@no_value
else
fun.()
end
end
defp has_value?(value) do
value != @no_value
end
defp format_label(:note, _formatter, _padding_size), do: ""
defp format_label(label, formatter, padding_size) do
formatter.(:extra_info, String.pad_trailing("#{label}:", padding_size))
end
defp format_message(value, formatter) do
value = String.replace(value, "\n", "\n" <> @counter_padding)
formatter.(:error_info, value)
end
defp format_args(args, width) do
entries =
for {arg, i} <- Enum.with_index(args, 1) do
"""
# #{i}
#{inspect_multiline(arg, 9, width)}
"""
end
["\n" | entries]
end
@assertions [
:assert,
:assert_raise,
:assert_receive,
:assert_received,
:refute,
:refute_receive,
:refute_received
]
defp code_multiline({fun, _, [expr]}, padding_size) when fun in @assertions do
pad_multiline(Atom.to_string(fun) <> " " <> Macro.to_string(expr), padding_size)
end
defp code_multiline(expr, padding_size) do
pad_multiline(Macro.to_string(expr), padding_size)
end
defp inspect_multiline(expr, padding_size, width) do
width = if width == :infinity, do: width, else: width - padding_size
expr
|> Algebra.to_doc(%Inspect.Opts{width: width})
|> Algebra.group()
|> Algebra.nest(padding_size)
|> Algebra.format(width)
end
defp format_context(%{context: {:mailbox, _pins, []}}, _, _, _) do
[]
end
defp format_context(
%{left: left, context: {:mailbox, pins, mailbox}},
formatter,
padding_size,
width
) do
formatted_mailbox =
for message <- mailbox do
{pattern, value} =
format_sides(
left,
message,
{:match, pins},
formatter,
padding_size + 5,
width
)
[
"\n",
@mailbox_label_padding,
format_label(:pattern, formatter, 9),
pattern,
"\n",
@mailbox_label_padding,
format_label(:value, formatter, 9),
value
]
end
[mailbox: Enum.join(formatted_mailbox, "\n")]
end
defp format_context(
%{left: left, right: right, context: context},
formatter,
padding_size,
width
) do
{left, right} = format_sides(left, right, context, formatter, padding_size, width)
[left: left, right: right]
end
defp format_sides(left, right, context, formatter, padding_size, width) do
inspect = &inspect_multiline(&1, padding_size, width)
content_width = if width == :infinity, do: width, else: width - padding_size
case format_diff(left, right, context, formatter) do
{result, _env} ->
left =
result.left
|> Diff.to_algebra(&colorize_diff_delete(&1, formatter))
|> Algebra.nest(padding_size)
|> Algebra.format(content_width)
right =
result.right
|> Diff.to_algebra(&colorize_diff_insert(&1, formatter))
|> Algebra.nest(padding_size)
|> Algebra.format(content_width)
{left, right}
nil when is_atom(context) ->
{if_value(left, inspect), if_value(right, inspect)}
nil ->
{if_value(left, &code_multiline(&1, padding_size)), if_value(right, inspect)}
end
end
defp format_diff(left, right, context, formatter) do
if has_value?(left) and has_value?(right) and formatter.(:diff_enabled?, false) do
find_diff(left, right, context)
end
end
defp colorize_diff_delete(doc, formatter) do
format = colorize_format(doc, :diff_delete, :diff_delete_whitespace)
formatter.(format, doc)
end
defp colorize_diff_insert(doc, formatter) do
format = colorize_format(doc, :diff_insert, :diff_insert_whitespace)
formatter.(format, doc)
end
defp colorize_format(content, normal, whitespace) when is_binary(content) do
if String.trim_leading(content) == "", do: whitespace, else: normal
end
defp colorize_format(_doc, normal, _whitespace) do
normal
end
defp find_diff(left, right, context) do
task = Task.async(Diff, :compute, [left, right, context])
case Task.yield(task, 1500) || Task.shutdown(task, :brutal_kill) do
{:ok, diff} -> diff
nil -> nil
end
end
defp format_stacktrace([], _case, _test, _color) do
""
end
defp format_stacktrace(stacktrace, test_case, test, color) do
extra_info("stacktrace:", color) <>
Enum.map_join(stacktrace, fn entry ->
stacktrace_info(format_stacktrace_entry(entry, test_case, test), color)
end)
end
defp format_stacktrace_entry({test_case, test, _, location}, test_case, test) do
format_file_line(location[:file], location[:line], " (test)")
end
defp format_stacktrace_entry(entry, _test_case, _test) do
format_stacktrace_entry(entry)
end
defp with_location(tags) do
path = "#{Path.relative_to_cwd(tags[:file])}:#{tags[:line]}"
if prefix = Application.get_env(:ex_unit, :test_location_relative_path) do
Path.join(prefix, path)
else
path
end
end
defp failure_header([_], _), do: ""
defp failure_header(_, i), do: "\n#{@counter_padding}Failure ##{i + 1}\n"
defp with_counter(counter, msg) when counter < 10 do
" #{counter}) #{msg}"
end
defp with_counter(counter, msg) when counter < 100 do
" #{counter}) #{msg}"
end
defp with_counter(counter, msg) do
"#{counter}) #{msg}"
end
defp test_module_info(msg, nil),
do: msg <> "failure on setup_all callback, all tests have been invalidated\n"
defp test_module_info(msg, formatter),
do: test_module_info(formatter.(:test_module_info, msg), nil)
defp test_info(msg, nil), do: msg <> "\n"
defp test_info(msg, formatter), do: test_info(formatter.(:test_info, msg), nil)
defp test_location(msg, nil), do: " " <> msg <> "\n"
defp test_location(msg, formatter), do: test_location(formatter.(:location_info, msg), nil)
defp pad(msg) do
" " <> pad_multiline(msg, 5) <> "\n"
end
defp pad_multiline(expr, padding_size) when is_binary(expr) do
padding = String.duplicate(" ", padding_size)
String.replace(expr, "\n", "\n" <> padding)
end
defp error_info(msg, nil), do: pad(msg)
defp error_info(msg, formatter), do: pad(formatter.(:error_info, msg))
defp extra_info(msg, nil), do: pad(msg)
defp extra_info(msg, formatter), do: pad(formatter.(:extra_info, msg))
defp stacktrace_info("", _formatter), do: ""
defp stacktrace_info(msg, nil), do: " " <> msg <> "\n"
defp stacktrace_info(msg, formatter),
do: stacktrace_info(formatter.(:stacktrace_info, msg), nil)
end
| 30.808511 | 108 | 0.649804 |
9ec05ca80fe13fab94c2fd6187aba12b9c7c9e24 | 872 | exs | Elixir | Microsoft.Azure.Management.Database.PostgreSql/mix.exs | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Database.PostgreSql/mix.exs | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Database.PostgreSql/mix.exs | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | defmodule Microsoft.Azure.Management.Database.PostgreSql.Mixfile do
use Mix.Project
def project do
[app: :ex_microsoft_azure_management_postgresql,
version: "0.1.0",
elixir: "~> 1.4",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
# Specify extra applications you'll use from Erlang/Elixir
[extra_applications: [:logger]]
end
# Dependencies can be Hex packages:
#
# {:my_dep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:my_dep, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
{:tesla, "~> 0.8"},
{:poison, ">= 1.0.0"}
]
end
end
| 23.567568 | 79 | 0.627294 |
9ec066f61a3543f1295bd13878fd46b1430e0f7c | 1,530 | ex | Elixir | exercises/practice/bank-account/.meta/example.ex | jaimeiniesta/elixir-1 | e8ddafeb313822645e0cd76743955a5c728a84c5 | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/practice/bank-account/.meta/example.ex | jaimeiniesta/elixir-1 | e8ddafeb313822645e0cd76743955a5c728a84c5 | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/practice/bank-account/.meta/example.ex | jaimeiniesta/elixir-1 | e8ddafeb313822645e0cd76743955a5c728a84c5 | [
"MIT"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule BankAccount do
@moduledoc """
A bank account that supports access from multiple processes.
"""
use GenServer
## Callbacks
@impl true
def init(_args) do
{:ok, 0}
end
@impl true
def handle_call(:balance, _from, balance) do
{:reply, balance, balance}
end
@impl true
def handle_call({:update, amount}, _from, balance) do
{:reply, :ok, balance + amount}
end
@impl true
def handle_call(:close, _from, balance) do
# We stop normally and return :stopped to the caller.
{:stop, :normal, :stopped, balance}
end
## Interface for tests
@typedoc """
An account handle.
"""
@opaque account :: pid
@doc """
Open the bank. Makes the account available.
"""
@spec open_bank() :: account
def open_bank() do
{:ok, pid} = GenServer.start_link(__MODULE__, [], [])
pid
end
@doc """
Close the bank. Makes the account unavailable.
"""
@spec close_bank(account) :: any
def close_bank(account) do
GenServer.stop(account)
end
@doc """
Get the account's balance.
"""
@spec balance(account) :: integer
def balance(account) do
if Process.alive?(account) do
GenServer.call(account, :balance)
else
{:error, :account_closed}
end
end
@doc """
Update the account's balance by adding the given amount which may be negative.
"""
def update(account, amount) do
if Process.alive?(account) do
GenServer.call(account, {:update, amount})
else
{:error, :account_closed}
end
end
end
| 19.615385 | 80 | 0.639216 |
9ec06d42bef71b0701225e1ee5ac6a27018e2004 | 2,181 | ex | Elixir | clients/bigtable_admin/lib/google_api/bigtable_admin/v2/model/get_policy_options.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/bigtable_admin/lib/google_api/bigtable_admin/v2/model/get_policy_options.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/bigtable_admin/lib/google_api/bigtable_admin/v2/model/get_policy_options.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigtableAdmin.V2.Model.GetPolicyOptions do
@moduledoc """
Encapsulates settings provided to GetIamPolicy.
## Attributes
* `requestedPolicyVersion` (*type:* `integer()`, *default:* `nil`) - Optional. The maximum policy version that will be used to format the policy. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests for policies with any conditional role bindings must specify version 3. Policies with no conditional role bindings may specify any valid value or leave the field unset. The policy in the response might use the policy version that you specified, or it might use a lower policy version. For example, if you specify version 3, but the policy has no conditional role bindings, the response uses version 1. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:requestedPolicyVersion => integer() | nil
}
field(:requestedPolicyVersion)
end
defimpl Poison.Decoder, for: GoogleApi.BigtableAdmin.V2.Model.GetPolicyOptions do
def decode(value, options) do
GoogleApi.BigtableAdmin.V2.Model.GetPolicyOptions.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.BigtableAdmin.V2.Model.GetPolicyOptions do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 46.404255 | 805 | 0.767538 |
9ec06f6daaee8c1cd3a2f6bbe491186143b1110b | 558 | exs | Elixir | admin/test/views/error_view_test.exs | shipperizer/symmetrical-octo-parakeet | 6c9c428898d3529c04d872fec8f099456cc54633 | [
"MIT"
] | null | null | null | admin/test/views/error_view_test.exs | shipperizer/symmetrical-octo-parakeet | 6c9c428898d3529c04d872fec8f099456cc54633 | [
"MIT"
] | null | null | null | admin/test/views/error_view_test.exs | shipperizer/symmetrical-octo-parakeet | 6c9c428898d3529c04d872fec8f099456cc54633 | [
"MIT"
] | null | null | null | defmodule Admin.ErrorViewTest do
use Admin.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(Admin.ErrorView, "404.html", []) ==
"Page not found"
end
test "render 500.html" do
assert render_to_string(Admin.ErrorView, "500.html", []) ==
"Internal server error"
end
test "render any other" do
assert render_to_string(Admin.ErrorView, "505.html", []) ==
"Internal server error"
end
end
| 25.363636 | 66 | 0.670251 |
9ec083884969e3d5a21ddc83f577107f4bcdb8e6 | 1,813 | ex | Elixir | lib/google_api/big_query/v2/model/user_defined_function_resource.ex | albert-io/elixir-google-big-query | 26537253b83e7bad513ea4b5143b156c914b1475 | [
"Apache-2.0"
] | null | null | null | lib/google_api/big_query/v2/model/user_defined_function_resource.ex | albert-io/elixir-google-big-query | 26537253b83e7bad513ea4b5143b156c914b1475 | [
"Apache-2.0"
] | null | null | null | lib/google_api/big_query/v2/model/user_defined_function_resource.ex | albert-io/elixir-google-big-query | 26537253b83e7bad513ea4b5143b156c914b1475 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.BigQuery.V2.Model.UserDefinedFunctionResource do
  @moduledoc """
  A user-defined function (UDF) code resource for a BigQuery query job.

  ## Attributes

  - inlineCode (String.t): [Pick one] An inline resource that contains code for a user-defined function (UDF). Providing a inline code resource is equivalent to providing a URI for a file containing the same code. Defaults to: `null`.
  - resourceUri (String.t): [Pick one] A code resource to load from a Google Cloud Storage URI (gs://bucket/path). Defaults to: `null`.
  """

  # Generated model: `field/1` and the struct/decode helpers used by the
  # Poison protocol implementations below come from this `use`.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :"inlineCode" => any(),
          :"resourceUri" => any()
        }

  field(:"inlineCode")
  field(:"resourceUri")
end
# Poison decoding hook: defer to the model's generated `decode/2`, which
# applies the field definitions declared via `GoogleApi.Gax.ModelBase`.
defimpl Poison.Decoder, for: GoogleApi.BigQuery.V2.Model.UserDefinedFunctionResource do
  alias GoogleApi.BigQuery.V2.Model.UserDefinedFunctionResource

  def decode(value, options), do: UserDefinedFunctionResource.decode(value, options)
end
# Poison encoding hook: all generated models share the generic encoder in
# `GoogleApi.Gax.ModelBase`, which serializes the declared fields.
defimpl Poison.Encoder, for: GoogleApi.BigQuery.V2.Model.UserDefinedFunctionResource do
  alias GoogleApi.Gax.ModelBase

  def encode(value, options), do: ModelBase.encode(value, options)
end
| 33.574074 | 234 | 0.74738 |
9ec08735610dc30002019f5ae0223004a0649ce6 | 1,269 | ex | Elixir | server/test/support/conn_case.ex | brianjaustin/melody-match | 5200fd347f7ae636ec782398896c782d80c17f59 | [
"Apache-1.1"
] | null | null | null | server/test/support/conn_case.ex | brianjaustin/melody-match | 5200fd347f7ae636ec782398896c782d80c17f59 | [
"Apache-1.1"
] | 4 | 2021-03-28T03:09:37.000Z | 2021-04-10T17:45:10.000Z | server/test/support/conn_case.ex | brianjaustin/melody-match | 5200fd347f7ae636ec782398896c782d80c17f59 | [
"Apache-1.1"
] | null | null | null | defmodule MelodyMatchWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use MelodyMatchWeb.ConnCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
import Plug.Conn
import Phoenix.ConnTest
import MelodyMatchWeb.ConnCase
alias MelodyMatchWeb.Router.Helpers, as: Routes
# The default endpoint for testing
@endpoint MelodyMatchWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(MelodyMatch.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(MelodyMatch.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 28.840909 | 73 | 0.730496 |
9ec0942cb43cc8084ea02039b5091fbfbb70f747 | 4,431 | ex | Elixir | lib/sobelow/config/csrf_route.ex | gabrielgiordan/sobelow | 2193139737fa8b2e32692c8fcba0b9052f67baa7 | [
"Apache-2.0"
] | null | null | null | lib/sobelow/config/csrf_route.ex | gabrielgiordan/sobelow | 2193139737fa8b2e32692c8fcba0b9052f67baa7 | [
"Apache-2.0"
] | null | null | null | lib/sobelow/config/csrf_route.ex | gabrielgiordan/sobelow | 2193139737fa8b2e32692c8fcba0b9052f67baa7 | [
"Apache-2.0"
] | null | null | null | defmodule Sobelow.Config.CSRFRoute do
@moduledoc """
# Cross-Site Request Forgery
In a Cross-Site Request Forgery (CSRF) attack, an untrusted
application can cause a user's browser to submit requests or perform
actions on the user's behalf.
Read more about CSRF here:
https://www.owasp.org/index.php/Cross-Site_Request_Forgery_(CSRF)
This type of CSRF is flagged by `sobelow` when state-changing
routes share an action with GET-based routes. For example:
get "/users", UserController, :new
post "/users", UserController, :new
In this instance, it may be possible to trigger the POST
functionality with a GET request and query parameters.
CSRF checks can be ignored with the following command:
$ mix sobelow -i Config.CSRFRoute
"""
alias Sobelow.Parse
use Sobelow.Finding
@state_changing_methods [:post, :put, :patch, :delete]
@finding_type "Config.CSRFRoute: CSRF via Action Reuse"
  # Entry point for the check: parses the router file, collects every
  # top-level `scope` block, merges scopes that share an alias, and reports
  # each case where a GET route reuses a controller action that is also
  # bound to a state-changing method.
  def run(router, _) do
    # Template finding carrying the check type and normalized file path;
    # per-route details (line, action, source) are filled in later.
    finding = Finding.init(@finding_type, Utils.normalize_path(router))

    router
    |> Parse.ast()
    |> Parse.get_top_level_funs_of_type(:scope)
    |> combine_scopes()
    |> Stream.flat_map(&route_findings(&1, finding))
    # Sort for deterministic txt-format output
    |> Enum.sort()
    |> Enum.each(&add_finding/1)
  end
def route_findings(scope, finding) do
scope
|> Parse.get_funs_of_type([:get | @state_changing_methods])
|> Enum.reduce(%{}, &transform_routes/2)
|> Stream.filter(&get_and_state_changing?/1)
|> Stream.flat_map(&put_finding_details(&1, finding))
end
defp put_finding_details({_, meta}, %Finding{fun_source: nil} = finding) do
src = Enum.map(meta, fn {_, v} -> v end)
Enum.reduce(
meta,
[],
&put_finding_details(&1, &2, %{finding | fun_source: {:__block__, [], src}})
)
end
defp put_finding_details({:get, fun}, acc, finding) do
finding = %{
finding
| vuln_source: fun,
vuln_line_no: Parse.get_fun_line(fun),
fun_name: get_action(fun),
confidence: :high
}
[finding | acc]
end
defp put_finding_details(_, acc, _), do: acc
defp add_finding(%Finding{} = finding) do
finding = Finding.fetch_fingerprint(finding)
file_header = "File: #{finding.filename}"
action_header = "Action: #{finding.fun_name}"
line_header = "Line: #{finding.vuln_line_no}"
case Sobelow.format() do
"json" ->
json_finding = [
type: finding.type,
file: finding.filename,
route: finding.fun_name,
line: finding.vuln_line_no
]
Sobelow.log_finding(json_finding, finding)
"txt" ->
Sobelow.log_finding(finding)
Print.print_custom_finding_metadata(
finding,
[file_header, action_header, line_header]
)
"compact" ->
Print.log_compact_finding(finding)
_ ->
Sobelow.log_finding(finding)
end
end
  # Merges the route lists of `scope` blocks that share the same alias, so
  # that action reuse split across multiple scope declarations is still
  # detected. Returns a list of merged route lists, one per distinct alias.
  def combine_scopes(scopes) do
    scopes
    |> Stream.map(&get_alias_and_list/1)
    |> Enum.reduce(%{}, &transform_scopes/2)
    |> Map.values()
  end
defp get_alias_and_list({:scope, _, [_path, alias, _opts, list]}) do
{get_alias(alias), list[:do]}
end
defp get_alias_and_list({:scope, _, [_path, opts, list]}), do: {get_alias(opts), list[:do]}
defp get_alias_and_list({:scope, _, [opts, list]}), do: {get_alias(opts), list[:do]}
defp get_alias({fun, _, opts}), do: {fun, [], opts}
defp get_alias(opts) when is_list(opts) do
opts[:alias]
end
defp get_alias(_), do: nil
defp get_and_state_changing?({_, meta}) do
has_method?(meta, :get) && Enum.any?(@state_changing_methods, &has_method?(meta, &1))
end
defp has_method?(meta, action) do
Enum.any?(meta, fn {method, _} -> method == action end)
end
defp transform_routes({method, _, opts} = fun, acc) do
value = {method, fun}
Map.update(acc, get_ca(opts), [value], &[value | &1])
end
defp transform_scopes({scope, routes}, acc) do
Map.update(acc, scope, [routes], &[routes | &1])
end
defp get_action({_, _, opts}) when is_list(opts), do: get_action(opts)
defp get_action([_, _, action | _]), do: action
defp get_ca([_path, controller, action | _]) do
[normalize_controller(controller), action]
end
defp normalize_controller({:__aliases__, _, controller}), do: controller
defp normalize_controller({fun, _, opts}), do: {fun, [], opts}
end
| 27.867925 | 93 | 0.653803 |
9ec0d8f4253e7e4e869f234af7e97e4e9429b2a7 | 278 | exs | Elixir | apps/thundermoon/priv/repo/migrations/20190804091735_add_external_id_to_user.exs | psychedel/thundermoon-umbrella | b52cb59f1b55fe092ddc70fac7cf2f6e7890c9af | [
"MIT"
] | 10 | 2019-08-02T06:41:23.000Z | 2022-03-09T16:12:30.000Z | apps/thundermoon/priv/repo/migrations/20190804091735_add_external_id_to_user.exs | psychedel/thundermoon-umbrella | b52cb59f1b55fe092ddc70fac7cf2f6e7890c9af | [
"MIT"
] | 319 | 2019-06-11T20:16:26.000Z | 2022-03-09T16:24:22.000Z | apps/thundermoon/priv/repo/migrations/20190804091735_add_external_id_to_user.exs | psychedel/thundermoon-umbrella | b52cb59f1b55fe092ddc70fac7cf2f6e7890c9af | [
"MIT"
] | 4 | 2019-08-15T19:52:07.000Z | 2021-12-02T14:52:54.000Z | defmodule Thundermoon.Repo.Migrations.AddExternalIdToUser do
use Ecto.Migration
  # Reversible migration: adds external_id, name and avatar columns to the
  # "users" table and enforces uniqueness of external_id via an index.
  # NOTE(review): columns are added without defaults, so existing rows get
  # NULL values — confirm that is acceptable for this table.
  def change do
    alter table("users") do
      add :external_id, :integer
      add :name, :string
      add :avatar, :string
    end

    create unique_index("users", :external_id)
  end
end
| 19.857143 | 60 | 0.68705 |
9ec0da56b950618c01bc1c1fe3f8acccc9b1aa16 | 125 | exs | Elixir | test/ipc_test.exs | pshah123/elixir-simple | 0b9d488769f9ef6924c8ae3753d0880d05f70015 | [
"MIT"
] | null | null | null | test/ipc_test.exs | pshah123/elixir-simple | 0b9d488769f9ef6924c8ae3753d0880d05f70015 | [
"MIT"
] | null | null | null | test/ipc_test.exs | pshah123/elixir-simple | 0b9d488769f9ef6924c8ae3753d0880d05f70015 | [
"MIT"
] | null | null | null | defmodule IPCTest do
use ExUnit.Case
doctest IPCServer
test "runs successfully" do
assert IPCServer.run
end
end
| 13.888889 | 29 | 0.744 |
9ec12b1bf52d9227ca2a290e255336ba3ed37914 | 1,359 | ex | Elixir | lib/hello_web/live/thermostat_live.ex | loretoparisi/phoenix-elixir-boilerplate | 1fd0afa7c1cefbb82d3080f151fef269332d8e14 | [
"MIT"
] | 4 | 2019-04-07T17:43:10.000Z | 2020-07-07T21:32:44.000Z | lib/hello_web/live/thermostat_live.ex | loretoparisi/phoenix-elixir-boilerplate | 1fd0afa7c1cefbb82d3080f151fef269332d8e14 | [
"MIT"
] | null | null | null | lib/hello_web/live/thermostat_live.ex | loretoparisi/phoenix-elixir-boilerplate | 1fd0afa7c1cefbb82d3080f151fef269332d8e14 | [
"MIT"
] | 3 | 2019-09-09T15:21:41.000Z | 2020-07-07T21:35:07.000Z | defmodule HelloWeb.ThermostatLive do
use Phoenix.LiveView
import Calendar.Strftime
  # Renders the thermostat UI: current mode with a toggle link, a clock,
  # the temperature reading with -/+ buttons, and a nested WeatherLive
  # child view. The ~L template below is runtime markup and is left
  # byte-for-byte unchanged.
  def render(assigns) do
    ~L"""
    <div class="thermostat">
      <div class="bar <%= @mode %>">
        <a href="#" phx-click="toggle-mode"><%= @mode %></a>
        <span><%= strftime!(@time, "%r") %></span>
      </div>
      <div class="controls">
        <span class="reading"><%= @val %></span>
        <button phx-click="dec" class="minus">-</button>
        <button phx-click="inc" class="plus">+</button>
        <span class="weather">
          <%= live_render(@socket, HelloWeb.WeatherLive) %>
        </span>
      </div>
    </div>
    """
  end
  # LiveView entry point: schedules a :tick message every 100 ms, but only
  # once the websocket is connected (guarded by `connected?/1`), then seeds
  # the initial assigns (72°, cooling mode, current local time).
  def mount(_session, socket) do
    if connected?(socket), do: :timer.send_interval(100, self(), :tick)
    {:ok, assign(socket, val: 72, mode: :cooling, time: :calendar.local_time())}
  end
def handle_info(:tick, socket) do
{:noreply, assign(socket, time: :calendar.local_time())}
end
  # "+" button: increments the reading by one. Deliberately raises once the
  # value reaches 75 — this appears to be a demo of LiveView crash/recovery
  # behavior. NOTE(review): confirm the raise is intentional before reusing
  # this module outside the demo.
  def handle_event("inc", _, socket) do
    if socket.assigns.val >= 75, do: raise("boom")
    {:noreply, update(socket, :val, &(&1 + 1))}
  end
def handle_event("dec", _, socket) do
{:noreply, update(socket, :val, &(&1 - 1))}
end
def handle_event("toggle-mode", _, socket) do
{:noreply,
update(socket, :mode, fn
:cooling -> :heating
:heating -> :cooling
end)}
end
end
| 27.18 | 80 | 0.57248 |
9ec14066d0aa5657fd040481e134f768efdd438c | 26,514 | exs | Elixir | test/ecto_nested_changeset_test.exs | BrianPhilips/ecto_nested_changeset | 4d16f2955c1e7c71e25685030f66aefeba5ce0fc | [
"MIT"
] | null | null | null | test/ecto_nested_changeset_test.exs | BrianPhilips/ecto_nested_changeset | 4d16f2955c1e7c71e25685030f66aefeba5ce0fc | [
"MIT"
] | null | null | null | test/ecto_nested_changeset_test.exs | BrianPhilips/ecto_nested_changeset | 4d16f2955c1e7c71e25685030f66aefeba5ce0fc | [
"MIT"
] | null | null | null | defmodule EctoNestedChangesetTest do
use ExUnit.Case
import Ecto.Changeset
import EctoNestedChangeset
alias __MODULE__.Category
alias __MODULE__.Comment
alias __MODULE__.Post
alias Ecto.Changeset
defmodule Category do
use Ecto.Schema
schema "categories" do
has_many :posts, EctoNestedChangesetTest.Post, on_replace: :delete
end
end
defmodule Comment do
use Ecto.Schema
schema "comments" do
belongs_to :post, EctoNestedChangesetTest.Post
end
end
defmodule Post do
use Ecto.Schema
schema "posts" do
field :delete, :boolean, virtual: true, default: false
field :title, :string
field :tags, {:array, :string}, default: []
belongs_to :category, EctoNestedChangesetTest.Category
has_many :comments, EctoNestedChangesetTest.Comment
end
end
describe "append_at/3" do
test "appends item at a root level field without data" do
changeset =
%Category{id: 1, posts: []}
|> change()
|> append_at(:posts, %Post{title: "first"})
|> append_at(:posts, %Post{title: "second"})
assert %{
posts: [
%Ecto.Changeset{action: :insert, data: %Post{title: "first"}},
%Ecto.Changeset{action: :insert, data: %Post{title: "second"}}
]
} = changeset.changes
end
test "doesn't raise error if field of unpersisted resource is not loaded" do
%Category{id: 1}
|> change()
|> append_at(:posts, %Post{title: "first"})
end
test "raises error if field of persisted resource is not preloaded" do
assert_raise EctoNestedChangeset.NotLoadedError,
"field `:posts` is not loaded",
fn ->
%Category{id: 1}
|> Map.update!(:__meta__, &Map.put(&1, :state, :loaded))
|> change()
|> append_at(:posts, %Post{title: "first"})
end
end
test "appends item at a sub field of a new list item" do
changeset =
%Category{id: 1, posts: []}
|> change()
|> append_at(:posts, %Post{title: "first"})
|> append_at([:posts, 0, :comments], %Comment{})
assert %{
posts: [
%Ecto.Changeset{
action: :insert,
changes: %{
comments: [
%Ecto.Changeset{
action: :insert,
data: %Comment{}
}
]
},
data: %Post{title: "first"}
}
]
} = changeset.changes
end
test "appends item at a root level field with existing data" do
changeset =
%Category{id: 1, posts: [%Post{id: 1, title: "existing"}]}
|> change()
|> append_at(:posts, %Post{title: "first"})
|> append_at(:posts, %Post{title: "second"})
assert %{
posts: [
%Ecto.Changeset{
action: :update,
data: %Post{title: "existing"}
},
%Ecto.Changeset{action: :insert, data: %Post{title: "first"}},
%Ecto.Changeset{action: :insert, data: %Post{title: "second"}}
]
} = changeset.changes
end
test "appends item at a nested field" do
changeset =
%Category{
id: 1,
posts: [
%Post{
id: 1,
title: "first",
comments: [%Comment{id: 1}]
},
%Post{
id: 2,
title: "second",
comments: []
}
]
}
|> change()
|> append_at([:posts, 1, :comments], %Comment{})
|> append_at([:posts, 0, :comments], %Comment{})
assert %{
posts: [
%Ecto.Changeset{
action: :update,
changes: %{
comments: [
%Ecto.Changeset{
action: :update,
data: %Comment{},
valid?: true
},
%Ecto.Changeset{
action: :insert,
data: %Comment{},
valid?: true
}
]
},
data: %Post{}
},
%Ecto.Changeset{
action: :update,
changes: %{
comments: [
%Ecto.Changeset{
action: :insert,
data: %Comment{}
}
]
},
data: %Post{}
}
]
} = changeset.changes
end
test "appends item to an array field" do
changeset =
%Category{id: 1, posts: [%Post{id: 1, title: "first", tags: ["one"]}]}
|> change()
|> append_at([:posts, 0, :tags], "two")
assert %{
posts: [
%Ecto.Changeset{
action: :update,
data: %Post{title: "first"},
changes: %{tags: ["one", "two"]},
valid?: true
}
]
} = changeset.changes
end
end
describe "prepend_at/3" do
test "prepend item at a root level field without data" do
changeset =
%Category{id: 1, posts: []}
|> change()
|> prepend_at(:posts, %Post{title: "first"})
|> prepend_at(:posts, %Post{title: "second"})
assert %{
posts: [
%Ecto.Changeset{action: :insert, data: %Post{title: "second"}},
%Ecto.Changeset{action: :insert, data: %Post{title: "first"}}
]
} = changeset.changes
end
test "doesn't raise error if field of unpersisted resource is not loaded" do
%Category{id: 1}
|> change()
|> prepend_at(:posts, %Post{title: "first"})
end
test "raises error if field of persisted resource is not preloaded" do
assert_raise EctoNestedChangeset.NotLoadedError,
"field `:posts` is not loaded",
fn ->
%Category{id: 1}
|> Map.update!(:__meta__, &Map.put(&1, :state, :loaded))
|> change()
|> prepend_at(:posts, %Post{title: "first"})
end
end
test "prepends item at a sub field of a new list item" do
changeset =
%Category{id: 1, posts: []}
|> change()
|> prepend_at(:posts, %Post{title: "first"})
|> prepend_at([:posts, 0, :comments], %Comment{})
assert %{
posts: [
%Ecto.Changeset{
action: :insert,
changes: %{
comments: [
%Ecto.Changeset{
action: :insert,
data: %Comment{}
}
]
},
data: %Post{title: "first"}
}
]
} = changeset.changes
end
test "prepend item at a root level field with existing data" do
changeset =
%Category{id: 1, posts: [%Post{id: 1, title: "existing"}]}
|> change()
|> prepend_at(:posts, %Post{title: "first"})
|> prepend_at(:posts, %Post{title: "second"})
assert %{
posts: [
%Ecto.Changeset{action: :insert, data: %Post{title: "second"}},
%Ecto.Changeset{action: :insert, data: %Post{title: "first"}},
%Ecto.Changeset{
action: :update,
data: %Post{title: "existing"}
}
]
} = changeset.changes
end
test "prepend item at a nested field" do
changeset =
%Category{
id: 1,
posts: [
%Post{
id: 1,
title: "first",
comments: [%Comment{id: 1}]
},
%Post{
id: 2,
title: "second",
comments: []
}
]
}
|> change()
|> prepend_at([:posts, 1, :comments], %Comment{})
|> prepend_at([:posts, 0, :comments], %Comment{})
assert %{
posts: [
%Ecto.Changeset{
action: :update,
changes: %{
comments: [
%Ecto.Changeset{
action: :insert,
data: %Comment{},
valid?: true
},
%Ecto.Changeset{
action: :update,
data: %Comment{},
valid?: true
}
]
},
data: %Post{}
},
%Ecto.Changeset{
action: :update,
changes: %{
comments: [
%Ecto.Changeset{
action: :insert,
data: %Comment{}
}
]
},
data: %Post{}
}
]
} = changeset.changes
end
test "prepends item to an array field" do
changeset =
%Category{id: 1, posts: [%Post{id: 1, title: "first", tags: ["one"]}]}
|> change()
|> prepend_at([:posts, 0, :tags], "two")
assert %{
posts: [
%Ecto.Changeset{
action: :update,
data: %Post{title: "first"},
changes: %{tags: ["two", "one"]},
valid?: true
}
]
} = changeset.changes
end
end
describe "insert_at/3" do
test "inserts item at a root level field without data" do
changeset =
%Category{id: 1, posts: []}
|> change()
|> insert_at([:posts, 0], %Post{title: "first"})
|> insert_at([:posts, 0], %Post{title: "second"})
assert %{
posts: [
%Ecto.Changeset{
action: :insert,
data: %Post{title: "second"},
valid?: true
},
%Ecto.Changeset{
action: :insert,
data: %Post{title: "first"},
valid?: true
}
]
} = changeset.changes
end
test "doesn't raise error if field of unpersisted resource is not loaded" do
%Category{id: 1}
|> change()
|> insert_at([:posts, 0], %Post{title: "first"})
end
test "raises error if field of persisted resource is not preloaded" do
assert_raise EctoNestedChangeset.NotLoadedError,
"field `:posts` is not loaded",
fn ->
%Category{id: 1}
|> Map.update!(:__meta__, &Map.put(&1, :state, :loaded))
|> change()
|> insert_at([:posts, 0], %Post{title: "first"})
end
end
test "inserts item at a sub field of a new list item" do
changeset =
%Category{id: 1, posts: []}
|> change()
|> insert_at([:posts, 0], %Post{title: "first"})
|> insert_at([:posts, 0, :comments, 0], %Comment{})
assert %{
posts: [
%Ecto.Changeset{
action: :insert,
changes: %{
comments: [
%Ecto.Changeset{
action: :insert,
data: %Comment{}
}
]
},
data: %Post{title: "first"}
}
]
} = changeset.changes
end
test "inserts item at a root level field with existing data" do
changeset =
%Category{
id: 1,
posts: [
%Post{id: 1, title: "existing 1"},
%Post{id: 2, title: "existing 2"},
%Post{id: 3, title: "existing 3"}
]
}
|> change()
|> insert_at([:posts, 2], %Post{title: "first"})
|> insert_at([:posts, 1], %Post{title: "second"})
assert %{
posts: [
%Ecto.Changeset{
action: :update,
data: %Post{title: "existing 1"},
valid?: true
},
%Ecto.Changeset{
action: :insert,
data: %Post{title: "second"},
valid?: true
},
%Ecto.Changeset{
action: :update,
data: %Post{title: "existing 2"},
valid?: true
},
%Ecto.Changeset{
action: :insert,
data: %Post{title: "first"},
valid?: true
},
%Ecto.Changeset{
action: :update,
data: %Post{title: "existing 3"},
valid?: true
}
]
} = changeset.changes
end
test "inserts item at a nested field" do
changeset =
%Category{
id: 1,
posts: [
%Post{
id: 1,
title: "first",
comments: [%Comment{id: 1}, %Comment{id: 2}, %Comment{id: 3}]
},
%Post{
id: 2,
title: "second",
comments: [%Comment{id: 4}, %Comment{id: 5}]
}
]
}
|> change()
|> insert_at([:posts, 0, :comments, 3], %Comment{})
|> insert_at([:posts, 1, :comments, 1], %Comment{})
assert %{
posts: [
%Ecto.Changeset{
action: :update,
changes: %{
comments: [
%Ecto.Changeset{
action: :update,
data: %Comment{},
valid?: true
},
%Ecto.Changeset{
action: :update,
data: %Comment{},
valid?: true
},
%Ecto.Changeset{
action: :update,
data: %Comment{},
valid?: true
},
%Ecto.Changeset{
action: :insert,
data: %Comment{},
valid?: true
}
]
},
data: %Post{},
valid?: true
},
%Ecto.Changeset{
action: :update,
changes: %{
comments: [
%Ecto.Changeset{action: :update, data: %Comment{}},
%Ecto.Changeset{action: :insert, data: %Comment{}},
%Ecto.Changeset{action: :update, data: %Comment{}}
]
},
data: %Post{}
}
]
} = changeset.changes
end
test "inserts item into array field" do
changeset =
%Category{id: 1, posts: [%Post{title: "first", tags: ["one", "two"]}]}
|> change()
|> insert_at([:posts, 0, :tags, 1], "three")
assert %{
posts: [
%Ecto.Changeset{
action: :update,
data: %Post{title: "first"},
changes: %{tags: ["one", "three", "two"]},
valid?: true
}
]
} = changeset.changes
end
end
describe "update_at/3" do
test "updates a field" do
changeset =
%Category{id: 1, posts: [%Post{id: 1, title: "first"}]}
|> change()
|> update_at([:posts, 0, :title], &String.reverse/1)
|> append_at([:posts], %Post{title: "second"})
|> update_at([:posts, 1, :title], &String.duplicate(&1, 2))
assert %{
posts: [
%Ecto.Changeset{
action: :update,
data: %Post{},
changes: %{title: "tsrif"},
valid?: true
},
%Ecto.Changeset{
action: :insert,
data: %Post{},
changes: %{title: "secondsecond"},
valid?: true
}
]
} = changeset.changes
end
test "updates a list field" do
changeset =
%Category{
id: 1,
posts: [%Post{id: 1, title: "first"}, %Post{id: 2, title: "second"}]
}
|> change()
|> append_at([:posts], %Post{title: "third"})
|> update_at(
[:posts],
&Enum.map(&1, fn post_changeset ->
title = get_field(post_changeset, :title)
put_change(post_changeset, :title, String.reverse(title))
end)
)
assert %{
posts: [
%Ecto.Changeset{
action: :update,
data: %Post{id: 1},
changes: %{title: "tsrif"},
valid?: true
},
%Ecto.Changeset{
action: :update,
data: %Post{id: 2},
changes: %{title: "dnoces"},
valid?: true
},
%Ecto.Changeset{
action: :insert,
data: %Post{},
changes: %{title: "driht"},
valid?: true
}
]
} = changeset.changes
end
test "updates item in array field" do
changeset =
%Category{id: 1, posts: [%Post{title: "first", tags: ["one", "two"]}]}
|> change()
|> append_at([:posts, 0, :tags], "three")
|> update_at([:posts, 0, :tags, 1], &String.reverse/1)
assert %{
posts: [
%Ecto.Changeset{
action: :update,
data: %Post{title: "first"},
changes: %{tags: ["one", "owt", "three"]},
valid?: true
}
]
} = changeset.changes
end
test "updates array field" do
changeset =
%Category{id: 1, posts: [%Post{title: "first", tags: ["one", "two"]}]}
|> change()
|> append_at([:posts, 0, :tags], "three")
|> update_at(
[:posts, 0, :tags],
&Enum.map(&1, fn tag -> String.reverse(tag) end)
)
assert %{
posts: [
%Ecto.Changeset{
action: :update,
data: %Post{title: "first"},
changes: %{tags: ["eno", "owt", "eerht"]},
valid?: true
}
]
} = changeset.changes
end
end
describe "delete_at/3" do
test "deletes item from changes that isn't persisted yet" do
changeset =
%Category{
id: 1,
posts: [
%Post{id: 1, title: "one"},
%Post{id: 2, title: "two"}
]
}
|> change()
|> append_at([:posts], %Post{title: "three"})
|> delete_at([:posts, 2])
assert changeset.changes == %{}
end
test "deletes existing item" do
changeset =
%Category{
id: 1,
posts: [
%Post{id: 1, title: "one"},
%Post{id: 2, title: "two"},
%Post{id: 3, title: "three"}
]
}
|> change()
|> delete_at([:posts, 1])
assert %{
posts: [
%Changeset{action: :replace, data: %Post{id: 2}},
%Changeset{action: :update, data: %Post{id: 1}},
%Changeset{action: :update, data: %Post{id: 3}}
]
} = changeset.changes
end
test "puts delete action for persisted data" do
changeset =
%Category{
id: 1,
posts: [
%Post{id: 1, title: "one"},
%Post{id: 2, title: "two"},
%Post{id: 3, title: "three"}
]
}
|> change()
|> prepend_at([:posts], %Post{})
|> delete_at([:posts, 2], mode: {:action, :delete})
|> delete_at([:posts, 0], mode: {:action, :delete})
assert %{
posts: [
%Changeset{action: :update, data: %Post{id: 1}},
%Changeset{action: :delete, data: %Post{id: 2}},
%Changeset{action: :update, data: %Post{id: 3}}
]
} = changeset.changes
end
test "removes persisted data from list with replace option" do
changeset =
%Category{
id: 1,
posts: [
%Post{id: 1, title: "one"},
%Post{id: 2, title: "two"},
%Post{id: 3, title: "three"}
]
}
|> change()
|> prepend_at([:posts], %Post{})
|> delete_at([:posts, 0], mode: {:action, :replace})
|> delete_at([:posts, 1], mode: {:action, :replace})
assert %{
posts: [
%Changeset{action: :replace, data: %Post{id: 2}},
%Changeset{action: :update, data: %Post{id: 1}},
%Changeset{action: :update, data: %Post{id: 3}}
]
} = changeset.changes
end
test "puts change on delete field from list with flag option" do
changeset =
%Category{
id: 1,
posts: [
%Post{id: 1, title: "one"},
%Post{id: 2, title: "two"},
%Post{id: 3, title: "three"}
]
}
|> change()
|> prepend_at([:posts], %Post{})
|> delete_at([:posts, 0], mode: {:flag, :delete})
|> delete_at([:posts, 1], mode: {:flag, :delete})
assert %{
posts: [
%Changeset{action: :update, data: %Post{id: 1}, valid?: true},
%Changeset{
action: :update,
changes: %{delete: true},
data: %Post{id: 2},
valid?: true
},
%Changeset{action: :update, data: %Post{id: 3}, valid?: true}
]
} = changeset.changes
end
test "deletes item from changes in nested field" do
changeset =
%Category{
id: 1,
posts: [
%Post{id: 1, title: "one"},
%Post{
id: 2,
title: "two",
comments: [%Comment{id: 1}, %Comment{id: 2}]
}
]
}
|> change()
|> append_at([:posts, 1, :comments], %Comment{})
|> delete_at([:posts, 1, :comments, 2])
assert changeset.changes == %{}
end
test "deletes existing item from a nested field" do
changeset =
%Category{
id: 1,
posts: [
%Post{
id: 1,
title: "first",
comments: [%Comment{id: 1}, %Comment{id: 2}, %Comment{id: 3}]
},
%Post{
id: 2,
title: "second",
comments: [%Comment{id: 4}, %Comment{id: 5}]
}
]
}
|> change()
|> delete_at([:posts, 1, :comments, 0], mode: {:action, :delete})
|> delete_at([:posts, 0, :comments, 1], mode: {:action, :delete})
assert %{
posts: [
%Ecto.Changeset{
action: :update,
changes: %{
comments: [
%Ecto.Changeset{
action: :update,
data: %Comment{id: 1},
valid?: true
},
%Ecto.Changeset{
action: :delete,
data: %Comment{id: 2},
valid?: true
},
%Ecto.Changeset{
action: :update,
data: %Comment{id: 3},
valid?: true
}
]
},
data: %Post{}
},
%Ecto.Changeset{
action: :update,
changes: %{
comments: [
%Ecto.Changeset{
action: :delete,
data: %Comment{id: 4}
},
%Ecto.Changeset{
action: :update,
data: %Comment{id: 5}
}
]
},
data: %Post{}
}
]
} = changeset.changes
end
test "deletes item from an array field" do
changeset =
%Category{
id: 1,
posts: [%Post{title: "first", tags: ["one", "two", "three"]}]
}
|> change()
|> delete_at([:posts, 0, :tags, 1])
assert %{
posts: [
%Ecto.Changeset{
action: :update,
data: %Post{title: "first"},
changes: %{tags: ["one", "three"]},
valid?: true
}
]
} = changeset.changes
end
end
end
| 30.267123 | 80 | 0.388172 |
9ec1609d307741ce93e40ad645c27499319e9269 | 1,376 | exs | Elixir | ros/ros_service/config/config.exs | kujua/elixir-handbook | 4185ad8da7f652fdb59c799dc58bcb33fda10475 | [
"Apache-2.0"
] | 1 | 2019-07-01T18:47:28.000Z | 2019-07-01T18:47:28.000Z | ros/ros_service/config/config.exs | kujua/elixir-handbook | 4185ad8da7f652fdb59c799dc58bcb33fda10475 | [
"Apache-2.0"
] | 4 | 2020-07-17T16:57:18.000Z | 2021-05-09T23:50:52.000Z | ros/ros_service/config/config.exs | kujua/elixir-handbook | 4185ad8da7f652fdb59c799dc58bcb33fda10475 | [
"Apache-2.0"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# third-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :ros_service, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:ros_service, :key)
#
# You can also configure a third-party app:
#
# config :logger, level: :info
#
config :ros_service, Ros.Service.PermanentRepository,
database: "ros_data",
username: "postgres",
password: "postgres",
hostname: "localhost"
config :ros_service, ecto_repos: [Ros.Service.PermanentRepository]
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env()}.exs"
| 35.282051 | 73 | 0.743459 |
9ec18df16c4a11496b4b4e3dadccf596b838eafb | 1,175 | ex | Elixir | Chapter7/persistable_todo_cache/lib/todo/cache.ex | benjamindburke/elixir-studies | 65231b5af83dcf701041cae2879107c3bd3e5078 | [
"Unlicense"
] | null | null | null | Chapter7/persistable_todo_cache/lib/todo/cache.ex | benjamindburke/elixir-studies | 65231b5af83dcf701041cae2879107c3bd3e5078 | [
"Unlicense"
] | null | null | null | Chapter7/persistable_todo_cache/lib/todo/cache.ex | benjamindburke/elixir-studies | 65231b5af83dcf701041cae2879107c3bd3e5078 | [
"Unlicense"
] | null | null | null | defmodule Todo.Cache do
use GenServer
  @impl GenServer
  # Starts the database process as a side effect of cache startup, then
  # begins with an empty map of todo-list-name => server-pid as state.
  # NOTE(review): tying Todo.Database startup to this init means a slow or
  # failing database start blocks the cache — confirm that is intended.
  def init(_) do
    Todo.Database.start()
    {:ok, %{}}
  end
  # The server pid must be replied to the caller, so this is a synchronous
  # handle_call rather than a cast.
  @impl GenServer
  def handle_call({:server_process, todo_list_name}, _, todo_servers) do
    # Lazily create one Todo.Server per list name, caching the pid in the
    # state map so repeated lookups return the same process.
    case Map.fetch(todo_servers, todo_list_name) do
      {:ok, todo_server} ->
        {:reply, todo_server, todo_servers}

      :error ->
        {:ok, new_server} = Todo.Server.start(todo_list_name)
        {:reply, new_server, Map.put(todo_servers, todo_list_name, new_server)}
    end
  end
def start() do
GenServer.start(__MODULE__, nil)
end
def server_process(cache, todo_list_name) do
GenServer.call(cache, {:server_process, todo_list_name})
end
end
# TEST CODE
# ---------
# {:ok, cache} = Todo.Cache.start()
# Todo.Cache.server_process("Bob's list")
# Todo.Cache.server_process("Bob's list")
# Todo.Cache.server_process("Alice's list")
# bobs_list = Todo.Cache.server_process("Bob's list")
# Enum.each(
# 1..100_000,
# fn index ->
# Todo.Cache.server_process("to-do list #{index}")
# end
# )
# :erlang.system_info(:process_count) | 26.111111 | 90 | 0.674894 |
9ec19c9d3fa373d3bbf07f12c5900b435785cf91 | 155 | ex | Elixir | test/support/models/department.ex | nickolaich/formex_ecto | 322907daa4924d1c297acc7f9e60f99bc8f2f6f3 | [
"MIT"
] | 20 | 2017-06-25T12:30:03.000Z | 2021-04-25T06:43:31.000Z | test/support/models/department.ex | nickolaich/formex_ecto | 322907daa4924d1c297acc7f9e60f99bc8f2f6f3 | [
"MIT"
] | 8 | 2017-11-19T17:15:32.000Z | 2020-02-02T12:50:03.000Z | test/support/models/department.ex | nickolaich/formex_ecto | 322907daa4924d1c297acc7f9e60f99bc8f2f6f3 | [
"MIT"
] | 9 | 2018-03-30T22:14:13.000Z | 2021-11-15T12:12:56.000Z | defmodule Formex.Ecto.TestModel.Department do
use Formex.Ecto.TestModel
schema "departments" do
field(:name, :string)
timestamps()
end
end
| 15.5 | 45 | 0.722581 |
9ec1a0e07e568eed0303ec7bfddb3ab869dfce49 | 1,161 | ex | Elixir | core/util/erlang_log_translator.ex | wses-yoshida/antikythera | e108e59d2339edd0b0fad31ad4f41f56df45be55 | [
"Apache-2.0"
] | null | null | null | core/util/erlang_log_translator.ex | wses-yoshida/antikythera | e108e59d2339edd0b0fad31ad4f41f56df45be55 | [
"Apache-2.0"
] | null | null | null | core/util/erlang_log_translator.ex | wses-yoshida/antikythera | e108e59d2339edd0b0fad31ad4f41f56df45be55 | [
"Apache-2.0"
] | null | null | null | # Copyright(c) 2015-2019 ACCESS CO., LTD. All rights reserved.
use Croma
defmodule AntikytheraCore.ErlangLogTranslator do
  @moduledoc """
  Translator for `Logger`, installed via application config.

  Most of translations are delegated to `Logger.Translator`;
  this translator neglects messages of the following types:

  - SASL progress report
  - error log emitted by syn about mnesia down event (when a node in the cluster is terminated)
  - supervisor report on brutal kill of a worker process in PoolSup (when a too-long-running worker is stopped)
  """

  # Drop SASL progress reports (routine supervisor/child startup noise).
  def translate(_min_level, :info, :report, {:progress, _data}) do
    :skip
  end

  # Drop syn's error log about a MNESIA down event; this is expected when a
  # node in the cluster terminates. The matched prefix is a charlist because
  # the message originates from Erlang code.
  def translate(_min_level, :error, :format, {'Received a MNESIA down event' ++ _, _}) do
    :skip
  end

  # Supervisor reports: suppress only those emitted by PoolSup (brutal kill
  # of a too-long-running worker); all other supervisor reports go through
  # the default translator.
  def translate(min_level, :error, :report, {:supervisor_report, kw} = message) do
    case Keyword.get(kw, :supervisor) do
      {_pid, PoolSup.Callback} -> :skip
      _otherwise -> Logger.Translator.translate(min_level, :error, :report, message)
    end
  end

  # Fallback clause: delegate everything else to the stock translator.
  def translate(min_level, level, kind, message) do
    Logger.Translator.translate(min_level, level, kind, message)
  end
end
| 32.25 | 111 | 0.712317 |
9ec1b4f6300817235a204548433679ed1dbf16fa | 27,875 | exs | Elixir | lib/elixir/test/elixir/string_test.exs | eproxus/elixir | 1c3a3bde539bc96c80d917fbcf3c9dc9e123860b | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/string_test.exs | eproxus/elixir | 1c3a3bde539bc96c80d917fbcf3c9dc9e123860b | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/string_test.exs | eproxus/elixir | 1c3a3bde539bc96c80d917fbcf3c9dc9e123860b | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
defmodule StringTest do
  # Unit tests for the String module. Heavily Unicode-dependent: several
  # assertions intentionally use ligatures, emoji, and composed/decomposed
  # forms; where two forms render identically, explicit \uXXXX escapes are
  # used so the intended byte sequence is unambiguous.
  use ExUnit.Case, async: true

  doctest String

  test "next codepoint" do
    assert String.next_codepoint("ésoj") == {"é", "soj"}
    assert String.next_codepoint(<<255>>) == {<<255>>, ""}
    assert String.next_codepoint("") == nil
  end

  # test cases described in http://mortoray.com/2013/11/27/the-string-type-is-broken/
  test "Unicode" do
    assert String.reverse("noël") == "lëon"
    assert String.slice("noël", 0..2) == "noë"
    assert String.length("noël") == 4

    # NOTE(review): the emoji/ligature literals below had been stripped in a
    # previous copy (the strings were empty, which made these assertions
    # false); restored to the forms the linked article exercises.
    assert String.length("😸😾") == 2
    assert String.slice("😸😾", 1..1) == "😾"
    assert String.reverse("😸😾") == "😾😸"

    # The ﬄ ligature upcases to the three letters "FFL".
    assert String.upcase("baﬄe") == "BAFFLE"

    # Decomposed (e + U+0308) and precomposed (U+00EB) spellings are
    # canonically equivalent.
    assert String.equivalent?("noe\u0308l", "no\u00EBl")
  end

  test "split" do
    assert String.split("") == []
    assert String.split("foo bar") == ["foo", "bar"]
    assert String.split(" foo bar") == ["foo", "bar"]
    assert String.split("foo bar ") == ["foo", "bar"]
    assert String.split(" foo bar ") == ["foo", "bar"]
    assert String.split("foo\t\n\v\f\r\sbar\n") == ["foo", "bar"]
    assert String.split("foo" <> <<194, 133>> <> "bar") == ["foo", "bar"]
    # information separators are not considered whitespace
    assert String.split("foo\u001Fbar") == ["foo\u001Fbar"]
    # no-break space is excluded
    # (was "\00A0" — a NUL byte followed by "0A0" — fixed to the intended U+00A0 escape)
    assert String.split("foo\u00A0bar") == ["foo\u00A0bar"]
    assert String.split("foo\u202Fbar") == ["foo\u202Fbar"]

    assert String.split("a,b,c", ",") == ["a", "b", "c"]
    assert String.split("a,b", ".") == ["a,b"]
    assert String.split("1,2 3,4", [" ", ","]) == ["1", "2", "3", "4"]

    assert String.split("", ",") == [""]
    assert String.split(" a b c ", " ") == ["", "a", "b", "c", ""]
    assert String.split(" a b c ", " ", parts: :infinity) == ["", "a", "b", "c", ""]
    assert String.split(" a b c ", " ", parts: 1) == [" a b c "]
    assert String.split(" a b c ", " ", parts: 2) == ["", "a b c "]

    assert String.split("", ",", trim: true) == []
    assert String.split(" a b c ", " ", trim: true) == ["a", "b", "c"]
    assert String.split(" a b c ", " ", trim: true, parts: :infinity) == ["a", "b", "c"]
    assert String.split(" a b c ", " ", trim: true, parts: 1) == [" a b c "]
    assert String.split(" a b c ", " ", trim: true, parts: 2) == ["a", "b c "]

    assert String.split("abé", "") == ["a", "b", "é", ""]
    assert String.split("abé", "", parts: :infinity) == ["a", "b", "é", ""]
    assert String.split("abé", "", parts: 1) == ["abé"]
    assert String.split("abé", "", parts: 2) == ["a", "bé"]
    assert String.split("abé", "", parts: 10) == ["a", "b", "é", ""]
    assert String.split("abé", "", trim: true) == ["a", "b", "é"]
    assert String.split("abé", "", trim: true, parts: :infinity) == ["a", "b", "é"]
    assert String.split("abé", "", trim: true, parts: 2) == ["a", "bé"]

    assert String.split("noël", "") == ["n", "o", "ë", "l", ""]
  end

  test "split with regex" do
    assert String.split("", ~r{,}) == [""]
    assert String.split("", ~r{,}, trim: true) == []
    assert String.split("a,b", ~r{,}) == ["a", "b"]
    assert String.split("a,b,c", ~r{,}) == ["a", "b", "c"]
    assert String.split("a,b,c", ~r{,}, parts: 2) == ["a", "b,c"]
    assert String.split("a,b.c ", ~r{\W}) == ["a", "b", "c", ""]
    assert String.split("a,b.c ", ~r{\W}, trim: false) == ["a", "b", "c", ""]
    assert String.split("a,b", ~r{\.}) == ["a,b"]
  end

  test "splitter" do
    assert String.splitter("a,b,c", ",") |> Enum.to_list == ["a", "b", "c"]
    assert String.splitter("a,b", ".") |> Enum.to_list == ["a,b"]
    assert String.splitter("1,2 3,4", [" ", ","]) |> Enum.to_list == ["1", "2", "3", "4"]
    assert String.splitter("", ",") |> Enum.to_list == [""]

    assert String.splitter("", ",", trim: true) |> Enum.to_list == []
    assert String.splitter(" a b c ", " ", trim: true) |> Enum.to_list == ["a", "b", "c"]
    assert String.splitter(" a b c ", " ", trim: true) |> Enum.take(1) == ["a"]
    assert String.splitter(" a b c ", " ", trim: true) |> Enum.take(2) == ["a", "b"]
  end

  test "split at" do
    assert String.split_at("", 0) == {"", ""}
    assert String.split_at("", -1) == {"", ""}
    assert String.split_at("", 1) == {"", ""}

    assert String.split_at("abc", 0) == {"", "abc"}
    assert String.split_at("abc", 2) == {"ab", "c"}
    assert String.split_at("abc", 3) == {"abc", ""}
    assert String.split_at("abc", 4) == {"abc", ""}
    assert String.split_at("abc", 1000) == {"abc", ""}

    assert String.split_at("abc", -1) == {"ab", "c"}
    assert String.split_at("abc", -3) == {"", "abc"}
    assert String.split_at("abc", -4) == {"", "abc"}
    assert String.split_at("abc", -1000) == {"", "abc"}

    assert_raise FunctionClauseError, fn ->
      String.split_at("abc", 0.1)
    end

    assert_raise FunctionClauseError, fn ->
      String.split_at("abc", -0.1)
    end
  end

  test "upcase" do
    assert String.upcase("123 abcd 456 efg hij ( %$#) kl mnop @ qrst = -_ uvwxyz") == "123 ABCD 456 EFG HIJ ( %$#) KL MNOP @ QRST = -_ UVWXYZ"
    assert String.upcase("") == ""
    assert String.upcase("abcD") == "ABCD"
  end

  test "upcase utf8" do
    assert String.upcase("& % # àáâ ãäå 1 2 ç æ") == "& % # ÀÁÂ ÃÄÅ 1 2 Ç Æ"
    assert String.upcase("àáâãäåæçèéêëìíîïðñòóôõöøùúûüýþ") == "ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ"
  end

  test "upcase utf8 multibyte" do
    assert String.upcase("straße") == "STRASSE"
    assert String.upcase("áüÈß") == "ÁÜÈSS"
  end

  test "downcase" do
    assert String.downcase("123 ABcD 456 EfG HIJ ( %$#) KL MNOP @ QRST = -_ UVWXYZ") == "123 abcd 456 efg hij ( %$#) kl mnop @ qrst = -_ uvwxyz"
    assert String.downcase("abcD") == "abcd"
    assert String.downcase("") == ""
  end

  test "downcase utf8" do
    assert String.downcase("& % # ÀÁÂ ÃÄÅ 1 2 Ç Æ") == "& % # àáâ ãäå 1 2 ç æ"
    assert String.downcase("ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ") == "àáâãäåæçèéêëìíîïðñòóôõöøùúûüýþ"
    assert String.downcase("áüÈß") == "áüèß"
  end

  test "capitalize" do
    assert String.capitalize("") == ""
    assert String.capitalize("abc") == "Abc"
    assert String.capitalize("ABC") == "Abc"
    assert String.capitalize("c b a") == "C b a"
    assert String.capitalize("1ABC") == "1abc"
    assert String.capitalize("_aBc1") == "_abc1"
    assert String.capitalize(" aBc1") == " abc1"
  end

  test "capitalize utf8" do
    assert String.capitalize("àáâ") == "Àáâ"
    assert String.capitalize("ÀÁÂ") == "Àáâ"
    assert String.capitalize("âáà") == "Âáà"
    assert String.capitalize("ÂÁÀ") == "Âáà"
    assert String.capitalize("òóôõö") == "Òóôõö"
    assert String.capitalize("ÒÓÔÕÖ") == "Òóôõö"
    # The ﬁ ligature titlecases to the two letters "Fi"
    # (restored: the ligature had been flattened to plain "fin").
    assert String.capitalize("ﬁn") == "Fin"
  end

  test "replace_leading" do
    assert String.replace_leading("aa abc   ", "a", "b") == "bb abc   "
    assert String.replace_leading("__ abc   ", "_", "b") == "bb abc   "
    assert String.replace_leading("aaaaaaaa ", "a", "b") == "bbbbbbbb "
    assert String.replace_leading("aaaaaaaa ", "aaa", "b") == "bbaa "
    assert String.replace_leading("aaaaaaaaa", "a", "b") == "bbbbbbbbb"
    assert String.replace_leading("]]]]]]", "]", "[]") == "[][][][][][]"
    assert String.replace_leading("]]]]]]]]", "]", "") == ""
    assert String.replace_leading("]]]]]] ]", "]", "") == " ]"
    assert String.replace_leading("猫猫 cat  ", "猫", "й") == "йй cat  "
    assert String.replace_leading("test", "t", "T") == "Test"
    assert String.replace_leading("t", "t", "T") == "T"
    assert String.replace_leading("aaa", "b", "c") == "aaa"
  end

  test "replace_trailing" do
    assert String.replace_trailing("   abc aa", "a", "b") == "   abc bb"
    assert String.replace_trailing("   abc __", "_", "b") == "   abc bb"
    assert String.replace_trailing(" aaaaaaaa", "a", "b") == " bbbbbbbb"
    assert String.replace_trailing(" aaaaaaaa", "aaa", "b") == " aabb"
    assert String.replace_trailing("aaaaaaaaa", "a", "b") == "bbbbbbbbb"
    assert String.replace_trailing("]]]]]]", "]", "[]") == "[][][][][][]"
    assert String.replace_trailing("]]]]]]]]", "]", "") == ""
    assert String.replace_trailing("] ]]]]]]", "]", "") == "] "
    assert String.replace_trailing("  cat 猫猫", "猫", "й") == "  cat йй"
    assert String.replace_trailing("test", "t", "T") == "tesT"
    assert String.replace_trailing("t", "t", "T") == "T"
    assert String.replace_trailing("aaa", "b", "c") == "aaa"
  end

  test "trim" do
    assert String.trim("") == ""
    assert String.trim("  abc ") == "abc"
    assert String.trim("a  abc  a\n\n") == "a  abc  a"
    assert String.trim("a  abc  a\t\n\v\f\r\s") == "a  abc  a"

    assert String.trim("___  abc  ___", "_") == "  abc  "
    assert String.trim("猫猫猫cat猫猫猫", "猫猫") == "猫cat猫"
    # no-break space
    assert String.trim("\u00A0a  abc  a\u00A0") == "a  abc  a"
    # whitespace defined as a range
    assert String.trim("\u2008a  abc  a\u2005") == "a  abc  a"
  end

  test "trim_leading" do
    assert String.trim_leading("") == ""
    assert String.trim_leading("   abc  ") == "abc  "
    assert String.trim_leading("a  abc  a") == "a  abc  a"
    assert String.trim_leading("\n\na  abc  a") == "a  abc  a"
    assert String.trim_leading("\t\n\v\f\r\sa  abc  a") == "a  abc  a"
    assert String.trim_leading(<<194, 133, "a  abc  a">>) == "a  abc  a"
    # information separators are not whitespace
    assert String.trim_leading("\u001F a  abc  a") == "\u001F a  abc  a"
    # no-break space
    assert String.trim_leading("\u00A0 a  abc  a") == "a  abc  a"

    assert String.trim_leading("aa aaa", "aaa") == "aa aaa"
    assert String.trim_leading("aaa aaa", "aa") == "a aaa"
    assert String.trim_leading("aa abc   ", "a") == " abc   "
    assert String.trim_leading("__ abc   ", "_") == " abc   "
    assert String.trim_leading("aaaaaaaaa ", "a") == " "
    assert String.trim_leading("aaaaaaaaaa", "a") == ""
    assert String.trim_leading("]]]]]] ]", "]") == " ]"
    assert String.trim_leading("猫猫 cat   ", "猫") == " cat   "
    assert String.trim_leading("test", "t") == "est"
    assert String.trim_leading("t", "t") == ""
    assert String.trim_leading("", "t") == ""
  end

  test "trim_trailing" do
    assert String.trim_trailing("") == ""
    assert String.trim_trailing("1\n") == "1"
    assert String.trim_trailing("\r\n") == ""
    assert String.trim_trailing("   abc  ") == "   abc"
    assert String.trim_trailing("   abc a") == "   abc a"
    assert String.trim_trailing("a  abc  a\n\n") == "a  abc  a"
    assert String.trim_trailing("a  abc  a\t\n\v\f\r\s") == "a  abc  a"
    assert String.trim_trailing(<<"a  abc  a", 194, 133>>) == "a  abc  a"
    # information separators are not whitespace
    assert String.trim_trailing("a  abc  a \u001F") == "a  abc  a \u001F"
    # no-break space
    assert String.trim_trailing("a  abc  a \u00A0") == "a  abc  a"

    assert String.trim_trailing("aaa aa", "aaa") == "aaa aa"
    assert String.trim_trailing("aaa aaa", "aa") == "aaa a"
    assert String.trim_trailing("   abc aa", "a") == "   abc "
    assert String.trim_trailing("   abc __", "_") == "   abc "
    assert String.trim_trailing(" aaaaaaaaa", "a") == " "
    assert String.trim_trailing("aaaaaaaaaa", "a") == ""
    assert String.trim_trailing("] ]]]]]]", "]") == "] "
    assert String.trim_trailing("  cat 猫猫", "猫") == "  cat "
    assert String.trim_trailing("test", "t") == "tes"
    assert String.trim_trailing("t", "t") == ""
    assert String.trim_trailing("", "t") == ""
  end

  test "pad_leading" do
    assert String.pad_leading("", 5) == "     "
    assert String.pad_leading("abc", 5) == "  abc"
    assert String.pad_leading("  abc  ", 9) == "    abc  "
    assert String.pad_leading("猫", 5) == "    猫"
    assert String.pad_leading("-", 0) == "-"
    assert String.pad_leading("-", 1) == "-"

    assert String.pad_leading("---", 5, "abc") == "ab---"
    assert String.pad_leading("---", 9, "abc") == "abcabc---"

    assert String.pad_leading("---", 5, ["abc"]) == "abcabc---"
    assert String.pad_leading("--", 6, ["a", "bc"]) == "abcabc--"

    assert_raise FunctionClauseError, fn ->
      String.pad_leading("-", -1)
    end

    assert_raise FunctionClauseError, fn ->
      String.pad_leading("-", 1, [])
    end

    message = "expected a string padding element, got: 10"
    assert_raise ArgumentError, message, fn ->
      String.pad_leading("-", 3, ["-", 10])
    end
  end

  test "pad_trailing" do
    assert String.pad_trailing("", 5) == "     "
    assert String.pad_trailing("abc", 5) == "abc  "
    assert String.pad_trailing("  abc  ", 9) == "  abc    "
    assert String.pad_trailing("猫", 5) == "猫    "
    assert String.pad_trailing("-", 0) == "-"
    assert String.pad_trailing("-", 1) == "-"

    assert String.pad_trailing("---", 5, "abc") == "---ab"
    assert String.pad_trailing("---", 9, "abc") == "---abcabc"

    assert String.pad_trailing("---", 5, ["abc"]) == "---abcabc"
    assert String.pad_trailing("--", 6, ["a", "bc"]) == "--abcabc"

    assert_raise FunctionClauseError, fn ->
      String.pad_trailing("-", -1)
    end

    assert_raise FunctionClauseError, fn ->
      String.pad_trailing("-", 1, [])
    end

    message = "expected a string padding element, got: 10"
    assert_raise ArgumentError, message, fn ->
      String.pad_trailing("-", 3, ["-", 10])
    end
  end

  test "reverse" do
    assert String.reverse("") == ""
    assert String.reverse("abc") == "cba"
    assert String.reverse("Hello World") == "dlroW olleH"
    assert String.reverse("Hello ∂og") == "go∂ olleH"
    assert String.reverse("Ā̀stute") == "etutsĀ̀"
    assert String.reverse(String.reverse("Hello World")) == "Hello World"
    assert String.reverse(String.reverse("Hello \r\n World")) == "Hello \r\n World"
  end

  test "replace" do
    assert String.replace("a,b,c", ",", "-") == "a-b-c"
    assert String.replace("a,b,c", [",", "b"], "-") == "a---c"

    assert String.replace("a,b,c", ",", "-", global: false) == "a-b,c"
    assert String.replace("a,b,c", [",", "b"], "-", global: false) == "a-b,c"
    assert String.replace("ãéã", "é", "e", global: false) == "ãeã"

    assert String.replace("a,b,c", ",", "[]", insert_replaced: 2) == "a[],b[],c"
    assert String.replace("a,b,c", ",", "[]", insert_replaced: [1, 1]) == "a[,,]b[,,]c"
    assert String.replace("a,b,c", "b", "[]", insert_replaced: 1, global: false) == "a,[b],c"

    assert String.replace("a,b,c", ~r/,(.)/, ",\\1\\1") == "a,bb,cc"
    assert String.replace("a,b,c", ~r/,(.)/, ",\\1\\1", global: false) == "a,bb,c"
  end

  test "duplicate" do
    assert String.duplicate("abc", 0) == ""
    assert String.duplicate("abc", 1) == "abc"
    assert String.duplicate("abc", 2) == "abcabc"
    assert String.duplicate("&ã$", 2) == "&ã$&ã$"

    assert_raise FunctionClauseError, fn ->
      String.duplicate("abc", -1)
    end
  end

  test "codepoints" do
    assert String.codepoints("elixir") == ["e", "l", "i", "x", "i", "r"]
    assert String.codepoints("elixír") == ["e", "l", "i", "x", "í", "r"] # slovak
    assert String.codepoints("ոգելից ըմպելիք") == ["ո", "գ", "ե", "լ", "ի", "ց", " ", "ը", "մ", "պ", "ե", "լ", "ի", "ք"] # armenian
    assert String.codepoints("эліксір") == ["э", "л", "і", "к", "с", "і", "р"] # belarussian
    assert String.codepoints("ελιξήριο") == ["ε", "λ", "ι", "ξ", "ή", "ρ", "ι", "ο"] # greek
    assert String.codepoints("סם חיים") == ["ס", "ם", " ", "ח", "י", "י", "ם"] # hebraic
    assert String.codepoints("अमृत") == ["अ", "म", "ृ", "त"] # hindi
    assert String.codepoints("স্পর্শমণি") == ["স", "্", "প", "র", "্", "শ", "ম", "ণ", "ি"] # bengali
    assert String.codepoints("સર્વશ્રેષ્ઠ ઇલાજ") == ["સ", "ર", "્", "વ", "શ", "્", "ર", "ે", "ષ", "્", "ઠ", " ", "ઇ", "લ", "ા", "જ"] # gujarati
    assert String.codepoints("世界中の一番") == ["世", "界", "中", "の", "一", "番"] # japanese
    assert String.codepoints("がガちゃ") == ["が", "ガ", "ち", "ゃ"]
    assert String.codepoints("") == []

    assert String.codepoints("ϖͲϥЫݎߟΈټϘለДШव׆ש؇؊صلټܗݎޥޘ߉ऌ૫ሏᶆ℆ℙℱ ⅚Ⅷ↠∈⌘①ffi") ==
           ["ϖ", "Ͳ", "ϥ", "Ы", "ݎ", "ߟ", "Έ", "ټ", "Ϙ", "ለ", "Д", "Ш", "व", "׆", "ש", "؇", "؊", "ص", "ل", "ټ", "ܗ", "ݎ", "ޥ", "ޘ", "߉", "ऌ", "૫", "ሏ", "ᶆ", "℆", "ℙ", "ℱ", " ", "⅚", "Ⅷ", "↠", "∈", "⌘", "①", "ffi"]
  end

  test "equivalent?" do
    assert String.equivalent?("", "")
    assert String.equivalent?("elixir", "elixir")
    # Composed and decomposed spellings render identically, so the second
    # operand of each pair is written with explicit escapes to keep the
    # intended byte sequence visible.
    assert String.equivalent?("뢴", "\u1105\u116C\u11AB")
    assert String.equivalent?("ṩ", "s\u0323\u0307")
    refute String.equivalent?("ELIXIR", "elixir")
    refute String.equivalent?("døge", "dóge")
  end

  test "normalize" do
    # Expected values are written with explicit escapes (derived from the
    # codepoint comments) because composed/decomposed forms are visually
    # indistinguishable in source.
    assert String.normalize("ŝ", :nfd) == "s\u0302"
    assert String.normalize("ḇravô", :nfd) == "b\u0331ravo\u0302"
    assert String.normalize("ṩierra", :nfd) == "s\u0323\u0307ierra"
    assert String.normalize("뢴", :nfd) == "\u1105\u116C\u11AB"
    assert String.normalize("êchǭ", :nfc) == "\u00EAch\u01ED"
    assert String.normalize("거̄", :nfc) == "\uAC70\u0304"
    assert String.normalize("뢴", :nfc) == "\uB8B4"

    ## Cases from NormalizationTest.txt

    # 05B8 05B9 05B1 0591 05C3 05B0 05AC 059F
    # 05B1 05B8 05B9 0591 05C3 05B0 05AC 059F
    # HEBREW POINT QAMATS, HEBREW POINT HOLAM, HEBREW POINT HATAF SEGOL,
    # HEBREW ACCENT ETNAHTA, HEBREW PUNCTUATION SOF PASUQ, HEBREW POINT SHEVA,
    # HEBREW ACCENT ILUY, HEBREW ACCENT QARNEY PARA
    assert String.normalize("\u05B8\u05B9\u05B1\u0591\u05C3\u05B0\u05AC\u059F", :nfc) ==
           "\u05B1\u05B8\u05B9\u0591\u05C3\u05B0\u05AC\u059F"

    # 095D (exclusion list)
    # 0922 093C
    # DEVANAGARI LETTER RHA
    assert String.normalize("\u095D", :nfc) == "\u0922\u093C"

    # 0061 0315 0300 05AE 0340 0062
    # 00E0 05AE 0300 0315 0062
    # LATIN SMALL LETTER A, COMBINING COMMA ABOVE RIGHT, COMBINING GRAVE ACCENT,
    # HEBREW ACCENT ZINOR, COMBINING GRAVE TONE MARK, LATIN SMALL LETTER B
    assert String.normalize("\u0061\u0315\u0300\u05AE\u0340\u0062", :nfc) ==
           "\u00E0\u05AE\u0300\u0315\u0062"

    # 0344
    # 0308 0301
    # COMBINING GREEK DIALYTIKA TONOS
    assert String.normalize("\u0344", :nfc) == "\u0308\u0301"

    # 115B9 0334 115AF
    # 115B9 0334 115AF
    # SIDDHAM VOWEL SIGN AI, COMBINING TILDE OVERLAY, SIDDHAM VOWEL SIGN AA
    assert String.normalize("\u{115B9}\u0334\u{115AF}", :nfc) == "\u{115B9}\u0334\u{115AF}"
  end

  test "graphemes" do
    # Extended
    assert String.graphemes("Ā̀stute") == ["Ā̀", "s", "t", "u", "t", "e"]
    # CLRF
    assert String.graphemes("\r\n\f") == ["\r\n", "\f"]
    # Regional indicator
    assert String.graphemes("\u{1F1E6}\u{1F1E7}\u{1F1E8}") == ["\u{1F1E6}\u{1F1E7}\u{1F1E8}"]
    # Hangul
    assert String.graphemes("\u1100\u115D\uB4A4") == ["ᄀᅝ뒤"]
    # Special Marking with Extended
    assert String.graphemes("a\u0300\u0903") == ["a\u0300\u0903"]
  end

  test "next grapheme" do
    assert String.next_grapheme("Ā̀stute") == {"Ā̀", "stute"}
    assert String.next_grapheme("") == nil
  end

  test "first" do
    assert String.first("elixir") == "e"
    assert String.first("íelixr") == "í"
    assert String.first("եոգլից ըմպելիք") == "ե"
    assert String.first("лэіксір") == "л"
    assert String.first("ελιξήριο") == "ε"
    assert String.first("סם חיים") == "ס"
    assert String.first("がガちゃ") == "が"
    assert String.first("Ā̀stute") == "Ā̀"
    assert String.first("") == nil
  end

  test "last" do
    assert String.last("elixir") == "r"
    assert String.last("elixrí") == "í"
    assert String.last("եոգլից ըմպելիքե") == "ե"
    assert String.last("ліксірэ") == "э"
    assert String.last("ειξήριολ") == "λ"
    assert String.last("סם ייםח") == "ח"
    assert String.last("がガちゃ") == "ゃ"
    assert String.last("Ā̀") == "Ā̀"
    assert String.last("") == nil
  end

  test "length" do
    assert String.length("elixir") == 6
    assert String.length("elixrí") == 6
    assert String.length("եոգլից") == 6
    assert String.length("ліксрэ") == 6
    assert String.length("ειξήριολ") == 8
    assert String.length("סם ייםח") == 7
    assert String.length("がガちゃ") == 4
    assert String.length("Ā̀stute") == 6
    assert String.length("") == 0
  end

  test "at" do
    assert String.at("л", 0) == "л"
    assert String.at("elixir", 1) == "l"
    assert String.at("がガちゃ", 2) == "ち"
    assert String.at("л", 10) == nil
    assert String.at("elixir", -1) == "r"
    assert String.at("がガちゃ", -2) == "ち"
    assert String.at("л", -3) == nil
    assert String.at("Ā̀stute", 1) == "s"
    assert String.at("elixir", 6) == nil

    assert_raise FunctionClauseError, fn ->
      String.at("elixir", 0.1)
    end

    assert_raise FunctionClauseError, fn ->
      String.at("elixir", -0.1)
    end
  end

  test "slice" do
    assert String.slice("elixir", 1, 3) == "lix"
    assert String.slice("あいうえお", 2, 2) == "うえ"
    assert String.slice("ειξήριολ", 2, 3) == "ξήρ"
    assert String.slice("elixir", 3, 4) == "xir"
    assert String.slice("あいうえお", 3, 5) == "えお"
    assert String.slice("ειξήριολ", 5, 4) == "ιολ"
    assert String.slice("elixir", -3, 2) == "xi"
    assert String.slice("あいうえお", -4, 3) == "いうえ"
    assert String.slice("ειξήριολ", -5, 3) == "ήρι"
    assert String.slice("elixir", -10, 1) == ""
    assert String.slice("あいうえお", -10, 2) == ""
    assert String.slice("ειξήριολ", -10, 3) == ""
    assert String.slice("elixir", 8, 2) == ""
    assert String.slice("あいうえお", 6, 2) == ""
    assert String.slice("ειξήριολ", 8, 1) == ""
    assert String.slice("ειξήριολ", 9, 1) == ""
    assert String.slice("elixir", 0, 0) == ""
    assert String.slice("elixir", 5, 0) == ""
    assert String.slice("elixir", -5, 0) == ""
    assert String.slice("", 0, 1) == ""
    assert String.slice("", 1, 1) == ""

    assert String.slice("elixir", 0..-2) == "elixi"
    assert String.slice("elixir", 1..3) == "lix"
    assert String.slice("elixir", -5..-3) == "lix"
    assert String.slice("elixir", -5..3) == "lix"
    assert String.slice("あいうえお", 2..3) == "うえ"
    assert String.slice("ειξήριολ", 2..4) == "ξήρ"
    assert String.slice("elixir", 3..6) == "xir"
    assert String.slice("あいうえお", 3..7) == "えお"
    assert String.slice("ειξήριολ", 5..8) == "ιολ"
    assert String.slice("elixir", -3..-2) == "xi"
    assert String.slice("あいうえお", -4..-2) == "いうえ"
    assert String.slice("ειξήριολ", -5..-3) == "ήρι"
    assert String.slice("elixir", 8..9) == ""
    assert String.slice("あいうえお", 6..7) == ""
    assert String.slice("ειξήριολ", 8..8) == ""
    assert String.slice("ειξήριολ", 9..9) == ""
    assert String.slice("", 0..0) == ""
    assert String.slice("", 1..1) == ""
    assert String.slice("あいうえお", -2..-4) == ""
    assert String.slice("あいうえお", -10..-15) == ""
    assert String.slice("hello あいうえお unicode", 8..-1) == "うえお unicode"
    assert String.slice("abc", -1..14) == "c"
  end

  test "valid?" do
    assert String.valid?("afds")
    assert String.valid?("øsdfh")
    assert String.valid?("dskfjあska")

    refute String.valid?(<<0xFFFF :: 16>>)
    refute String.valid?("asd" <> <<0xFFFF :: 16>>)
  end

  test "chunk valid" do
    assert String.chunk("", :valid) == []

    assert String.chunk("ødskfjあ\x11ska", :valid) == ["ødskfjあ\x11ska"]

    assert String.chunk("abc\u{0ffff}def", :valid) == ["abc", <<0x0FFFF::utf8>>, "def"]

    assert String.chunk("\u{0FFFE}\u{3FFFF}привет\u{0FFFF}мир", :valid) ==
           [<<0x0FFFE::utf8, 0x3FFFF::utf8>>, "привет", <<0x0FFFF::utf8>>, "мир"]

    assert String.chunk("日本\u{0FFFF}\u{FDEF}ござございます\u{FDD0}", :valid) ==
           ["日本", <<0x0FFFF::utf8, 0xFDEF::utf8>>, "ござございます", <<0xFDD0::utf8>>]
  end

  test "chunk printable" do
    assert String.chunk("", :printable) == []

    assert String.chunk("ødskfjあska", :printable) == ["ødskfjあska"]

    assert String.chunk("abc\u{0FFFF}def", :printable) == ["abc", <<0x0FFFF::utf8>>, "def"]

    assert String.chunk("\x06ab\x05cdef\x03\0", :printable) ==
           [<<6>>, "ab", <<5>>, "cdef", <<3, 0>>]
  end

  test "starts_with?" do
    assert String.starts_with? "hello", "he"
    assert String.starts_with? "hello", "hello"
    refute String.starts_with? "hello", []
    assert String.starts_with? "hello", ["hellö", "hell"]
    assert String.starts_with? "エリクシア", "エリ"
    refute String.starts_with? "hello", "lo"
    refute String.starts_with? "hello", "hellö"
    refute String.starts_with? "hello", ["hellö", "goodbye"]
    refute String.starts_with? "エリクシア", "仙丹"
  end

  test "ends_with?" do
    assert String.ends_with? "hello", "lo"
    assert String.ends_with? "hello", "hello"
    refute String.ends_with? "hello", []
    assert String.ends_with? "hello", ["hell", "lo", "xx"]
    assert String.ends_with? "hello", ["hellö", "lo"]
    assert String.ends_with? "エリクシア", "シア"
    refute String.ends_with? "hello", "he"
    refute String.ends_with? "hello", "hellö"
    refute String.ends_with? "hello", ["hel", "goodbye"]
    refute String.ends_with? "エリクシア", "仙丹"
  end

  test "contains?" do
    assert String.contains? "elixir of life", "of"
    assert String.contains? "エリクシア", "シ"
    refute String.contains? "elixir of life", []
    assert String.contains? "elixir of life", ["mercury", "life"]
    refute String.contains? "elixir of life", "death"
    refute String.contains? "エリクシア", "仙"
    refute String.contains? "elixir of life", ["death", "mercury", "eternal life"]
  end

  test "to charlist" do
    assert String.to_charlist("æß") == [?æ, ?ß]
    assert String.to_charlist("abc") == [?a, ?b, ?c]

    assert_raise UnicodeConversionError,
                 "invalid encoding starting at <<223, 255>>", fn ->
      String.to_charlist(<< 0xDF, 0xFF >>)
    end

    assert_raise UnicodeConversionError,
                 "incomplete encoding starting at <<195>>", fn ->
      String.to_charlist(<< 106, 111, 115, 195 >>)
    end
  end

  test "to float" do
    assert String.to_float("3.0") == 3.0

    three = fn -> "3" end
    assert_raise ArgumentError, fn -> String.to_float(three.()) end
  end

  test "jaro distance" do
    assert String.jaro_distance("same", "same") == 1.0
    assert String.jaro_distance("any", "") == 0.0
    assert String.jaro_distance("", "any") == 0.0
    assert String.jaro_distance("martha", "marhta") == 0.9444444444444445
    assert String.jaro_distance("martha", "marhha") == 0.888888888888889
    assert String.jaro_distance("marhha", "martha") == 0.888888888888889
    assert String.jaro_distance("dwayne", "duane") == 0.8222222222222223
    assert String.jaro_distance("dixon", "dicksonx") == 0.7666666666666666
    assert String.jaro_distance("xdicksonx", "dixon") == 0.7851851851851852
    assert String.jaro_distance("shackleford", "shackelford") == 0.9696969696969697
    assert String.jaro_distance("dunningham", "cunnigham") == 0.8962962962962964
    assert String.jaro_distance("nichleson", "nichulson") == 0.9259259259259259
    assert String.jaro_distance("jones", "johnson") == 0.7904761904761904
    assert String.jaro_distance("massey", "massie") == 0.888888888888889
    assert String.jaro_distance("abroms", "abrams") == 0.888888888888889
    assert String.jaro_distance("hardin", "martinez") == 0.7222222222222222
    assert String.jaro_distance("itman", "smith") == 0.4666666666666666
    assert String.jaro_distance("jeraldine", "geraldine") == 0.9259259259259259
    assert String.jaro_distance("michelle", "michael") == 0.8690476190476191
    assert String.jaro_distance("julies", "julius") == 0.888888888888889
    assert String.jaro_distance("tanya", "tonya") == 0.8666666666666667
    assert String.jaro_distance("sean", "susan") == 0.7833333333333333
    assert String.jaro_distance("jon", "john") == 0.9166666666666666
    assert String.jaro_distance("jon", "jan") == 0.7777777777777777
    assert String.jaro_distance("семена", "стремя") == 0.6666666666666666
  end

  test "difference/2" do
    assert String.myers_difference("", "abc") == [ins: "abc"]
    assert String.myers_difference("abc", "") == [del: "abc"]
    assert String.myers_difference("", "") == []
    assert String.myers_difference("abc", "abc") == [eq: "abc"]
    assert String.myers_difference("abc", "aйbc") == [eq: "a", ins: "й", eq: "bc"]
    assert String.myers_difference("aйbc", "abc") == [eq: "a", del: "й", eq: "bc"]
  end
end
| 41.791604 | 211 | 0.57252 |
9ec1bd3ad4d814b0c1f272017ddc6c60a9ea825e | 2,445 | exs | Elixir | bench/bench.encoder.exs | marcelotto/saxy | a942d8d2d3ff028294b667487b9251d690ca9364 | [
"MIT"
] | 217 | 2018-02-17T17:43:33.000Z | 2022-03-20T13:53:33.000Z | bench/bench.encoder.exs | marcelotto/saxy | a942d8d2d3ff028294b667487b9251d690ca9364 | [
"MIT"
] | 49 | 2018-05-02T05:11:08.000Z | 2022-03-16T13:30:17.000Z | bench/bench.encoder.exs | marcelotto/saxy | a942d8d2d3ff028294b667487b9251d690ca9364 | [
"MIT"
] | 28 | 2018-04-21T23:13:40.000Z | 2022-03-03T22:47:12.000Z | bench_options = [
  # Options handed to Benchee.run/2 below; each value in :inputs is the
  # fixture tag passed to the builder modules' build/1.
  time: 5,
  memory_time: 2,
  inputs: %{
    "simple document" => :simple,
    "deeply nested elements " => :nested,
    "long content element" => :long_content
  }
]
defmodule Bench.Saxy.Builder do
  # Builds the Saxy element trees used as benchmark fixtures.
  import Saxy.XML, only: [element: 3]

  # Fixture data computed at compile time so constructing it is not part of
  # the measured benchmark work.
  @strings for _ <- 0..999, do: "Jag Älskar Sverige"
  @long_string String.duplicate("Jag Älskar Sverige", 1000)

  # A small, flat document: three siblings, the middle one with ten children.
  def build(:simple) do
    middle_children = for n <- 0..9, do: element("element2.#{n}", [], "foo")

    element("root", [], [
      element("element1", [], []),
      element("element2", [], middle_children),
      element("element3", [], [])
    ])
  end

  # 1000 levels of single-child nesting wrapped around a text leaf.
  def build(:nested) do
    Enum.reduce(1000..1, "content", fn depth, inner ->
      element("element.#{depth}", [], inner)
    end)
  end

  # One element holding many small strings plus one holding a single huge string.
  def build(:long_content) do
    element("root", [], [
      element("many-strings", [], @strings),
      element("long-string", [], @long_string)
    ])
  end
end
defmodule Bench.XMLBuilder.Builder do
  # XmlBuilder counterparts of the Saxy benchmark fixtures.
  import XmlBuilder, only: [document: 3, element: 3]

  # Fixture data computed at compile time so constructing it is not part of
  # the measured benchmark work.
  @strings for _ <- 0..999, do: "Jag Älskar Sverige"
  @long_string String.duplicate("Jag Älskar Sverige", 1000)

  # A small, flat document: three siblings, the middle one with ten children.
  def build(:simple) do
    middle_children = for n <- 0..9, do: element("element2.#{n}", [], "foo")

    document("root", [], [
      element("element1", [], []),
      element("element2", [], middle_children),
      element("element3", [], [])
    ])
  end

  # Deep nesting over 1000..2, each step wrapping its child in a one-element list.
  def build(:nested) do
    nested =
      Enum.reduce(1000..2, "content", fn depth, inner ->
        [element("element.#{depth}", [], inner)]
      end)

    document("level1", [], nested)
  end

  # One element holding many small strings plus one holding a single huge string.
  def build(:long_content) do
    document("root", [], [
      element("many-strings", [], @strings),
      element("long-string", [], @long_string)
    ])
  end
end
# Benchmark three encoders over the shared inputs: Saxy, and XmlBuilder with
# and without output formatting. Each job builds its fixture via build/1 and
# then serializes it; the "apple" labels tag which comparisons are
# like-for-like (red vs. green apples).
Benchee.run(
  %{
    "Saxy (red apple)" => fn sample ->
      sample
      |> Bench.Saxy.Builder.build()
      |> Saxy.encode!()
    end,
    "XML Builder without formatting (red apple)" => fn sample ->
      sample
      |> Bench.XMLBuilder.Builder.build()
      |> XmlBuilder.generate(format: :none)
    end,
    "XML Builder with formatting (green apple)" => fn sample ->
      sample
      |> Bench.XMLBuilder.Builder.build()
      |> XmlBuilder.generate()
    end
  },
  bench_options
)
| 21.637168 | 64 | 0.540695 |
9ec1e9ae2b53ff9296ae0649796d7e96e498295a | 884 | exs | Elixir | mix.exs | chrismccord/beam_toolbox_web | 8aaf37b073cc4ca619e33d3742b51335a8944201 | [
"MIT"
] | 1 | 2018-01-14T09:41:05.000Z | 2018-01-14T09:41:05.000Z | mix.exs | chrismccord/beam_toolbox_web | 8aaf37b073cc4ca619e33d3742b51335a8944201 | [
"MIT"
] | null | null | null | mix.exs | chrismccord/beam_toolbox_web | 8aaf37b073cc4ca619e33d3742b51335a8944201 | [
"MIT"
] | null | null | null | defmodule BeamToolboxWeb.Mixfile do
  # Mix project definition for the beam_toolbox_web application
  # (pre-1.0 era: targets Elixir ~> 0.15.0 and uses paren-less calls).
  use Mix.Project

  def project do
    # NOTE(review): `deps: deps` (no parens) is pre-1.0 style and warns on
    # modern Elixir — change to `deps()` if the Elixir requirement is bumped.
    [ app: :beam_toolbox_web,
      version: "0.0.1",
      elixir: "~> 0.15.0",
      elixirc_paths: ["lib", "web"],
      deps: deps ]
  end

  # Configuration for the OTP application
  def application do
    [
      mod: { BeamToolboxWeb, [] },
      applications: [:phoenix, :beam_toolbox_data]
    ]
  end

  # Returns the list of dependencies in the format:
  #   { :foobar, git: "https://github.com/elixir-lang/foobar.git", tag: "0.1" }
  #
  # To specify particular versions, regardless of the tag, do:
  #   { :barbat, "~> 0.1", github: "elixir-lang/barbat" }
  defp deps do
    [
      {:phoenix, github: "phoenixframework/phoenix"},
      {:cowboy, "~> 1.0.0"},
      {:beam_toolbox_data, github: "knewter/beam_toolbox_data", env: Mix.env},
      {:cadfaerl, github: "ddossot/cadfaerl"}
    ]
  end
end
| 26 | 78 | 0.61086 |
9ec1f16e9ca9faf324cdc8004131b04e5125d66b | 2,682 | ex | Elixir | clients/admin/lib/google_api/admin/reports_v1/api/channels.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/admin/lib/google_api/admin/reports_v1/api/channels.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/admin/lib/google_api/admin/reports_v1/api/channels.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Admin.Reports_v1.Api.Channels do
  @moduledoc """
  API calls for all endpoints tagged `Channels`.
  """

  alias GoogleApi.Admin.Reports_v1.Connection
  alias GoogleApi.Gax.{Request, Response}

  @doc """
  Stop watching resources through this channel

  ## Parameters

  - connection (GoogleApi.Admin.Reports_v1.Connection): Connection to server
  - optional_params (KeywordList): [optional] Optional parameters
    - :alt (String.t): Data format for the response.
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
    - :userIp (String.t): Deprecated. Please use quotaUser instead.
    - :resource (Channel):

  ## Returns

  {:ok, %{}} on success
  {:error, info} on failure
  """
  @spec admin_channels_stop(Tesla.Env.client(), keyword()) :: {:ok, nil} | {:error, Tesla.Env.t()}
  def admin_channels_stop(connection, optional_params \\ [], opts \\ []) do
    # Where each supported option is placed in the outgoing request:
    # everything goes on the query string except :resource, which becomes the body.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :resource => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/admin/reports_v1/channels/stop")
      |> Request.add_optional_params(optional_params_config, optional_params)

    connection
    |> Connection.execute(request)
    # decode: false — per the doc above the endpoint yields no decodable payload.
    |> Response.decode(opts ++ [decode: false])
  end
end
| 37.25 | 170 | 0.698732 |
9ec1f9c9e6291711b8858b5bc9f40d2f9f6a7510 | 3,465 | ex | Elixir | lib/graphql/resolvers/project.ex | drj17/accent | 25b42dff1f9051e0a42ed7135890a9f740c65026 | [
"BSD-3-Clause"
] | null | null | null | lib/graphql/resolvers/project.ex | drj17/accent | 25b42dff1f9051e0a42ed7135890a9f740c65026 | [
"BSD-3-Clause"
] | null | null | null | lib/graphql/resolvers/project.ex | drj17/accent | 25b42dff1f9051e0a42ed7135890a9f740c65026 | [
"BSD-3-Clause"
] | null | null | null | defmodule Accent.GraphQL.Resolvers.Project do
require Ecto.Query
alias Accent.Scopes.Project, as: ProjectScope
alias Accent.{
GraphQL.Paginated,
Operation,
Plugs.GraphQLContext,
Project,
ProjectCreator,
ProjectDeleter,
ProjectUpdater,
Repo,
User
}
alias Ecto.Query
@typep project_operation :: {:ok, %{project: Project.t() | nil, errors: [String.t()] | nil}}
@spec create(any(), %{name: String.t(), language_id: String.t()}, GraphQLContext.t()) :: project_operation
def create(_, args, info) do
params = %{
"name" => args.name,
"main_color" => args.main_color,
"logo" => args.logo,
"language_id" => args.language_id
}
case ProjectCreator.create(params: params, user: info.context[:conn].assigns[:current_user]) do
{:ok, project} ->
{:ok, %{project: project, errors: nil}}
{:error, _reason} ->
{:ok, %{project: nil, errors: ["unprocessable_entity"]}}
end
end
@spec delete(Project.t(), any(), GraphQLContext.t()) :: project_operation
def delete(project, _, _) do
{:ok, _} = ProjectDeleter.delete(project: project)
{:ok, %{project: project, errors: nil}}
end
@spec update(Project.t(), %{name: String.t(), main_color: String.t()}, GraphQLContext.t()) :: project_operation
def update(project, args, info) do
args =
Map.merge(
%{
is_file_operations_locked: nil,
logo: nil
},
args
)
params = %{
"name" => args.name,
"main_color" => args.main_color,
"logo" => args.logo,
"locked_file_operations" => args.is_file_operations_locked
}
case ProjectUpdater.update(project: project, params: params, user: info.context[:conn].assigns[:current_user]) do
{:ok, project} ->
{:ok, %{project: project, errors: nil}}
{:error, _reason} ->
{:ok, %{project: nil, errors: ["unprocessable_entity"]}}
end
end
@spec list_viewer(User.t(), %{query: String.t(), page: number()}, GraphQLContext.t()) :: {:ok, Paginated.t(Project.t())}
def list_viewer(viewer, args, _info) do
paginated_projects =
Project
|> Query.join(:inner, [p], c in assoc(p, :collaborators))
|> Query.where([_, c], c.user_id == ^viewer.id)
|> Query.order_by([p, _], asc: p.name)
|> ProjectScope.from_search(args[:query])
|> ProjectScope.with_stats()
|> Paginated.paginate(args)
|> Paginated.format()
nodes_projects =
Project
|> Query.join(:inner, [p], c in assoc(p, :collaborators))
|> Query.where([_, c], c.user_id == ^viewer.id)
|> ProjectScope.from_ids(args[:node_ids])
|> ProjectScope.with_stats()
|> Repo.all()
projects = Map.put(paginated_projects, :nodes, nodes_projects)
{:ok, projects}
end
@spec show_viewer(any(), %{id: String.t()}, GraphQLContext.t()) :: {:ok, Project.t() | nil}
def show_viewer(_, %{id: id}, _) do
Project
|> ProjectScope.with_stats()
|> Repo.get(id)
|> (&{:ok, &1}).()
end
@spec last_activity(Project.t(), any(), GraphQLContext.t()) :: {:ok, Operation.t() | nil}
def last_activity(project, _, _) do
Operation
|> Query.join(:left, [o], r in assoc(o, :revision))
|> Query.where([o, r], r.project_id == ^project.id or o.project_id == ^project.id)
|> Query.order_by([o], desc: o.inserted_at)
|> Query.limit(1)
|> Repo.one()
|> (&{:ok, &1}).()
end
end
| 29.364407 | 122 | 0.598846 |
9ec21725d83485e4b759de812aba773883ab438e | 875 | exs | Elixir | test/epicenter_web/views/user_multifactor_auth_setup_view_test.exs | geometricservices/epi-viewpoin | ecb5316ea0f3f7299d5ff63e2de588539005ac1c | [
"Apache-2.0"
] | 5 | 2021-02-25T18:43:09.000Z | 2021-02-27T06:00:35.000Z | test/epicenter_web/views/user_multifactor_auth_setup_view_test.exs | geometricservices/epi-viewpoint | ecb5316ea0f3f7299d5ff63e2de588539005ac1c | [
"Apache-2.0"
] | 3 | 2021-12-13T17:52:47.000Z | 2021-12-17T01:35:31.000Z | test/epicenter_web/views/user_multifactor_auth_setup_view_test.exs | geometricservices/epi-viewpoint | ecb5316ea0f3f7299d5ff63e2de588539005ac1c | [
"Apache-2.0"
] | 1 | 2022-01-27T23:26:38.000Z | 2022-01-27T23:26:38.000Z | defmodule EpicenterWeb.UserMultifactorAuthSetupViewTest do
use EpicenterWeb.ConnCase, async: true
import EpicenterWeb.UserMultifactorAuthSetupView, only: [colorize_alphanumeric_string: 1]
describe "colorize_alphanumeric_string" do
test "wraps each character in a span with a 'letter' or 'number' class" do
assert colorize_alphanumeric_string("abc123DEF") ==
~s|<span class="letter">a</span>| <>
~s|<span class="letter">b</span>| <>
~s|<span class="letter">c</span>| <>
~s|<span class="number">1</span>| <>
~s|<span class="number">2</span>| <>
~s|<span class="number">3</span>| <>
~s|<span class="letter">D</span>| <>
~s|<span class="letter">E</span>| <>
~s|<span class="letter">F</span>|
end
end
end
| 41.666667 | 91 | 0.571429 |
9ec21898712ebceecd4897bd38d3d8ea295f2eb8 | 1,946 | exs | Elixir | apps/ewallet_api/test/ewallet_api/v1/views/user_view_test.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_api/test/ewallet_api/v1/views/user_view_test.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_api/test/ewallet_api/v1/views/user_view_test.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWalletAPI.V1.UserViewTest do
  use EWalletAPI.ViewCase, :v1
  alias Ecto.UUID
  alias EWalletAPI.V1.UserView
  alias EWalletDB.User

  describe "EWalletAPI.V1.UserView.render/2" do
    test "renders user.json with correct structure" do
      # Minimal in-memory user; unset fields (email, enabled, timestamps)
      # render as nil in the serialized output below.
      user = %User{
        id: UUID.generate(),
        username: "johndoe",
        full_name: "John Doe",
        calling_name: "John",
        provider_user_id: "provider_id_9999",
        metadata: %{
          first_name: "John",
          last_name: "Doe"
        }
      }

      # Full expected serialization, including the standard response envelope
      # (version/success) that the ViewCase helpers wrap around the data.
      expected = %{
        version: @expected_version,
        success: true,
        data: %{
          object: "user",
          id: user.id,
          socket_topic: "user:#{user.id}",
          provider_user_id: user.provider_user_id,
          username: user.username,
          full_name: user.full_name,
          calling_name: user.calling_name,
          email: user.email,
          enabled: user.enabled,
          avatar: %{
            original: nil,
            large: nil,
            small: nil,
            thumb: nil
          },
          metadata: %{
            first_name: "John",
            last_name: "Doe"
          },
          created_at: nil,
          updated_at: nil,
          encrypted_metadata: %{}
        }
      }

      assert render(UserView, "user.json", user: user) == expected
    end
  end
end
| 28.617647 | 74 | 0.598664 |
9ec22addb1ffe69802bae194954f27eda60f1976 | 2,158 | ex | Elixir | lib/wechat/utils/message_encryptor.ex | ZhengQingchen/wechat | c1d1cdbf2578fc27ece38b5ca702e22e7817da07 | [
"MIT"
] | 36 | 2016-09-08T06:12:34.000Z | 2018-08-14T03:45:16.000Z | lib/wechat/utils/message_encryptor.ex | ZhengQingchen/wechat | c1d1cdbf2578fc27ece38b5ca702e22e7817da07 | [
"MIT"
] | 15 | 2016-09-06T04:25:19.000Z | 2017-12-14T12:08:14.000Z | lib/wechat/utils/message_encryptor.ex | ZhengQingchen/wechat | c1d1cdbf2578fc27ece38b5ca702e22e7817da07 | [
"MIT"
] | 10 | 2016-09-06T03:58:10.000Z | 2017-12-14T06:36:51.000Z | defmodule Wechat.Utils.MessageEncryptor do
@moduledoc """
Encrypt and decrypt Wechat messages.
"""
@aes_block_size 16
@doc """
Encrypt plain text by AES-CBC padded by PKCS#7.
"""
def encrypt(msg, appid, encoding_aes_key) do
with aes_key <- aes_key(encoding_aes_key) do
msg
|> pack_appid(appid)
|> encode_padding_with_pkcs7(32)
|> encrypt_with_aes_cbc(aes_key)
|> Base.encode64()
end
end
@doc """
Decrypt cipher text with AES-CBC padded by PKCS#7.
"""
def decrypt(msg_encrypted, encoding_aes_key) do
with aes_key <- aes_key(encoding_aes_key) do
msg_encrypted
|> Base.decode64!()
|> decrypt_with_aes_cbc(aes_key)
|> decode_padding_with_pkcs7()
|> unpack_appid()
end
end
# random(16B) + msg_size(4B) + msg + appid
defp pack_appid(msg, appid) do
random = :crypto.strong_rand_bytes(16)
msg_size = byte_size(msg)
random <> <<msg_size::32>> <> msg <> appid
end
# random(16B) + msg_size(4B) + msg + appid
defp unpack_appid(<<_::binary-16, msg_size::32, msg::binary-size(msg_size), appid::binary>>) do
{appid, msg}
end
defp encode_padding_with_pkcs7(data, pad_block_size) do
pad = calc_pad(data, pad_block_size)
padding = String.duplicate(<<pad::8>>, pad)
data <> padding
end
defp decode_padding_with_pkcs7(data) do
data_size = byte_size(data)
<<pad::8>> = binary_part(data, data_size, -1)
binary_part(data, 0, data_size - pad)
end
defp encrypt_with_aes_cbc(plain_text, aes_key) do
iv = binary_part(aes_key, 0, @aes_block_size)
:crypto.block_encrypt(:aes_cbc, aes_key, iv, plain_text)
end
defp decrypt_with_aes_cbc(cipher_text, aes_key) do
iv = binary_part(aes_key, 0, @aes_block_size)
:crypto.block_decrypt(:aes_cbc, aes_key, iv, cipher_text)
end
defp calc_pad(data, pad_block_size) do
data_size = byte_size(data)
case rem(data_size, pad_block_size) do
0 -> pad_block_size
rem -> pad_block_size - rem
end
end
# get AES key from encoding_aes_key.
defp aes_key(encoding_aes_key) do
Base.decode64!(encoding_aes_key <> "=")
end
end
| 26.317073 | 97 | 0.675626 |
9ec23de7e699114490e882005a38f7f95bd43a48 | 75 | ex | Elixir | lib/termDirectory_web/views/layout_view.ex | nliechti/termDirectory | 267b3025c14e26575c7a9483692e94a7bf29e5fe | [
"MIT"
] | 1 | 2019-03-15T15:40:24.000Z | 2019-03-15T15:40:24.000Z | lib/termDirectory_web/views/layout_view.ex | nliechti/termDirectory | 267b3025c14e26575c7a9483692e94a7bf29e5fe | [
"MIT"
] | null | null | null | lib/termDirectory_web/views/layout_view.ex | nliechti/termDirectory | 267b3025c14e26575c7a9483692e94a7bf29e5fe | [
"MIT"
] | null | null | null | defmodule TermDirectoryWeb.LayoutView do
use TermDirectoryWeb, :view
end
| 18.75 | 40 | 0.84 |
9ec2581db0f9ba14883eb413b69d8c79aaa355f1 | 7,426 | ex | Elixir | apps/ewallet/lib/ewallet/web/filters/match_parser.ex | Macavirus/ewallet | ce62177b8bd3f7e72156930d384a1c4c047a3b5b | [
"Apache-2.0"
] | null | null | null | apps/ewallet/lib/ewallet/web/filters/match_parser.ex | Macavirus/ewallet | ce62177b8bd3f7e72156930d384a1c4c047a3b5b | [
"Apache-2.0"
] | null | null | null | apps/ewallet/lib/ewallet/web/filters/match_parser.ex | Macavirus/ewallet | ce62177b8bd3f7e72156930d384a1c4c047a3b5b | [
"Apache-2.0"
] | null | null | null | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWallet.Web.MatchParser do
  @moduledoc """
  This module allows parsing of arbitrary attributes into a filtering query.
  It takes in a request's attributes, parses only the attributes needed for filtering,
  then builds those attributes into a filtering query on top of the given `Ecto.Queryable`.
  """
  import Ecto.Query

  # Steps:
  # 1. Parse the list of `%{"field" => _, "comparator" => _, "value" => _}`
  #    into a list of `{field, subfield, type, comparator, value}` filter rules.
  # 2. Join the original queryable with the assocs needed to query for the parsed_input.
  #    Also build a map of positional reference of the joined assocs.
  # 3. Condition the joined queryable by the parsed_input.

  # FIX: the previous `@spec` declared `(..., map(), map(), [atom()], boolean(), atom())`,
  # which contradicted actual usage: `inputs` is a list of filter maps (guarded
  # with `is_list/1` below), `whitelist` a list of allowed field definitions,
  # `dynamic` an `Ecto.Query` dynamic expression, `query_module` the module
  # implementing `do_filter/5` and `do_filter_assoc/6`, and `mappings` a map of
  # field renames. `{:error, code, details}` tuples are also possible returns.
  @spec build_query(Ecto.Queryable.t(), [map()], list(), any(), module(), map()) ::
          Ecto.Queryable.t() | {:error, atom(), any()}
  def build_query(queryable, inputs, whitelist, dynamic, query_module, mappings \\ %{}) do
    with rules when is_list(rules) <- parse_rules(inputs, whitelist, mappings),
         {queryable, assoc_positions} <- join_assocs(queryable, rules),
         {:ok, queryable} <- filter(queryable, assoc_positions, rules, dynamic, query_module),
         queryable <- add_distinct(queryable) do
      queryable
    else
      error -> error
    end
  end

  # Rejects if the user didn't provided the filters as a list.
  defp parse_rules(inputs, _whitelist, _mappings) when not is_list(inputs) do
    # In other usual cases where only a param is missing, the 3rd tuple element returned would be
    # a map of parameters that the code successfully detected, so the user could figure out
    # what is missing. In this case, where the params are totally invalid, the 3rd tuple element
    # is therefore an empty map.
    {:error, :missing_filter_param, %{}}
  end

  # Parses a list of arbitrary `%{"field" => _, "comparator" => _, "value" => _}`
  # into a list of `{field, subfield, type, comparator, value}`.
  # Halts at the first invalid rule and propagates its error tuple.
  defp parse_rules(inputs, whitelist, mappings) do
    Enum.reduce_while(inputs, [], fn input, accumulator ->
      case parse_rule(input, whitelist, mappings) do
        {:error, _} = error ->
          {:halt, error}

        {:error, _, _} = error ->
          {:halt, error}

        parsed ->
          {:cont, [parsed | accumulator]}
      end
    end)
  end

  # ------------------------------
  # Parses a single filter
  # ------------------------------
  defp parse_rule(
         %{"field" => field, "comparator" => comparator, "value" => value},
         whitelist,
         mappings
       ) do
    fieldset = parse_fieldset(field, mappings)

    case find_field_definition(fieldset, whitelist) do
      nil ->
        {:error, :not_allowed, field}

      field_definition ->
        {field_definition, comparator, value}
    end
  end

  defp parse_rule(params, _, _) do
    {:error, :missing_filter_param, params}
  end

  @spec parse_fieldset(String.t(), map()) ::
          atom()
          | {atom(), atom()}
          | {:error, :not_supported, String.t()}
          | {:error, :not_allowed, String.t()}
  defp parse_fieldset(field, mappings) do
    splitted = String.split(field, ".")

    # Avoid unneccessarily counting deeply nested values by taking out only 3 items.
    case Enum.take(splitted, 3) do
      [field] ->
        String.to_existing_atom(mappings[field] || field)

      [field, subfield] ->
        {String.to_existing_atom(mappings[field] || field), String.to_existing_atom(subfield)}

      [field, _subfield, _too_deep] ->
        {:error, :not_supported, field}
    end
  rescue
    # Handles non-existing atom. `String.to_existing_atom/1` is used on
    # purpose so user input cannot create new atoms.
    _ in ArgumentError ->
      {:error, :not_allowed, field}
  end

  # Find the field definition from the whitelist.
  #
  # Return values:
  # - `{field, nil}` when the definition does not indicate a type
  # - `{field, type}` when the definition indicates a type
  # - `{field, subfield, nil}` when the definition does not indicate a type
  # - `{field, subfield, type}` when the definition indicates a type
  # - `nil` when no matching field definitions could be found
  @spec find_field_definition(atom(), list()) :: {atom(), atom()} | {atom(), nil} | nil
  @spec find_field_definition({atom(), atom()}, list()) ::
          {atom(), atom(), atom()} | {atom(), atom(), nil} | nil
  defp find_field_definition(field_or_tuple, whitelist) when is_list(whitelist) do
    Enum.find_value(whitelist, fn w -> get_field_definition(field_or_tuple, w) end)
  end

  # If the parent field matches, find the definition of the `subfield` in the `allowed_subfields`
  defp get_field_definition({field, subfield}, {field, allowed_subfields}) do
    case find_field_definition(subfield, allowed_subfields) do
      {_, nil} -> {field, subfield, nil}
      {_, type} -> {field, subfield, type}
      nil -> nil
    end
  end

  # Returns `{field, nil}` if the type is not given
  defp get_field_definition(field, field), do: {field, nil}

  # Returns `{field, type}` if the type is given
  defp get_field_definition(field, {field, type}), do: {field, type}

  # Returns nil if the field does not match
  defp get_field_definition(_, _), do: nil

  # Left-joins the queryable with every association referenced by a
  # `{field, subfield, type}` rule, and records each assoc's join position so
  # `filter/5` can reference the right binding later.
  defp join_assocs(queryable, rules) do
    {queryable, joined_assocs} =
      Enum.reduce(rules, {queryable, []}, fn rule, {queryable, joined_assocs} ->
        {field_definition, _comparator, _value} = rule

        case field_definition do
          {_field, _type} ->
            {queryable, joined_assocs}

          {field, _subfield, _type} ->
            queryable = join(queryable, :left, [q], assoc in assoc(q, ^field))
            joined_assocs = [field | joined_assocs]

            {queryable, joined_assocs}
        end
      end)

    joined_assocs =
      joined_assocs
      |> Enum.reverse()
      |> Enum.with_index()

    {queryable, joined_assocs}
  end

  # Folds every rule into the dynamic expression via the query module,
  # halting on the first `{:error, _, _}` it produces.
  defp filter(queryable, assoc_positions, rules, initial_dynamic, query_module) do
    dynamic =
      Enum.reduce_while(rules, initial_dynamic, fn rule, dynamic ->
        {field_definition, comparator, value} = rule

        query =
          case field_definition do
            {field, type} ->
              query_module.do_filter(dynamic, field, type, comparator, value)

            {field, subfield, type} ->
              # +1 because binding 0 is the root queryable itself.
              position = assoc_positions[field] + 1
              query_module.do_filter_assoc(dynamic, position, subfield, type, comparator, value)
          end

        case query do
          {:error, _, _} = error ->
            {:halt, error}

          query ->
            {:cont, query}
        end
      end)

    case dynamic do
      {:error, _, _} = error ->
        error

      dynamic ->
        {:ok, from(queryable, where: ^dynamic)}
    end
  end

  # The left joins above can multiply rows; de-duplicate unless the caller
  # already set a distinct clause.
  defp add_distinct(%Ecto.Query{distinct: nil} = queryable) do
    distinct(queryable, true)
  end

  defp add_distinct(queryable), do: queryable
end
| 34.700935 | 97 | 0.641395 |
9ec25915170614716682494995d0a62217e46f9c | 1,477 | ex | Elixir | apps/streamer/lib/streamer/binance.ex | frathon/hedgehog | 3ed1469919ba819280709a8f26def761003a99df | [
"Unlicense"
] | 65 | 2020-07-07T01:51:27.000Z | 2021-09-27T00:13:59.000Z | apps/streamer/lib/streamer/binance.ex | Cinderella-Man/hedgehog | 3ed1469919ba819280709a8f26def761003a99df | [
"Unlicense"
] | 5 | 2021-02-12T08:21:15.000Z | 2021-09-01T21:17:27.000Z | apps/streamer/lib/streamer/binance.ex | frathon/hedgehog | 3ed1469919ba819280709a8f26def761003a99df | [
"Unlicense"
] | 10 | 2020-08-13T13:39:31.000Z | 2021-09-14T12:46:51.000Z | defmodule Streamer.Binance do
use WebSockex
require Logger
defmodule State do
@enforce_keys [:symbol]
defstruct [:symbol]
end
@stream_endpoint "wss://stream.binance.com:9443/ws/"
def start_link(symbol) do
Logger.info("Streamer.Binance is connecting to #{symbol} trade events stream")
WebSockex.start_link(
"#{@stream_endpoint}#{String.downcase(symbol)}@trade",
__MODULE__,
%State{
symbol: symbol
},
name: via_tuple(symbol)
)
end
def handle_frame({_type, msg}, state) do
case Jason.decode(msg) do
{:ok, event} -> process_event(event, state)
{:error, _} -> throw("Unable to parse msg: #{msg}")
end
{:ok, state}
end
def process_event(%{"e" => "trade"} = event, state) do
trade_event = %Core.Struct.TradeEvent{
:event_type => event["e"],
:event_time => event["E"],
:symbol => event["s"],
:trade_id => event["t"],
:price => event["p"],
:quantity => event["q"],
:buyer_order_id => event["b"],
:seller_order_id => event["a"],
:trade_time => event["T"],
:buyer_market_maker => event["m"]
}
Logger.debug(
"Trade event received " <>
"#{trade_event.symbol}@#{trade_event.price}"
)
Phoenix.PubSub.broadcast(
Core.PubSub,
"TRADE_EVENTS:#{state.symbol}",
trade_event
)
end
defp via_tuple(symbol) do
{:via, Registry, {:binance_workers, symbol}}
end
end
| 22.723077 | 82 | 0.593094 |
9ec25e229de280e94307002644bb0f44e48b282a | 7,553 | ex | Elixir | lib/nets_easy/model/create_payment_request.ex | hooplab/nets-easy-elixir | 5d60a31ef36bb1518d7a3768851d09b258f62411 | [
"MIT"
] | null | null | null | lib/nets_easy/model/create_payment_request.ex | hooplab/nets-easy-elixir | 5d60a31ef36bb1518d7a3768851d09b258f62411 | [
"MIT"
] | null | null | null | lib/nets_easy/model/create_payment_request.ex | hooplab/nets-easy-elixir | 5d60a31ef36bb1518d7a3768851d09b258f62411 | [
"MIT"
] | null | null | null | defmodule NetsEasy.Model.CreatePaymentRequest do
@moduledoc """
"""
@typedoc """
Root request for creating a payment request.
See https: //tech.dibspayment.com/easy/api/paymentapi
"""
@type t :: %__MODULE__{
order: Order.t(),
merchant_number: Integer.t() | nil,
checkout: Checkout.t(),
notifications: Notifications.t() | nil,
payment_methods: [PaymentMethod.t()] | nil
}
@derive Poison.Encoder
defstruct [
:order,
:merchant_number,
:checkout,
:notifications,
:payment_methods
]
defmodule Order do
@moduledoc """
"""
alias NetsEasy.Model
@typedoc """
.order
"""
@type t :: %__MODULE__{
items: [Model.OrderItem.t()],
amount: Integer.t(),
currency: Currency.t(),
reference: String.t()
}
@derive Poison.Encoder
defstruct [
:items,
:amount,
:currency,
:reference
]
end
defmodule Checkout do
@moduledoc """
"""
@typedoc """
.checkout
"""
@type t :: %__MODULE__{
charge: boolean() | nil,
public_device: boolean() | nil,
integration_type: integration_type() | nil,
url: String.t() | nil,
return_url: String.t() | nil,
terms_url: String.t(),
appearance: Appearance.t() | nil,
merchant_handles_consumer_data: boolean() | nil,
consumer: Consumer.t() | nil,
consumer_type: ConsumerType.t(),
merchant_handles_shipping_cost: boolean(),
shipping: Shipping.t() | nil
}
@derive Poison.Encoder
defstruct [
:charge,
:public_device,
:integration_type,
:url,
:return_url,
:terms_url,
:appearance,
:merchant_handles_consumer_data,
:consumer,
:consumer_type,
:merchant_handles_shipping_cost,
:shipping
]
# is this correct? seems weird
@type integration_type :: :EmbeddedCheckout | :hostedPaymentPage
defmodule Appearance do
@moduledoc """
"""
@typedoc """
.checkout.appearance
"""
@type t :: %__MODULE__{
display_options: DisplayOptions.t() | nil,
text_options: TextOptions.t() | nil
}
@derive Poison.Encoder
defstruct [
:display_options,
:text_options
]
defmodule DisplayOptions do
@moduledoc """
"""
@typedoc """
.checkout.appearance.display_options
"""
@type t :: %__MODULE__{
show_merchant_name: boolean() | nil,
show_order_summary: boolean() | nil
}
@derive Poison.Encoder
defstruct [
:show_merchant_name,
:show_order_summary
]
end
defmodule TextOptions do
@moduledoc """
"""
@typedoc """
.checkout.appearance.text_options
"""
@type t :: %__MODULE__{
complete_payment_button_text: String.t()
}
@derive Poison.Encoder
defstruct [
:complete_payment_button_text
]
end
end
defmodule Consumer do
@moduledoc """
"""
@typedoc """
.checkout.consumer
"""
@type t :: %__MODULE__{
reference: String.t(),
email: String.t(),
shipping_address: Model.ShippingAddress.t(),
phone_number: Model.PhoneNumber.t(),
private_person: PrivatePerson.t() | nil,
company: Company.t() | nil
}
@derive Poison.Encoder
defstruct [
:reference,
:email,
:shipping_address,
:phone_number,
:private_person,
:company
]
defmodule PrivatePerson do
@moduledoc """
"""
@typedoc """
.checkout.consumer.private_person
"""
@type t :: %__MODULE__{
first_name: String.t(),
last_name: String.t()
}
@derive Poison.Encoder
defstruct [
:first_name,
:last_name
]
end
defmodule Company do
@moduledoc """
"""
@typedoc """
.checkout.consumer.company
"""
@type t :: %__MODULE__{
name: String.t(),
contact: Contact.t()
}
@derive Poison.Encoder
defstruct [
:name,
:contact
]
defmodule Contact do
@moduledoc """
"""
@typedoc """
.checkout.consumer.company.contact
"""
@type t :: %__MODULE__{
first_name: String.t(),
last_name: String.t()
}
@derive Poison.Encoder
defstruct [
:first_name,
:last_name
]
end
end
end
defmodule ConsumerType do
@moduledoc """
"""
@typedoc """
.checkout.consumer_type
"""
@type t :: %__MODULE__{
default: consumer_type,
supported_types: [consumer_type]
}
@type consumer_type :: :B2C | :B2B
@derive Poison.Encoder
defstruct [
:default,
:supported_types
]
end
defmodule Shipping do
@moduledoc """
"""
@typedoc """
.checkout.shipping
"""
@type t :: %__MODULE__{
countries: [CountryCode.t()]
}
@derive Poison.Encoder
defstruct [
:countries
]
defmodule CountryCode do
# TODO: add more here
@type t :: :NO | :SWE | :DK
end
end
end
defmodule Notifications do
@moduledoc """
"""
@typedoc """
.notifications
"""
@type t :: %__MODULE__{
web_hooks: [WebHook.t()]
}
@derive Poison.Encoder
defstruct [
:web_hooks
]
defmodule WebHook do
@type t :: %__MODULE__{
event_name: String.t(),
url: String.t(),
authorization: String.t()
}
@derive Poison.Encoder
defstruct [
:event_name,
:url,
:authorization
]
end
end
defmodule PaymentMethod do
@moduledoc """
Used for setting fees for invoice payments, only works for invoice payments.
I don't think name can be anything other than 'easyinvoice'
"""
@typedoc """
.payment_method
"""
@type t :: %__MODULE__{
name: String.t(),
fee: Fee.t()
}
@derive Poison.Decoder
defstruct [
:name,
:fee
]
defmodule Fee do
@moduledoc """
"""
@typedoc """
.payment_method.fee
"""
@type t :: %__MODULE__{
reference: String.t(),
name: String.t(),
quantity: Integer.t(),
unit: String.t(),
unit_price: Integer.t(),
tax_rate: Integer.t(),
tax_amount: Integer.t(),
gross_total_amount: Integer.t(),
net_total_amount: Integer.t()
}
@derive Poison.Decoder
defstruct [
:reference,
:name,
:quantity,
:unit,
:unit_price,
:tax_rate,
:tax_amount,
:gross_total_amount,
:net_total_amount
]
end
end
end
| 21.704023 | 80 | 0.493314 |
9ec29b6d602da6e2327f357f7ec8b261b6989d49 | 2,902 | ex | Elixir | lib/exduckdb/pragma.ex | mpope9/exduckdb | 2295022d8b2d8bb902feaf63d1a06d22b386c630 | [
"MIT"
] | 2 | 2021-12-04T08:38:13.000Z | 2021-12-17T02:25:59.000Z | lib/exduckdb/pragma.ex | mpope9/exduckdb | 2295022d8b2d8bb902feaf63d1a06d22b386c630 | [
"MIT"
] | null | null | null | lib/exduckdb/pragma.ex | mpope9/exduckdb | 2295022d8b2d8bb902feaf63d1a06d22b386c630 | [
"MIT"
] | null | null | null | defmodule Exduckdb.Pragma do
@moduledoc """
Handles parsing extra options for the SQLite connection
"""
def busy_timeout(nil), do: busy_timeout([])
def busy_timeout(options) do
Keyword.get(options, :busy_timeout, 2000)
end
def journal_mode(nil), do: journal_mode([])
def journal_mode(options) do
case Keyword.get(options, :journal_mode, :delete) do
:delete -> "delete"
:memory -> "memory"
:off -> "off"
:persist -> "persist"
:truncate -> "truncate"
:wal -> "wal"
_ -> raise ArgumentError, "invalid :journal_mode"
end
end
def temp_store(nil), do: temp_store([])
def temp_store(options) do
case Keyword.get(options, :temp_store, :default) do
:file -> 1
:memory -> 2
:default -> 0
_ -> raise ArgumentError, "invalid :temp_store"
end
end
def synchronous(nil), do: synchronous([])
def synchronous(options) do
case Keyword.get(options, :synchronous, :normal) do
:extra -> 3
:full -> 2
:normal -> 1
:off -> 0
_ -> raise ArgumentError, "invalid :synchronous"
end
end
def foreign_keys(nil), do: foreign_keys([])
def foreign_keys(options) do
case Keyword.get(options, :foreign_keys, :on) do
:off -> 0
:on -> 1
_ -> raise ArgumentError, "invalid :foreign_keys"
end
end
def cache_size(nil), do: cache_size([])
def cache_size(options) do
Keyword.get(options, :cache_size, -2000)
end
def cache_spill(nil), do: cache_spill([])
def cache_spill(options) do
case Keyword.get(options, :cache_spill, :on) do
:off -> 0
:on -> 1
_ -> raise ArgumentError, "invalid :cache_spill"
end
end
def case_sensitive_like(nil), do: case_sensitive_like([])
def case_sensitive_like(options) do
case Keyword.get(options, :case_sensitive_like, :off) do
:off -> 0
:on -> 1
_ -> raise ArgumentError, "invalid :case_sensitive_like"
end
end
def auto_vacuum(nil), do: auto_vacuum([])
def auto_vacuum(options) do
case Keyword.get(options, :auto_vacuum, :none) do
:none -> 0
:full -> 1
:incremental -> 2
_ -> raise ArgumentError, "invalid :auto_vacuum"
end
end
def locking_mode(nil), do: locking_mode([])
def locking_mode(options) do
case Keyword.get(options, :locking_mode, :normal) do
:normal -> "NORMAL"
:exclusive -> "EXCLUSIVE"
_ -> raise ArgumentError, "invalid :locking_mode"
end
end
def secure_delete(nil), do: secure_delete([])
def secure_delete(options) do
case Keyword.get(options, :secure_delete, :off) do
:off -> 0
:on -> 1
_ -> raise ArgumentError, "invalid :secure_delete"
end
end
def wal_auto_check_point(nil), do: wal_auto_check_point([])
def wal_auto_check_point(options) do
Keyword.get(options, :wal_auto_check_point, 1000)
end
end
| 23.786885 | 62 | 0.634735 |
9ec2b30f76ab9e93bea71b6db6fb4313dd3517e7 | 533 | ex | Elixir | eyes/lib/application.ex | mashbytes/sentry | d4b13419694d0e30199af6ff0f8a2b68ef54fefa | [
"MIT"
] | null | null | null | eyes/lib/application.ex | mashbytes/sentry | d4b13419694d0e30199af6ff0f8a2b68ef54fefa | [
"MIT"
] | 2 | 2021-03-09T20:59:23.000Z | 2021-05-10T18:01:00.000Z | eyes/lib/application.ex | mashbytes/sentry | d4b13419694d0e30199af6ff0f8a2b68ef54fefa | [
"MIT"
] | null | null | null | defmodule Eyes.Application do
@moduledoc false
use Application
def start(_type, _args) do
import Supervisor.Spec, warn: false
camera = Application.get_env(:picam, :camera, Picam.Camera)
port = Application.get_env(:picam, :http_port, 4001)
IO.puts("Camera #{camera}")
children = [
worker(camera, []),
{Plug.Cowboy, scheme: :http, plug: Eyes.Router, options: [port: port]}
]
opts = [strategy: :one_for_one, name: Eyes.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 22.208333 | 76 | 0.666041 |
9ec2bf885d51f3b71263f63d27c82027c06d5814 | 2,813 | ex | Elixir | clients/big_query/lib/google_api/big_query/v2/model/table_data_insert_all_request.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/big_query/lib/google_api/big_query/v2/model/table_data_insert_all_request.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/big_query/lib/google_api/big_query/v2/model/table_data_insert_all_request.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.BigQuery.V2.Model.TableDataInsertAllRequest do
  @moduledoc """
  Request body for BigQuery's `tabledata.insertAll` streaming insert.

  ## Attributes

  - ignoreUnknownValues (boolean()): [Optional] Accept rows that contain values that do not match the schema. The unknown values are ignored. Default is false, which treats unknown values as errors. Defaults to: `null`.
  - kind (String.t): The resource type of the response. Defaults to: `null`.
  - rows ([TableDataInsertAllRequestRows]): The rows to insert. Defaults to: `null`.
  - skipInvalidRows (boolean()): [Optional] Insert all valid rows of a request, even if invalid rows exist. The default value is false, which causes the entire request to fail if any invalid rows exist. Defaults to: `null`.
  - templateSuffix (String.t): [Experimental] If specified, treats the destination table as a base template, and inserts the rows into an instance table named \"{destination}{templateSuffix}\". BigQuery will manage creation of the instance table, using the schema of the base template table. See https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables for considerations when working with templates tables. Defaults to: `null`.
  """

  # field/1,3 macros and decode/2 are provided by GoogleApi.Gax.ModelBase.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :ignoreUnknownValues => any(),
          :kind => any(),
          :rows => list(GoogleApi.BigQuery.V2.Model.TableDataInsertAllRequestRows.t()),
          :skipInvalidRows => any(),
          :templateSuffix => any()
        }

  field(:ignoreUnknownValues)
  field(:kind)
  field(:rows, as: GoogleApi.BigQuery.V2.Model.TableDataInsertAllRequestRows, type: :list)
  field(:skipInvalidRows)
  field(:templateSuffix)
end
# Delegates Poison decoding to the generated model's decode/2 (from ModelBase).
defimpl Poison.Decoder, for: GoogleApi.BigQuery.V2.Model.TableDataInsertAllRequest do
  def decode(value, options) do
    GoogleApi.BigQuery.V2.Model.TableDataInsertAllRequest.decode(value, options)
  end
end
# Delegates Poison encoding to the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.BigQuery.V2.Model.TableDataInsertAllRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 46.883333 | 460 | 0.748667 |
9ec2c23b0872b479efba38e678fdbc94eaf41517 | 636 | exs | Elixir | test/codec_pcma_test.exs | xirsys/xmedialib | 8e59b7691ffae93ea62fd9f037bf380c7f676ba0 | [
"Apache-2.0"
] | 7 | 2018-12-10T13:50:05.000Z | 2020-10-20T14:16:42.000Z | test/codec_pcma_test.exs | xirsys/xmedialib | 8e59b7691ffae93ea62fd9f037bf380c7f676ba0 | [
"Apache-2.0"
] | null | null | null | test/codec_pcma_test.exs | xirsys/xmedialib | 8e59b7691ffae93ea62fd9f037bf380c7f676ba0 | [
"Apache-2.0"
] | 4 | 2019-01-09T17:46:44.000Z | 2020-02-02T12:00:04.000Z | defmodule XMediaLib.CodecPcmaTest do
  use ExUnit.Case

  alias XMediaLib.TestUtils

  # Fixture round-trip tests for the G.711 A-law (PCMA) codec.
  # NOTE(review): relies on binary fixtures under test/samples/pcma/; the
  # chunk sizes (160 encoded bytes / 320 PCM bytes per frame) must match
  # what those files were generated with — confirm before changing them.

  test "decoding from G.711a to PCM" do
    assert TestUtils.codec_decode(
             "test/samples/pcma/raw-alaw.raw",
             "test/samples/pcma/raw-pcm16.from_alaw",
             160,
             "G.711a / PCMA",
             {'PCMA', 8000, 1}
           )
  end

  test "encoding from PCM to G.711a" do
    assert TestUtils.codec_encode(
             "test/samples/pcma/raw-pcm16.raw",
             "test/samples/pcma/raw-alaw.from_pcm",
             320,
             "G.711a / PCMA",
             {'PCMA', 8000, 1}
           )
  end
end
| 25.44 | 53 | 0.539308 |
9ec2c2ce2a8347be5eadf7c1003be22861fda8c0 | 805 | exs | Elixir | test/credo/check/refactor/long_quote_blocks_test.exs | elixir-twister/credo | 619e0ec6e244c5e0c12eeeb58ed9de97e1571d99 | [
"MIT"
] | null | null | null | test/credo/check/refactor/long_quote_blocks_test.exs | elixir-twister/credo | 619e0ec6e244c5e0c12eeeb58ed9de97e1571d99 | [
"MIT"
] | null | null | null | test/credo/check/refactor/long_quote_blocks_test.exs | elixir-twister/credo | 619e0ec6e244c5e0c12eeeb58ed9de97e1571d99 | [
"MIT"
] | null | null | null | defmodule Credo.Check.Refactor.LongQuoteBlocksTest do
  use Credo.TestHelper

  @described_check Credo.Check.Refactor.LongQuoteBlocks

  #
  # cases NOT raising issues
  #

  # A quote block with a single short function stays under the check's
  # default line-count threshold, so no issue may be reported.
  test "it should NOT report expected code" do
    """
    defmodule CredoSampleModule do
    defmacro __using__(opts) do
    quote do
    def some_fun do
    some_stuff()
    end
    end
    end
    end
    """ |> to_source_file
    |> refute_issues(@described_check)
  end

  #
  # cases raising issues
  #

  # With max_line_count: 2 the quote block below exceeds the limit and
  # must be reported exactly once.
  test "it should report a violation" do
    """
    defmodule CredoSampleModule do
    defmacro __using__(opts) do
    quote do
    def some_fun do
    some_stuff()
    end
    def some_fun do
    some_stuff()
    end
    end
    end
    end
    """ |> to_source_file
    |> assert_issue(@described_check, max_line_count: 2)
  end
end
| 16.428571 | 56 | 0.667081 |
9ec2f20100cfee190b009508d1dd9d876289fb2a | 1,020 | ex | Elixir | lib/ex_twilio/resources/feedback.ex | antoinereyt/ex_twilio | 00276cb855574f2dfd89f72f260a1d56136d48aa | [
"MIT"
] | null | null | null | lib/ex_twilio/resources/feedback.ex | antoinereyt/ex_twilio | 00276cb855574f2dfd89f72f260a1d56136d48aa | [
"MIT"
] | null | null | null | lib/ex_twilio/resources/feedback.ex | antoinereyt/ex_twilio | 00276cb855574f2dfd89f72f260a1d56136d48aa | [
"MIT"
] | 2 | 2022-03-08T22:05:17.000Z | 2022-03-09T05:29:46.000Z | defmodule ExTwilio.Feedback do
  @moduledoc """
  Represents a Call Feedback resource in the Twilio API.

  - [Twilio docs](https://www.twilio.com/docs/voice/api/feedback-resource)

  ## Examples

  Since Call Feedback is a nested resource in the Twilio API, you must
  pass in a parent Call SID to all functions in this module.

      ExTwilio.Feedback.create([quality_score: 5], [call: "call_sid"])
      ExTwilio.Feedback.find(call: "call_sid")
  """

  defstruct quality_score: nil,
            issues: nil

  # Only :create is imported from the generic resource; reads go through find/1.
  use ExTwilio.Resource, import: [:create]

  @doc """
  Find feedback for a given call. Any options other than `[call: "sid"]` will
  result in a `FunctionClauseError`.

  ## Examples

      ExTwilio.Feedback.find(call: "sid")
      %ExTwilio.Feedback{issues: [], quality_score: 5}
  """
  @spec find(call: String.t()) :: Parser.success() | Parser.error()
  def find(call: sid) do
    Api.find(__MODULE__, nil, call: sid)
  end

  # Feedback is nested under an account's call in Twilio URLs.
  def parents, do: [:account, :call]

  # Twilio uses the singular "Feedback" as the resource path segment.
  def resource_name, do: "Feedback"
end
| 26.153846 | 77 | 0.67451 |
9ec30dadd25aa2cbe7f99c4e941807bdce19f4cf | 922 | exs | Elixir | farmbot_core/test/logger_test.exs | elbow-jason/farmbot_os | f5dfc8f58a309285ca3d441b1b7272f15315b2a9 | [
"MIT"
] | 1 | 2019-08-06T11:51:48.000Z | 2019-08-06T11:51:48.000Z | farmbot_core/test/logger_test.exs | SeppPenner/farmbot_os | 39ba5c5880f8aef71792e2c009514bed1177089c | [
"MIT"
] | null | null | null | farmbot_core/test/logger_test.exs | SeppPenner/farmbot_os | 39ba5c5880f8aef71792e2c009514bed1177089c | [
"MIT"
] | null | null | null | defmodule FarmbotCore.LoggerTest do
  use ExUnit.Case

  require FarmbotCore.Logger

  # True when a log with the given id is among the logs handed out (and
  # thereby consumed) by handle_all_logs/0.
  defp log_was_handled?(id) do
    FarmbotCore.Logger.handle_all_logs()
    |> Enum.any?(fn entry -> entry.id == id end)
  end

  test "allows handling a log more than once by re-inserting it." do
    log = FarmbotCore.Logger.debug(1, "Test log ABC")

    # Handling a log should delete it from the store.
    assert log_was_handled?(log.id)

    # Thus, handling all logs again should mean the log
    # isn't there any more
    refute log_was_handled?(log.id)

    # insert the log again
    assert FarmbotCore.Logger.insert_log!(Map.from_struct(log))

    # Make sure the log is available for handling again.
    assert log_was_handled?(log.id)
  end
end
| 30.733333 | 68 | 0.616052 |
9ec3272b02c31a77e7a60c95df567a731ffcca16 | 4,899 | exs | Elixir | test/image_test.exs | lgandersen/jocker | 99a2c6506416b46cfde0debc617f4e8814b1b3fb | [
"BSD-2-Clause"
] | 3 | 2020-07-14T13:44:30.000Z | 2022-03-14T21:06:00.000Z | test/image_test.exs | lgandersen/jocker | 99a2c6506416b46cfde0debc617f4e8814b1b3fb | [
"BSD-2-Clause"
] | null | null | null | test/image_test.exs | lgandersen/jocker | 99a2c6506416b46cfde0debc617f4e8814b1b3fb | [
"BSD-2-Clause"
] | null | null | null | defmodule ImageTest do
  use ExUnit.Case

  alias Jocker.Engine.{Config, Image, MetaData, Layer}

  @moduletag :capture_log

  # Scratch dockerfile (re)written by every test before building.
  @tmp_dockerfile "tmp_dockerfile"
  @tmp_context "./"

  setup do
    # Destroy every image a test created so cases stay independent.
    on_exit(fn ->
      MetaData.list_images() |> Enum.map(fn %Image{id: id} -> Image.destroy(id) end)
    end)

    :ok
  end

  test "create an image with a 'RUN' instruction" do
    dockerfile = """
    FROM scratch
    RUN echo "lol1" > /root/test_1.txt
    """

    TestHelper.create_tmp_dockerfile(dockerfile, @tmp_dockerfile)

    %Image{layer_id: layer_id} =
      TestHelper.build_and_return_image(@tmp_context, @tmp_dockerfile, "test:latest")

    %Layer{mountpoint: mountpoint} = Jocker.Engine.MetaData.get_layer(layer_id)
    # The RUN step's side effect must be visible inside the image layer.
    assert File.read(Path.join(mountpoint, "/root/test_1.txt")) == {:ok, "lol1\n"}
    # Building must not leave intermediate containers behind.
    assert MetaData.list_containers() == []
  end

  test "create an image with a 'COPY' instruction" do
    dockerfile = """
    FROM scratch
    COPY test.txt /root/
    """

    # The build context contains a test.txt fixture (see create_test_context/1).
    context = create_test_context("test_copy_instruction")
    TestHelper.create_tmp_dockerfile(dockerfile, @tmp_dockerfile, context)

    %Image{layer_id: layer_id} =
      TestHelper.build_and_return_image(context, @tmp_dockerfile, "test:latest")

    %Layer{mountpoint: mountpoint} = Jocker.Engine.MetaData.get_layer(layer_id)
    assert File.read(Path.join(mountpoint, "root/test.txt")) == {:ok, "lol\n"}
    assert [] == MetaData.list_containers()
  end

  test "create an image with a 'COPY' instruction using symlinks" do
    # COPY into a directory that is a symlink inside the image; the file
    # must land in the symlink's target directory.
    dockerfile = """
    FROM scratch
    RUN mkdir /etc/testdir
    RUN ln -s /etc/testdir /etc/symbolic_testdir
    COPY test.txt /etc/symbolic_testdir/
    """

    context = create_test_context("test_copy_instruction_symbolic")
    TestHelper.create_tmp_dockerfile(dockerfile, @tmp_dockerfile, context)

    %Image{layer_id: layer_id} =
      TestHelper.build_and_return_image(context, @tmp_dockerfile, "test:latest")

    %Layer{mountpoint: mountpoint} = Jocker.Engine.MetaData.get_layer(layer_id)
    # we cannot check the symbolic link from the host:
    assert File.read(Path.join(mountpoint, "etc/testdir/test.txt")) == {:ok, "lol\n"}
  end

  test "create an image with a 'CMD' instruction" do
    dockerfile = """
    FROM scratch
    CMD /bin/sleep 10
    """

    # CMD only records metadata; no container may be created during build.
    TestHelper.create_tmp_dockerfile(dockerfile, @tmp_dockerfile)
    _image = TestHelper.build_and_return_image(@tmp_context, @tmp_dockerfile, "test:latest")
    assert MetaData.list_containers() == []
  end

  test "create an image with 'ENV' instructions" do
    dockerfile = """
    FROM scratch
    ENV TEST=lol
    ENV TEST2="lool test"
    CMD /bin/ls
    """

    TestHelper.create_tmp_dockerfile(dockerfile, @tmp_dockerfile)
    image = TestHelper.build_and_return_image(@tmp_context, @tmp_dockerfile, "test:latest")
    # env_vars is ordered most-recent-first and quoting is stripped.
    assert image.env_vars == ["TEST2=lool test", "TEST=lol"]
  end

  test "create an image using three RUN/COPY instructions" do
    dockerfile = """
    FROM scratch
    COPY test.txt /root/
    RUN echo 'lol1' > /root/test_1.txt
    RUN echo 'lol2' > /root/test_2.txt
    """

    context = create_test_context("test_image_builder_three_layers")
    TestHelper.create_tmp_dockerfile(dockerfile, @tmp_dockerfile, context)

    %Image{layer_id: layer_id} =
      TestHelper.build_and_return_image(context, @tmp_dockerfile, "test:latest")

    %Layer{mountpoint: mountpoint} = Jocker.Engine.MetaData.get_layer(layer_id)
    # Effects of all three instructions must be present in the final layer.
    assert File.read(Path.join(mountpoint, "root/test.txt")) == {:ok, "lol\n"}
    assert File.read(Path.join(mountpoint, "root/test_1.txt")) == {:ok, "lol1\n"}
    assert File.read(Path.join(mountpoint, "root/test_2.txt")) == {:ok, "lol2\n"}
    assert MetaData.list_containers() == []
  end

  test "receiving of status messages during build" do
    # The backslash continues the RUN line inside the dockerfile string.
    dockerfile = """
    FROM scratch
    COPY test.txt /root/
    RUN echo \
    "this should be relayed back to the parent process"
    USER ntpd
    CMD /etc/rc
    """

    context = create_test_context("test_image_builder_three_layers")
    TestHelper.create_tmp_dockerfile(dockerfile, @tmp_dockerfile, context)
    # Build asynchronously (last arg false = not quiet) and collect the
    # status messages the builder sends back to this process.
    {:ok, pid} = Image.build(context, @tmp_dockerfile, "test:latest", false)
    {_img, messages} = TestHelper.receive_imagebuilder_results(pid, [])

    assert messages == [
             "Step 1/5 : FROM scratch\n",
             "Step 2/5 : COPY test.txt /root/\n",
             "Step 3/5 : RUN echo \"this should be relayed back to the parent process\"\n",
             "this should be relayed back to the parent process\n",
             "Step 4/5 : USER ntpd\n",
             "Step 5/5 : CMD /etc/rc\n"
           ]
  end

  # Creates a ZFS dataset to serve as a build context and seeds it with a
  # test.txt fixture containing "lol\n". Returns the context mountpoint.
  defp create_test_context(name) do
    dataset = Path.join(Config.get("zroot"), name)
    mountpoint = Path.join("/", dataset)
    Jocker.Engine.ZFS.create(dataset)
    {"", 0} = System.cmd("sh", ["-c", "echo 'lol' > #{mountpoint}/test.txt"])
    mountpoint
  end
end
| 33.101351 | 93 | 0.674423 |
9ec33c04a9a0c4fbee8b582585e4198b8d216e4e | 1,540 | exs | Elixir | lib/mix/test/mix/task_test.exs | Nicd/elixir | e62ef92a4be1b562033d35b2d822cc9d6c661077 | [
"Apache-2.0"
] | 4 | 2016-04-05T05:51:36.000Z | 2019-10-31T06:46:35.000Z | lib/mix/test/mix/task_test.exs | Nicd/elixir | e62ef92a4be1b562033d35b2d822cc9d6c661077 | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/task_test.exs | Nicd/elixir | e62ef92a4be1b562033d35b2d822cc9d6c661077 | [
"Apache-2.0"
] | 5 | 2015-02-01T06:01:19.000Z | 2019-08-29T09:02:35.000Z | Code.require_file "../test_helper.exs", __DIR__
defmodule Mix.TaskTest do
  use MixTest.Case

  # A task runs once per invocation; running it again is a :noop until it
  # is reenabled.
  test :run do
    assert Mix.Task.run("hello") == "Hello, World!"
    assert Mix.Task.run("hello") == :noop

    assert_raise Mix.NoTaskError, "The task unknown could not be found", fn ->
      Mix.Task.run("unknown")
    end

    # A module that exists but does not implement run/1 is invalid.
    assert_raise Mix.InvalidTaskError, "The task invalid does not respond to run/1", fn ->
      Mix.Task.run("invalid")
    end
  end

  test :clear do
    Mix.Task.run("hello")
    # clear/0 returns the tasks that had been invoked so far.
    assert match?([ {"hello", _} ], Mix.Task.clear)
  end

  test :reenable do
    assert Mix.Task.run("hello") == "Hello, World!"
    Mix.Task.reenable("hello")
    # After reenable/1 the task runs again instead of returning :noop.
    assert Mix.Task.run("hello") == "Hello, World!"
  end

  # get/1 resolves a task name to its module, with the same errors as run/1.
  test :get do
    assert Mix.Task.get("hello") == Mix.Tasks.Hello

    assert_raise Mix.NoTaskError, "The task unknown could not be found", fn ->
      Mix.Task.get("unknown")
    end

    assert_raise Mix.InvalidTaskError, "The task invalid does not respond to run/1", fn ->
      Mix.Task.get("invalid")
    end
  end

  test :all_modules do
    Mix.Task.load_all
    modules = Mix.Task.all_modules
    assert Mix.Tasks.Hello in modules
    assert Mix.Tasks.Compile in modules
  end

  test :moduledoc do
    assert Mix.Task.moduledoc(Mix.Tasks.Hello) == "A test task.\n"
  end

  test :shortdoc do
    assert Mix.Task.shortdoc(Mix.Tasks.Hello) == "This is short documentation, see"
  end

  test :hidden do
    # Loadpaths is an internal (hidden) task; Compile is user-facing.
    assert Mix.Task.hidden?(Mix.Tasks.Loadpaths)
    refute Mix.Task.hidden?(Mix.Tasks.Compile)
  end
end
| 24.83871 | 90 | 0.661688 |
9ec362b0cbdfeb31f1227c40bdc89df97f65b121 | 9,575 | ex | Elixir | lib/ecto_materialized_path.ex | mayel/ecto_materialized_path | 117ebd0693ec949a1e12fa75481611596d0f5d3d | [
"Apache-2.0"
] | null | null | null | lib/ecto_materialized_path.ex | mayel/ecto_materialized_path | 117ebd0693ec949a1e12fa75481611596d0f5d3d | [
"Apache-2.0"
] | null | null | null | lib/ecto_materialized_path.ex | mayel/ecto_materialized_path | 117ebd0693ec949a1e12fa75481611596d0f5d3d | [
"Apache-2.0"
] | null | null | null | defmodule EctoMaterializedPath do
  @moduledoc """
  Materialized-path tree helpers for Ecto schemas.

  `use EctoMaterializedPath` injects tree-navigation helpers into the host
  schema module (`parent/1`, `root/1`, `children/1`, `descendants/1`,
  `where_depth/2`, `arrange/1`, ...). The path column stores the list of
  ancestor ids ordered from the root down to the direct parent; an empty
  list marks a root record.

  Options:

    * `:column_name` - name of the path column (default: `"path"`)
    * `:namespace` - optional prefix prepended to every generated function name
  """

  defmacro __using__(opts) do
    column_name = Keyword.get(opts, :column_name, "path")
    namespace = Keyword.get(opts, :namespace, nil)
    # Generated functions get a "<namespace>_" prefix when a namespace is given.
    method_namespace = if is_nil(namespace), do: nil, else: "#{namespace}_"

    quote bind_quoted: [
      column_name: column_name,
      method_namespace: method_namespace
    ] do
      # Single-argument helpers: read the path column from the struct and
      # delegate to the EctoMaterializedPath function of the same name.
      ~w(
        parent
        parent_id
        root
        root?
        root_id
        ancestors
        ancestor_ids
        path_ids
        path
        depth
      ) |> Enum.each(fn(function_name) ->
        def unquote(:"#{method_namespace}#{function_name}")(schema = %{ __struct__: __MODULE__ }) do
          path = Map.get(schema, unquote(:"#{column_name}"), [])
          apply(EctoMaterializedPath, unquote(:"#{function_name}"), [schema, path])
        end
      end)

      def unquote(:"#{method_namespace}children")(schema = %{ __struct__: __MODULE__ }) do
        EctoMaterializedPath.children(schema, unquote(:"#{column_name}"))
      end

      def unquote(:"#{method_namespace}siblings")(schema = %{ __struct__: __MODULE__ }) do
        EctoMaterializedPath.siblings(schema, unquote(:"#{column_name}"))
      end

      def unquote(:"#{method_namespace}descendants")(schema = %{ __struct__: __MODULE__ }) do
        EctoMaterializedPath.descendants(schema, unquote(:"#{column_name}"))
      end

      def unquote(:"#{method_namespace}subtree")(schema = %{ __struct__: __MODULE__ }) do
        EctoMaterializedPath.subtree(schema, unquote(:"#{column_name}"))
      end

      def unquote(:"#{method_namespace}build_child")(schema = %{ __struct__: __MODULE__ }) do
        EctoMaterializedPath.build_child(schema, unquote(:"#{column_name}"))
      end

      # BUGFIX: the parent pattern previously read `__MODULE` (a plain
      # variable that matches any struct) instead of `__MODULE__`, so this
      # clause accepted a parent of any schema. It now pins the host module,
      # mirroring the struct clause below.
      def unquote(:"#{method_namespace}make_child_of")(changeset = %Ecto.Changeset{ data: %{ __struct__: __MODULE__ } }, parent = %{ __struct__: __MODULE__ }) do
        EctoMaterializedPath.make_child_of(changeset, parent, unquote(:"#{column_name}"))
      end

      def unquote(:"#{method_namespace}make_child_of")(schema = %{ __struct__: __MODULE__ }, parent = %{ __struct__: __MODULE__ }) do
        EctoMaterializedPath.make_child_of(Ecto.Changeset.change(schema, %{}), parent, unquote(:"#{column_name}"))
      end

      def unquote(:"#{method_namespace}where_depth")(query = %Ecto.Query{ from: { _, __MODULE__ } }, depth_params) do
        EctoMaterializedPath.where_depth(query, depth_params, unquote(:"#{column_name}"))
      end

      def unquote(:"#{method_namespace}where_depth")(query = %Ecto.Query{}, depth_params) do
        EctoMaterializedPath.where_depth(query, depth_params, unquote(:"#{column_name}"))
      end

      def unquote(:"#{method_namespace}where_depth")(schema = %{ __struct__: __MODULE__ }, depth_params) do
        EctoMaterializedPath.where_depth(schema, depth_params, unquote(:"#{column_name}"))
      end

      def unquote(:"#{method_namespace}arrange")(structs_list) when is_list(structs_list), do: EctoMaterializedPath.arrange(structs_list, unquote(:"#{column_name}"))
    end
  end

  require Ecto.Query
  require Logger

  # Query for the record's direct parent (at most one row; empty for roots).
  # List.wrap/1 turns the scalar id into the list that `in/2` requires at
  # runtime, and turns a root's nil parent id into [], which matches no rows
  # instead of raising.
  def parent(schema = %{ __struct__: struct }, path) do
    parent_id = parent_id(schema, path)
    Ecto.Query.from(q in struct, where: q.id in ^List.wrap(parent_id), limit: 1)
  end

  # The direct parent's id is the last element of the path (nil for roots).
  def parent_id(_, path), do: List.last(path)

  # Query for the root of the record's tree (the record itself when a root).
  def root(schema = %{ __struct__: struct }, path) when is_list(path) do
    root_id = root_id(schema, path)
    Ecto.Query.from(q in struct, where: q.id in ^List.wrap(root_id), limit: 1)
  end

  # A root's root is itself; otherwise the first id of the path.
  def root_id(%{ id: id }, []) when is_integer(id) or is_binary(id), do: id
  def root_id(_, path) when is_list(path), do: path |> List.first()

  def root?(%{ id: id }, []) when is_integer(id) or is_binary(id), do: true
  def root?(_, path) when is_list(path), do: false

  # Query for every ancestor of the record (ids come straight from the path).
  def ancestors(schema = %{ __struct__: struct }, path) when is_list(path) do
    Ecto.Query.from(q in struct, where: q.id in ^ancestor_ids(schema, path))
  end

  def ancestor_ids(_, path) when is_list(path), do: path

  # Full path including the record's own id (root first, record last).
  def path_ids(struct = %{ id: id }, path), do: ancestor_ids(struct, path) ++ [id]

  def path(struct = %{ __struct__: module }, path) do
    path_ids = path_ids(struct, path)
    Ecto.Query.from(q in module, where: q.id in ^path_ids)
  end

  # Children carry a path equal to this record's path plus its own id.
  def children(schema = %{ __struct__: module, id: id }, column_name) do
    path = Map.get(schema, column_name, []) ++ [id]
    Ecto.Query.from(q in module, where: fragment("(?) = ?", field(q, ^column_name), ^path))
  end

  # Siblings share exactly the same path (the record itself is included).
  def siblings(schema = %{ __struct__: module }, column_name) do
    path = Map.get(schema, column_name, [])
    Ecto.Query.from(q in module, where: fragment("? = ?", field(q, ^column_name), ^path))
  end

  # Descendants: records whose path array contains this record's path + id
  # (PostgreSQL array containment operator @>).
  def descendants(schema = %{ __struct__: module, id: id }, column_name) do
    path = Map.get(schema, column_name, []) ++ [id]
    Ecto.Query.from(q in module, where: fragment("? @> ?", field(q, ^column_name), ^path))
  end

  # Subtree = the record itself plus all of its descendants.
  def subtree(schema = %{ __struct__: module, id: id }, column_name) do
    path = Map.get(schema, column_name, []) ++ [id]
    Ecto.Query.from(q in module, where: fragment("? @> ?", field(q, ^column_name), ^path) or q.id == ^id)
  end

  # Depth = number of ancestors (0 for roots).
  def depth(_, path) when is_list(path), do: length(path)

  def where_depth(query = %Ecto.Query{}, depth_options, column_name) when is_list(depth_options) do
    do_where_depth(query, depth_options, column_name)
  end

  def where_depth(module, depth_options, column_name) when is_list(depth_options) do
    Ecto.Query.from(q in module)
    |> do_where_depth(depth_options, column_name)
  end

  # Depth filters translate to SQL CARDINALITY(path) comparisons (PostgreSQL).
  defp do_where_depth(query, [is_bigger_than: ibt], column_name) when is_integer(ibt) and ibt > 0 do
    Ecto.Query.from(q in query, where: fragment("CARDINALITY(?) > ?", field(q, ^column_name), ^ibt))
  end

  defp do_where_depth(query, [is_bigger_than_or_equal_to: ibtoet], column_name) when is_integer(ibtoet) and ibtoet >= 0 do
    Ecto.Query.from(q in query, where: fragment("CARDINALITY(?) >= ?", field(q, ^column_name), ^ibtoet))
  end

  defp do_where_depth(query, [is_equal_to: iet], column_name) when is_integer(iet) and iet > 0 do
    Ecto.Query.from(q in query, where: fragment("CARDINALITY(?) = ?", field(q, ^column_name), ^iet))
  end

  defp do_where_depth(query, [is_smaller_than_or_equal_to: istoet], column_name) when is_integer(istoet) and istoet >= 0 do
    Ecto.Query.from(q in query, where: fragment("CARDINALITY(?) <= ?", field(q, ^column_name), ^istoet))
  end

  defp do_where_depth(query, [is_smaller_than: ist], column_name) when is_integer(ist) and ist > 0 do
    Ecto.Query.from(q in query, where: fragment("CARDINALITY(?) < ?", field(q, ^column_name), ^ist))
  end

  defp do_where_depth(_, _, _) do
    raise ArgumentError, "invalid arguments"
  end

  # Returns an unsaved struct of the same schema whose path makes it a
  # child of `schema`.
  def build_child(schema = %{ __struct__: struct, id: id }, column_name) when (is_integer(id) or is_binary(id)) and is_atom(column_name) do
    new_path = Map.get(schema, column_name, []) ++ [id]
    %{ __struct__: struct } |> Map.put(column_name, new_path)
  end

  # Adds a change to `changeset` that re-parents the record under `parent`.
  def make_child_of(changeset, parent = %{ id: id }, column_name) do
    new_path = Map.get(parent, column_name, []) ++ [id]
    changeset |> Ecto.Changeset.change(%{ :"#{column_name}" => new_path })
  end

  @doc """
  Arranges a flat list of records into a nested `[{node, children}]` tree.

  Records whose ancestors are missing from the list cannot be placed; their
  ids are reported via `Logger.error/1`.
  """
  def arrange([], _), do: []
  def arrange(nodes_list, column_name) do
    nodes_depth_map = nodes_list |> nodes_by_depth_map(%{}, column_name)

    # Shallowest records present in the list become the tree roots.
    initial_depth_level = nodes_depth_map |> Map.keys() |> Enum.min()
    initial_list = Map.get(nodes_depth_map, initial_depth_level)
    initial_nodes_depth_map = Map.delete(nodes_depth_map, initial_depth_level)

    { tree, tree_nodes_count } = Enum.reduce(initial_list, { [], length(initial_list) }, &extract_to_resulting_structure(&1, &2, initial_nodes_depth_map, initial_depth_level, column_name))
    check_nodes_arrangement_correctness(tree, tree_nodes_count, nodes_list)
    tree
  end

  # Groups nodes by their depth: %{depth => [node, ...]} (insertion order kept).
  defp nodes_by_depth_map([], processed_map, _), do: processed_map
  defp nodes_by_depth_map([node | tail], before_node_processed_map, column_name) do
    path = Map.get(node, column_name, [])
    node_depth = depth(node, path)

    node_at_depth = Map.get(before_node_processed_map, node_depth, []) ++ [node]
    after_node_processed_map = Map.put(before_node_processed_map, node_depth, node_at_depth)

    nodes_by_depth_map(tail, after_node_processed_map, column_name)
  end

  # Recursively attaches each node's children (nodes one level deeper whose
  # path ends with this node's id), counting every node placed in the tree.
  defp extract_to_resulting_structure(node, { list, total_count }, nodes_depth_map, depth_level, column_name) do
    next_depth_level = depth_level + 1

    { node_children, node_children_count } = nodes_depth_map
      |> Map.get(next_depth_level, [])
      |> Enum.filter(fn(possible_children) -> Map.get(possible_children, column_name, []) |> List.last() == node.id end)
      |> Enum.reduce({ [], total_count }, &extract_to_resulting_structure(&1, &2, nodes_depth_map, next_depth_level, column_name))

    { list ++ [{ node, node_children }], length(node_children) + node_children_count }
  end

  # Logs the ids of any records that could not be attached to the tree.
  defp check_nodes_arrangement_correctness(tree, tree_nodes_count, nodes_list) do
    nodes_count = length(nodes_list)

    if tree_nodes_count != nodes_count do
      nodes_list_ids = nodes_list |> Enum.map(&Map.get(&1, :id))
      tree_node_ids = Enum.map(tree, fn(element) -> get_node_ids_from_tree(element) end) |> List.flatten()
      missing_node_ids = nodes_list_ids -- tree_node_ids

      Logger.error("nodes with ids [#{Enum.join(missing_node_ids, ", ")}] can't be arranged")
    end
  end

  defp get_node_ids_from_tree({ node, [] }), do: [node.id]
  defp get_node_ids_from_tree({ node, list }) do
    [node.id, Enum.map(list, &get_node_ids_from_tree(&1))]
  end
end
| 43.130631 | 188 | 0.683238 |
9ec364af576ebfb67c47fd15f7bbf3d01024d8ca | 4,013 | ex | Elixir | lib/google_certs/certificates.ex | sammkj/google-certs | 6a1661d4673259524ec6f1d537d1b5b97bf8587b | [
"MIT"
] | null | null | null | lib/google_certs/certificates.ex | sammkj/google-certs | 6a1661d4673259524ec6f1d537d1b5b97bf8587b | [
"MIT"
] | 1 | 2021-11-08T04:12:01.000Z | 2021-11-08T04:12:01.000Z | lib/google_certs/certificates.ex | sammkj/google-certs | 6a1661d4673259524ec6f1d537d1b5b97bf8587b | [
"MIT"
] | null | null | null | defmodule GoogleCerts.CertificateDecodeException do
  # Raised when certificate JSON/maps do not match the expected structure.
  defexception message: "Could not decode certificate"
end
defmodule GoogleCerts.Certificate do
  @moduledoc """
  Struct to associate a `kid` to a certificate map.
  kid is the id and cert can either be a map with a pem or a JWK map

  version 1 cert is `%{"pem" => "-----BEGIN CERTIFICATE----- ..."}`
  version 3 cert is `%{"kid" => "53c66aab5...", "e" => "AQAB", ...}`
  """
  alias GoogleCerts.{Certificate, CertificateDecodeException}

  @derive Jason.Encoder
  defstruct kid: nil, cert: nil

  @type t(kid, cert) :: %Certificate{kid: kid, cert: cert}
  @type t :: %Certificate{kid: String.t(), cert: map()}

  # Builds a Certificate from a decoded-JSON map; raises
  # CertificateDecodeException when the "kid"/"cert" string keys are missing.
  @spec decode!(map()) :: t | no_return
  def decode!(%{"kid" => kid, "cert" => cert}) do
    %__MODULE__{kid: kid, cert: cert}
  end

  def decode!(cert) do
    raise CertificateDecodeException,
      message: """
      Could not decode certificate
      Cert must have the following string keys: ["kid", "cert"]
      Provided certificate: #{inspect(cert)}
      """
  end
end
defmodule GoogleCerts.Certificates do
  @moduledoc """
  Struct that holds a list of Google.Oauth2.Certificate structs
  with their expiration time algorithm and version
  """
  alias GoogleCerts.{Certificate, CertificateDecodeException, Certificates}

  @derive Jason.Encoder
  # NOTE: defstruct defaults are evaluated at compile time, so this :expire
  # default is the *compile* timestamp (always in the past at runtime), which
  # makes a fresh struct count as expired until set_expiration/2 is called.
  defstruct certs: [], expire: DateTime.utc_now(), algorithm: "RS256", version: 1

  @type t(certs, expire, algorithm, version) :: %__MODULE__{
          certs: certs,
          expire: expire,
          algorithm: algorithm,
          version: version
        }

  @type t :: %__MODULE__{
          algorithm: String.t(),
          certs: list(Certificate.t()),
          expire: DateTime.t(),
          version: integer()
        }

  @spec new :: Certificates.t()
  def new, do: %__MODULE__{}

  @doc """
  Returns true if `expire` is less than the current UTC time.
  """
  @spec expired?(Certificates.t()) :: boolean
  def expired?(%__MODULE__{expire: %DateTime{} = expire}) do
    DateTime.compare(DateTime.utc_now(), expire) != :lt
  end

  # Anything without a DateTime expire field is treated as expired.
  def expired?(_), do: true

  @spec set_expiration(Certificates.t(), DateTime.t()) :: Certificates.t()
  def set_expiration(struct = %__MODULE__{}, expiration) do
    %__MODULE__{struct | expire: expiration}
  end

  @spec set_version(Certificates.t(), integer()) :: Certificates.t()
  def set_version(struct = %__MODULE__{}, version) do
    %__MODULE__{struct | version: version}
  end

  # Version 1 certs arrive as raw PEM strings and are wrapped in a map.
  @spec add_cert(Certificates.t(), String.t(), map) :: Certificates.t()
  def add_cert(struct = %__MODULE__{certs: certs, version: 1}, kid, cert) do
    %__MODULE__{
      struct
      | certs: [%Certificate{kid: kid, cert: %{"pem" => cert}} | certs]
    }
  end

  # Versions 2-3 are JWK maps; the struct-wide algorithm is taken from the
  # cert's "alg" field.
  def add_cert(struct = %__MODULE__{certs: certs, version: v}, kid, cert) when v in 2..3 do
    %__MODULE__{
      struct
      | certs: [%Certificate{kid: kid, cert: cert} | certs],
        algorithm: Map.get(cert, "alg")
    }
  end

  @doc """
  Returns a `GoogleCerts.Certificate` for a given kid that is in `certs`
  """
  @spec find(Certificates.t(), String.t()) :: Certificate.t() | nil
  def find(%__MODULE__{certs: certs}, kid) do
    Enum.find(certs, fn %Certificate{kid: id} -> id == kid end)
  end

  @doc """
  Returns a `GoogleCerts.Certificates` from the provided json or raw elixir map
  """
  @spec decode!(String.t() | map) :: Certificates.t() | no_return
  def decode!(json) when is_bitstring(json), do: json |> Jason.decode!() |> decode!()

  def decode!(%{
        "algorithm" => algorithm,
        "certs" => certs,
        "expire" => expire,
        "version" => version
      }) do
    # NOTE(review): this match only accepts ISO-8601 timestamps with a zero
    # UTC offset; a non-UTC offset raises MatchError — confirm that is intended.
    {:ok, expire, 0} = DateTime.from_iso8601(expire)

    %__MODULE__{
      certs: Enum.map(certs, &Certificate.decode!/1),
      expire: expire,
      algorithm: algorithm,
      version: version
    }
  end

  def decode!(arg) do
    raise CertificateDecodeException,
      message: "The provided arg does not conform to the required structure. arg: #{inspect(arg)}"
  end
end
| 29.947761 | 98 | 0.637678 |
9ec3ac2f7a82c1117402294692ce4c1c2038beba | 1,819 | exs | Elixir | test/platform_web/plugs/www_redirect_plug_test.exs | lucab85/audioslides.io | cb502ccf6ed0b2db42d9fb20bb4c963bcca3cfa9 | [
"MIT"
] | 17 | 2017-11-14T14:03:18.000Z | 2021-12-10T04:18:48.000Z | test/platform_web/plugs/www_redirect_plug_test.exs | lucab85/audioslides.io | cb502ccf6ed0b2db42d9fb20bb4c963bcca3cfa9 | [
"MIT"
] | 21 | 2017-11-19T13:38:07.000Z | 2022-02-10T00:11:14.000Z | test/platform_web/plugs/www_redirect_plug_test.exs | lucab85/audioslides.io | cb502ccf6ed0b2db42d9fb20bb4c963bcca3cfa9 | [
"MIT"
] | 2 | 2019-09-03T03:32:13.000Z | 2021-02-23T21:52:57.000Z | defmodule PlatformWeb.WwwRedirectPlugTest do
  alias PlatformWeb.WwwRedirectPlug

  use PlatformWeb.ConnCase

  describe "#init" do
    test "passes the options unmodified" do
      opts = %{test: 123}
      assert WwwRedirectPlug.init(opts) == opts
    end
  end

  describe "#call" do
    # NOTE(review): these assertions check only the *last* response header,
    # which is brittle — they would be sturdier reading the header by name.

    test "redirects does nothing if there's no www at the beginning" do
      conn = %{build_conn(:get, "/") | host: "workshops.de"}
      response = WwwRedirectPlug.call(conn, nil)
      # Non-www hosts pass through untouched (no location header is set).
      assert List.last(response.resp_headers) == {"cache-control", "max-age=0, private, must-revalidate"}
    end

    test "test redirect of naked path" do
      conn = %{build_conn(:get, "/") | host: "www.workshops.de"}
      response = WwwRedirectPlug.call(conn, nil)
      # www host gets a permanent (301) redirect to the bare https domain.
      assert List.last(response.resp_headers) == {"location", "https://workshops.de/"}
      assert response(response, 301)
    end

    test "test redirect of path" do
      conn = %{build_conn(:get, "/some/path") | host: "www.workshops.de"}
      response = WwwRedirectPlug.call(conn, nil)
      assert List.last(response.resp_headers) == {"location", "https://workshops.de/some/path"}
      assert response(response, 301)
    end

    test "test redirect of path with params" do
      conn = %{build_conn(:get, "/some/path?foo=bar") | host: "www.workshops.de"}
      response = WwwRedirectPlug.call(conn, nil)
      # Query string must survive the redirect.
      assert List.last(response.resp_headers) == {"location", "https://workshops.de/some/path?foo=bar"}
      assert response(response, 301)
    end

    # NOTE(review): this test body is identical to "test redirect of path"
    # above and never sets the scheme to https — it does not actually
    # exercise an ssl request; consider building the conn with an https
    # scheme or removing the duplicate.
    test "test redirect with ssl" do
      conn = %{build_conn(:get, "/some/path") | host: "www.workshops.de"}
      response = WwwRedirectPlug.call(conn, nil)
      assert List.last(response.resp_headers) == {"location", "https://workshops.de/some/path"}
      assert response(response, 301)
    end
  end
end
| 33.685185 | 105 | 0.658604 |
9ec3c6554d08798599beb334d0afb69a10711a5a | 16,725 | exs | Elixir | lib/elixir/test/elixir/gen_event_test.exs | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | 1 | 2015-02-23T00:01:48.000Z | 2015-02-23T00:01:48.000Z | lib/elixir/test/elixir/gen_event_test.exs | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/gen_event_test.exs | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
defmodule GenEventTest do
use ExUnit.Case
defmodule ReplyHandler do
use GenEvent
def init(:raise) do
raise "oops"
end
def init({:throw, process}) do
{:ok, process}
end
def init({:raise, _}) do
raise "oops"
end
def init({:swap, {:error, :not_found}}) do
{:error, :not_found_on_swap}
end
def init({:swap, parent}) when is_pid(parent) do
send parent, :swapped
{:ok, parent}
end
def init({:custom, return}) do
return
end
def init({parent, :hibernate}) do
{:ok, parent, :hibernate}
end
def init({parent, trap}) when is_pid(parent) and is_boolean(trap) do
Process.flag(:trap_exit, trap)
{:ok, parent}
end
def handle_event(:raise, _parent) do
raise "oops"
end
def handle_event(:hibernate, parent) do
{:ok, parent, :hibernate}
end
def handle_event({:custom, reply}, _parent) do
reply
end
def handle_event(event, parent) do
send parent, {:event, event}
{:ok, parent}
end
def handle_call(:raise, _parent) do
raise "oops"
end
def handle_call(:hibernate, parent) do
{:ok, :ok, parent, :hibernate}
end
def handle_call({:custom, reply}, _parent) do
reply
end
def handle_call(event, parent) do
send parent, {:call, event}
{:ok, :ok, parent}
end
def handle_info(:hibernate, parent) do
{:ok, parent, :hibernate}
end
def handle_info(event, parent) do
send parent, {:info, event}
{:ok, parent}
end
def terminate(:raise, _parent) do
raise "oops"
end
def terminate(:swapped, parent) do
send parent, {:terminate, :swapped}
parent
end
def terminate(arg, parent) do
send parent, {:terminate, arg}
end
end
defmodule DefaultHandler do
use GenEvent
end
defmodule Via do
def register_name(name, pid) do
Process.register(pid, name)
:yes
end
def whereis_name(name) do
Process.whereis(name) || :undefined
end
end
@receive_timeout 1000
# An unnamed and a named manager both start with no handlers installed and
# can be addressed by pid or by registered name.
test "start/1" do
  assert {:ok, pid} = GenEvent.start()
  assert GenEvent.which_handlers(pid) == []
  assert GenEvent.stop(pid) == :ok
  assert {:ok, pid} = GenEvent.start(name: :my_gen_event_name)
  assert GenEvent.which_handlers(:my_gen_event_name) == []
  assert GenEvent.which_handlers(pid) == []
  assert GenEvent.stop(:my_gen_event_name) == :ok
end
# start_link/1 supports no name, local names, {:global, name}, and
# {:via, module, name} registration, and reports :already_started (with the
# existing pid) on a name collision.
test "start_link/1" do
  assert {:ok, pid} = GenEvent.start_link()
  assert GenEvent.which_handlers(pid) == []
  assert GenEvent.stop(pid) == :ok
  # Local name registration.
  assert {:ok, pid} = GenEvent.start_link(name: :my_gen_event_name)
  assert GenEvent.which_handlers(:my_gen_event_name) == []
  assert GenEvent.which_handlers(pid) == []
  assert GenEvent.stop(:my_gen_event_name) == :ok
  # Global name registration.
  assert {:ok, pid} = GenEvent.start_link(name: {:global, :my_gen_event_name})
  assert GenEvent.which_handlers({:global, :my_gen_event_name}) == []
  assert GenEvent.which_handlers(pid) == []
  assert GenEvent.stop({:global, :my_gen_event_name}) == :ok
  # Registration through the Via test module above.
  assert {:ok, pid} = GenEvent.start_link(name: {:via, Via, :my_gen_event_name})
  assert GenEvent.which_handlers({:via, Via, :my_gen_event_name}) == []
  assert GenEvent.which_handlers(pid) == []
  assert GenEvent.stop({:via, Via, :my_gen_event_name}) == :ok
  # Duplicate local name reports the already-registered pid.
  assert {:ok, pid} = GenEvent.start_link(name: :my_gen_event_name)
  assert GenEvent.start_link(name: :my_gen_event_name) ==
         {:error, {:already_started, pid}}
end
# Exit-signal semantics: a signal from the *parent* always terminates the
# manager (running handler terminate callbacks only when the handler traps
# exits), while a signal from any other process terminates it only when the
# handler is not trapping exits — otherwise it is delivered as an :EXIT info
# message. The test process traps exits so it can observe the manager dying.
test "handles exit signals" do
  Process.flag(:trap_exit, true)
  # Terminates on signal from parent when not trapping exits
  {:ok, pid} = GenEvent.start_link()
  :ok = GenEvent.add_handler(pid, ReplyHandler, {self(), false})
  Process.exit(pid, :shutdown)
  assert_receive {:EXIT, ^pid, :shutdown}
  refute_received {:terminate, _}
  # Terminates on signal from parent when trapping exits
  {:ok, pid} = GenEvent.start_link()
  :ok = GenEvent.add_handler(pid, ReplyHandler, {self(), true})
  Process.exit(pid, :shutdown)
  assert_receive {:EXIT, ^pid, :shutdown}
  assert_receive {:terminate, :stop}
  # Terminates on signal not from parent when not trapping exits
  {:ok, pid} = GenEvent.start_link()
  :ok = GenEvent.add_handler(pid, ReplyHandler, {self(), false})
  spawn fn -> Process.exit(pid, :shutdown) end
  assert_receive {:EXIT, ^pid, :shutdown}
  refute_received {:terminate, _}
  # Does not terminate on signal not from parent when trapping exits
  {:ok, pid} = GenEvent.start_link()
  :ok = GenEvent.add_handler(pid, ReplyHandler, {self(), true})
  terminator = spawn fn -> Process.exit(pid, :shutdown) end
  assert_receive {:info, {:EXIT, ^terminator, :shutdown}}
  refute_received {:terminate, _}
end
# The manager hibernates when a handler returns a :hibernate directive from
# init, handle_call or handle_event, and wakes up on the next message.
# Hibernation is detected by polling Process.info/2 (see hibernating?/1).
test "hibernates" do
  {:ok, pid} = GenEvent.start()
  # Hibernate requested from init/1.
  :ok = GenEvent.add_handler(pid, ReplyHandler, {self(), :hibernate})
  wait_until fn -> hibernating?(pid) end
  wake_up(pid)
  refute hibernating?(pid)
  # Hibernate requested from handle_call/2.
  :ok = GenEvent.call(pid, ReplyHandler, :hibernate)
  wait_until fn -> hibernating?(pid) end
  wake_up(pid)
  # Hibernate requested from handle_event/2.
  :ok = GenEvent.sync_notify(pid, :hibernate)
  wait_until fn -> hibernating?(pid) end
  GenEvent.stop(pid)
end
# add_handler/3 surfaces init failures ({:error, _}, bad returns, raises),
# rejects duplicate module handlers with :already_present, and allows the
# same module again under a distinct {module, id} pair.
test "add_handler/3" do
  {:ok, pid} = GenEvent.start()
  # init returning {:error, reason} is passed through verbatim.
  assert GenEvent.add_handler(pid, ReplyHandler, {:custom, {:error, :my_error}}) ==
         {:error, :my_error}
  # An unrecognized init return is wrapped in :bad_return_value.
  assert GenEvent.add_handler(pid, ReplyHandler, {:custom, :oops}) ==
         {:error, {:bad_return_value, :oops}}
  # A raise during init is reported as {exception, stacktrace}.
  assert {:error, {%RuntimeError{}, _}} =
         GenEvent.add_handler(pid, ReplyHandler, :raise)
  assert GenEvent.add_handler(pid, ReplyHandler, {:throw, self()}) == :ok
  assert GenEvent.which_handlers(pid) == [ReplyHandler]
  assert GenEvent.add_handler(pid, ReplyHandler, {:throw, self()}) == {:error, :already_present}
  # Same module with an explicit id counts as a different handler.
  assert GenEvent.add_handler(pid, {ReplyHandler, self()}, {self(), false}) == :ok
  assert GenEvent.which_handlers(pid) == [{ReplyHandler, self()}, ReplyHandler]
end
# A monitored handler is removed when the process that installed it dies,
# and the :DOWN message for that internal monitor is *not* forwarded to the
# remaining handlers — unlike unrelated :DOWN messages, which are.
test "add_mon_handler/3" do
  {:ok, pid} = GenEvent.start()
  parent = self()
  # Install the handler from a separate process so we can kill the installer.
  {mon_pid, mon_ref} = spawn_monitor(fn ->
    assert GenEvent.add_mon_handler(pid, ReplyHandler, {self(), false}) == :ok
    send parent, :ok
    receive after: (:infinity -> :ok)
  end)
  assert_receive :ok
  assert GenEvent.add_handler(pid, {ReplyHandler, self()}, {self(), false}) == :ok
  assert GenEvent.which_handlers(pid) == [{ReplyHandler, self()}, ReplyHandler]
  # A regular monitor message is passed forward
  send pid, {:DOWN, make_ref(), :process, self(), :oops}
  assert_receive {:info, {:DOWN, _, :process, _, :oops}}
  # Killing the monitor though is not passed forward
  Process.exit(mon_pid, :oops)
  assert_receive {:DOWN, ^mon_ref, :process, ^mon_pid, :oops}
  refute_received {:info, {:DOWN, _, :process, _, :oops}}
  # Only the handler added without monitoring survives.
  assert GenEvent.which_handlers(pid) == [{ReplyHandler, self()}]
end
# The process that installed a monitored handler receives a
# {:gen_event_EXIT, handler, reason} message whenever that handler is
# removed — on explicit removal, on swap, and on manager shutdown.
test "add_mon_handler/3 with notifications" do
  {:ok, pid} = GenEvent.start()
  self = self()
  GenEvent.add_mon_handler(pid, ReplyHandler, {self(), false})
  GenEvent.remove_handler(pid, ReplyHandler, :ok)
  assert_receive {:gen_event_EXIT, ReplyHandler, :normal}
  # Swapping a monitored handler reports the swap; the new handler is not
  # monitored (nil requester in the exit reason).
  :ok = GenEvent.add_mon_handler(pid, ReplyHandler, {self(), false})
  :ok = GenEvent.swap_handler(pid, ReplyHandler, :swapped, ReplyHandler, :swap)
  assert_receive {:gen_event_EXIT, ReplyHandler, {:swapped, ReplyHandler, nil}}
  # swap_mon_handler keeps the monitor; the requester pid appears instead.
  :ok = GenEvent.swap_mon_handler(pid, ReplyHandler, :swapped, ReplyHandler, :swap)
  :ok = GenEvent.swap_mon_handler(pid, ReplyHandler, :swapped, ReplyHandler, :swap)
  assert_receive {:gen_event_EXIT, ReplyHandler, {:swapped, ReplyHandler, ^self}}
  GenEvent.stop(pid)
  assert_receive {:gen_event_EXIT, ReplyHandler, :shutdown}
end
# remove_handler/3 returns the handler's terminate/2 result, distinguishes
# Module from {Module, id} identities, and reports raises from terminate.
test "remove_handler/3" do
  {:ok, pid} = GenEvent.start()
  GenEvent.add_mon_handler(pid, ReplyHandler, {self(), false})
  # {ReplyHandler, self()} is a different identity than ReplyHandler.
  assert GenEvent.remove_handler(pid, {ReplyHandler, self()}, :ok) ==
         {:error, :not_found}
  # The return value is terminate/2's result: send/2 returns its message,
  # hence {:terminate, :ok} (see ReplyHandler.terminate/2).
  assert GenEvent.remove_handler(pid, ReplyHandler, :ok) ==
         {:terminate, :ok}
  assert_receive {:terminate, :ok}
  GenEvent.add_mon_handler(pid, {ReplyHandler, self()}, {self(), false})
  assert GenEvent.remove_handler(pid, ReplyHandler, :ok) ==
         {:error, :not_found}
  # A raise inside terminate/2 is reported as {exception, stacktrace}.
  assert {:error, {%RuntimeError{}, _}} =
         GenEvent.remove_handler(pid, {ReplyHandler, self()}, :raise)
  assert GenEvent.which_handlers(pid) == []
end
# swap_handler/5 terminates the old handler (with the given argument) and
# inits the new one, refuses to swap onto an already-present identity, and
# still runs the new handler's init when the old one was not found.
test "swap_handler/5" do
  {:ok, pid} = GenEvent.start()
  GenEvent.add_handler(pid, ReplyHandler, {self(), false})
  assert GenEvent.swap_handler(pid, ReplyHandler, :swapped,
                               {ReplyHandler, self()}, :swap) == :ok
  # Old handler terminated with :swapped, new one announced itself.
  assert_receive {:terminate, :swapped}
  assert_receive :swapped
  assert GenEvent.add_handler(pid, ReplyHandler, {self(), false}) == :ok
  # Swapping onto an identity that is already installed fails.
  assert GenEvent.swap_handler(pid, ReplyHandler, :swapped,
                               {ReplyHandler, self()}, :swap) == {:error, :already_present}
  assert GenEvent.which_handlers(pid) == [{ReplyHandler, self()}]
  # remove_handler returns terminate/2's result (send/2's message).
  assert GenEvent.remove_handler(pid, {ReplyHandler, self()}, :remove_handler) ==
         {:terminate, :remove_handler}
  # The handler is initialized even when the module does not exist
  # on swap. However, in this case, we are returning an error on init.
  assert GenEvent.swap_handler(pid, ReplyHandler, :swapped, ReplyHandler, :swap) ==
         {:error, :not_found_on_swap}
end
# notify/2 is asynchronous and always returns :ok; handlers that return
# :remove_handler, a bad value, or raise are removed (their terminate/2
# reports the reason). The console logger backend is detached so the
# expected crash reports do not pollute test output.
test "notify/2" do
  {:ok, pid} = GenEvent.start()
  GenEvent.add_handler(pid, ReplyHandler, {self(), false})
  assert GenEvent.notify(pid, :hello) == :ok
  assert_receive {:event, :hello}
  assert GenEvent.notify(pid, {:custom, :remove_handler}) == :ok
  assert_receive {:terminate, :remove_handler}
  assert GenEvent.which_handlers(pid) == []
  Logger.remove_backend(:console)
  # A bad return removes the handler with a :bad_return_value reason.
  GenEvent.add_handler(pid, ReplyHandler, {self(), false})
  assert GenEvent.notify(pid, {:custom, :oops}) == :ok
  assert_receive {:terminate, {:error, {:bad_return_value, :oops}}}
  # A raise removes the handler with the exception as reason.
  GenEvent.add_handler(pid, ReplyHandler, {self(), false})
  assert GenEvent.notify(pid, :raise) == :ok
  assert_receive {:terminate, {:error, {%RuntimeError{}, _}}}
after
  Logger.add_backend(:console, flush: true)
end
# notify/2 is fire-and-forget: it returns :ok even for global names and
# {name, node} tuples that do not resolve, and for a plain pid — but raises
# ArgumentError for a local name that is not registered.
test "notify/2 with bad args" do
  for target <- [{:global, :foo}, {:foo, :bar}, self()] do
    assert GenEvent.notify(target, :bar) == :ok
  end
  assert_raise ArgumentError, fn ->
    GenEvent.notify(:foo, :bar)
  end
end
# ack_notify/2 mirrors notify/2 (returns :ok, failing handlers are removed)
# but acknowledges once the manager has received the event. The console
# logger backend is detached around the expected handler crashes.
test "ack_notify/2" do
  {:ok, pid} = GenEvent.start()
  GenEvent.add_handler(pid, ReplyHandler, {self(), false})
  assert GenEvent.ack_notify(pid, :hello) == :ok
  assert_receive {:event, :hello}
  assert GenEvent.ack_notify(pid, {:custom, :remove_handler}) == :ok
  assert_receive {:terminate, :remove_handler}
  assert GenEvent.which_handlers(pid) == []
  Logger.remove_backend(:console)
  # A bad return removes the handler with a :bad_return_value reason.
  GenEvent.add_handler(pid, ReplyHandler, {self(), false})
  assert GenEvent.ack_notify(pid, {:custom, :oops}) == :ok
  assert_receive {:terminate, {:error, {:bad_return_value, :oops}}}
  # A raise removes the handler with the exception as reason.
  GenEvent.add_handler(pid, ReplyHandler, {self(), false})
  assert GenEvent.ack_notify(pid, :raise) == :ok
  assert_receive {:terminate, {:error, {%RuntimeError{}, _}}}
after
  Logger.add_backend(:console, flush: true)
end
# sync_notify/2 blocks until every handler has processed the event, so the
# echoed messages are asserted with assert_received (already in the mailbox)
# rather than assert_receive. Failure handling matches notify/2.
test "sync_notify/2" do
  {:ok, pid} = GenEvent.start()
  GenEvent.add_handler(pid, ReplyHandler, {self(), false})
  assert GenEvent.sync_notify(pid, :hello) == :ok
  assert_received {:event, :hello}
  assert GenEvent.sync_notify(pid, {:custom, :remove_handler}) == :ok
  assert_received {:terminate, :remove_handler}
  assert GenEvent.which_handlers(pid) == []
  Logger.remove_backend(:console)
  # A bad return removes the handler with a :bad_return_value reason.
  GenEvent.add_handler(pid, ReplyHandler, {self(), false})
  assert GenEvent.sync_notify(pid, {:custom, :oops}) == :ok
  assert_received {:terminate, {:error, {:bad_return_value, :oops}}}
  # A raise removes the handler with the exception as reason.
  GenEvent.add_handler(pid, ReplyHandler, {self(), false})
  assert GenEvent.sync_notify(pid, :raise) == :ok
  assert_received {:terminate, {:error, {%RuntimeError{}, _}}}
after
  Logger.add_backend(:console, flush: true)
end
# call/3 returns the handler's reply; a :remove_handler reply removes the
# handler, and bad returns or raises are reported to the *caller* as
# {:error, reason} while also removing the handler.
test "call/3" do
  {:ok, pid} = GenEvent.start()
  GenEvent.add_handler(pid, ReplyHandler, {self(), false})
  assert GenEvent.call(pid, ReplyHandler, :hello) == :ok
  assert_receive {:call, :hello}
  # {:remove_handler, :ok} replies :ok and removes the handler.
  assert GenEvent.call(pid, ReplyHandler, {:custom, {:remove_handler, :ok}}) == :ok
  assert_receive {:terminate, :remove_handler}
  assert GenEvent.which_handlers(pid) == []
  Logger.remove_backend(:console)
  # Unlike notify, the failure reason is also returned to the caller.
  GenEvent.add_handler(pid, ReplyHandler, {self(), false})
  assert {:error, {:bad_return_value, :oops}} =
         GenEvent.call(pid, ReplyHandler, {:custom, :oops})
  assert_receive {:terminate, {:error, {:bad_return_value, :oops}}}
  assert GenEvent.which_handlers(pid) == []
  GenEvent.add_handler(pid, ReplyHandler, {self(), false})
  assert {:error, {%RuntimeError{}, _}} = GenEvent.call(pid, ReplyHandler, :raise)
  assert_receive {:terminate, {:error, {%RuntimeError{}, _}}}
  assert GenEvent.which_handlers(pid) == []
after
  Logger.add_backend(:console, flush: true)
end
# Calling an uninstalled handler yields {:error, :not_found}; an installed
# handler that does not implement the call yields {:error, {:bad_call, _}}
# and is removed (DefaultHandler has only the default callbacks).
test "call/2 with bad args" do
  Logger.remove_backend(:console)
  {:ok, pid} = GenEvent.start_link()
  assert GenEvent.add_handler(pid, DefaultHandler, []) == :ok
  assert GenEvent.call(pid, UnknownHandler, :messages) ==
         {:error, :not_found}
  assert GenEvent.call(pid, DefaultHandler, :whatever) ==
         {:error, {:bad_call, :whatever}}
  # The bad call removed DefaultHandler.
  assert GenEvent.which_handlers(pid) == []
  assert GenEvent.stop(pid) == :ok
after
  Logger.add_backend(:console, flush: true)
end
# Wires one GenEvent manager into another via the internal
# :add_process_handler message (sent with :gen.call — an internal protocol,
# not public API), so events notified to `fst` are forwarded to `snd`.
# NOTE(review): sync_notify's assert_received implies forwarding completes
# before the sync call returns — relies on the internal protocol's ordering.
test "add_process_handler/2 with GenEvent" do
  {:ok, snd} = GenEvent.start_link()
  GenEvent.add_handler(snd, ReplyHandler, {self(), false})
  {:ok, fst} = GenEvent.start_link()
  :gen.call(fst, self(), {:add_process_handler, snd, snd})
  assert GenEvent.notify(fst, :hello) == :ok
  assert_receive {:event, :hello}
  assert GenEvent.ack_notify(fst, :hello) == :ok
  assert_receive {:event, :hello}
  assert GenEvent.sync_notify(fst, :hello) == :ok
  assert_received {:event, :hello}
end
# :sys.get_status/1 exposes the manager's module, process dictionary
# (ancestors, initial call) and the list of installed handlers, including
# the {module, id} identity and the installing pid.
test ":sys.get_status/2" do
  {:ok, pid} = GenEvent.start(name: :my_gen_event_name)
  :ok = GenEvent.add_handler(pid, {ReplyHandler, :ok}, {self(), true})
  self = self()
  status = :sys.get_status(pid)
  GenEvent.stop(pid)
  assert {:status, ^pid, {:module, GenEvent},
         [pdict, _, ^pid, [], data]} = status
  assert pdict[:"$ancestors"] == [self()]
  assert pdict[:"$initial_call"] == {GenEvent, :init_it, 6}
  # Handler tuple: {:handler, module, id, state-owner pid, ref?, monitor?}.
  assert {'Installed handlers', [
         {:handler, ReplyHandler, {ReplyHandler, :ok}, ^self, nil, nil}]} = data[:items]
end
# :sys.get_state/1 returns the installed handlers as {module, id, state}
# tuples; :sys.replace_state/2 maps over them, a crashing replacer leaves
# the state untouched, and both work while the process is suspended.
test ":sys.get_state/1 and :sys.replace_state/2" do
  {:ok, pid} = GenEvent.start_link(name: :my_gen_event_name)
  self = self()
  assert [] = :sys.get_state(pid)
  :ok = GenEvent.add_handler(pid, ReplyHandler, {self, true})
  assert [{ReplyHandler, ReplyHandler, ^self}] = :sys.get_state(pid)
  replacer = fn {ReplyHandler, ReplyHandler, _} -> {ReplyHandler, ReplyHandler, :unknown} end
  :sys.replace_state(pid, replacer)
  assert [{ReplyHandler, ReplyHandler, :unknown}] = :sys.get_state(pid)
  # Fail while replacing does not cause a crash
  :sys.replace_state(pid, fn _ -> exit(:fail) end)
  assert [{ReplyHandler, ReplyHandler, :unknown}] = :sys.get_state(pid)
  # A second handler under a {module, id} identity is listed first.
  :ok = GenEvent.add_handler(pid, {ReplyHandler, :ok}, {self, true})
  assert [{ReplyHandler, {ReplyHandler, :ok}, ^self},
          {ReplyHandler, ReplyHandler, :unknown}] = :sys.get_state(pid)
  # State stays readable while the process is suspended.
  :ok = :sys.suspend(pid)
  assert [{ReplyHandler, {ReplyHandler, :ok}, ^self},
          {ReplyHandler, ReplyHandler, :unknown}] = :sys.get_state(pid)
  :ok = :sys.resume(pid)
end
# True when `pid` is currently parked in :erlang.hibernate/3. A dead pid
# makes Process.info/2 return nil, which simply fails the match.
defp hibernating?(pid) do
  match?({:current_function, {:erlang, :hibernate, 3}},
         Process.info(pid, :current_function))
end
# Polls `fun` every 10ms until it returns a truthy value, flunking the test
# after ~1s (100 attempts). The attempt budget is checked *before* `fun`,
# matching the original cond ordering.
defp wait_until(fun, attempts \\ 0)

defp wait_until(fun, attempts) when attempts > 100 do
  flunk "Waited for 1s, but #{inspect fun} never returned true"
end

defp wait_until(fun, attempts) do
  if fun.() do
    true
  else
    receive after: (10 -> wait_until(fun, attempts + 1))
  end
end
# Wakes a hibernating manager by sending it a message, then waits for
# ReplyHandler to echo it back (proving the manager processed it).
defp wake_up(pid) do
  send pid, :wake
  assert_receive {:info, :wake}
end
end
| 32.412791 | 98 | 0.650463 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.