hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
795b2e5bc20bcf76eb1095d8edbd56e86d34fe0d | 906 | ex | Elixir | lib/ecto/adapter/transaction.ex | jccf091/ecto | 42d47a6da0711f842e1a0e6724a89b318b9b2144 | [
"Apache-2.0"
] | 1 | 2017-11-27T06:00:32.000Z | 2017-11-27T06:00:32.000Z | lib/ecto/adapter/transaction.ex | jccf091/ecto | 42d47a6da0711f842e1a0e6724a89b318b9b2144 | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapter/transaction.ex | jccf091/ecto | 42d47a6da0711f842e1a0e6724a89b318b9b2144 | [
"Apache-2.0"
defmodule Ecto.Adapter.Transaction do
  @moduledoc """
  Specifies the adapter transactions API.
  """

  @doc """
  Runs the given function inside a transaction.

  Returns `{:ok, value}` if the transaction was successful where `value`
  is the value returned by the function or `{:error, value}` if the transaction
  was rolled back where `value` is the value given to `rollback/1`.

  See `Ecto.Repo.transaction/1`.
  """
  @callback transaction(repo :: Ecto.Repo.t, options :: Keyword.t, function :: fun) ::
              {:ok, any} | {:error, any}

  @doc """
  Returns true if the given process is inside a transaction.
  """
  @callback in_transaction?(repo :: Ecto.Repo.t) :: boolean

  @doc """
  Rolls back the current transaction.

  The transaction will return the value given as `{:error, value}`.

  See `Ecto.Repo.rollback/1`.
  """
  @callback rollback(repo :: Ecto.Repo.t, value :: any) :: no_return
end
| 29.225806 | 113 | 0.677704 |
795b30af8833435a7d44da86e146feb72ce83e33 | 790 | exs | Elixir | code/spawn/link3.exs | alvarocamillont/introdu-o_elixir | 1d72d4f4b01d9312c4b066ce3c0fe8d9bfaaade1 | [
"MIT"
] | null | null | null | code/spawn/link3.exs | alvarocamillont/introdu-o_elixir | 1d72d4f4b01d9312c4b066ce3c0fe8d9bfaaade1 | [
"MIT"
] | 1 | 2021-03-09T16:27:25.000Z | 2021-03-09T16:27:25.000Z | programming-elixir-book/code/spawn/link3.exs | jordanhubbard/elixir-projects | dee341d672e83a45a17a4a85abd54a480f95c506 | [
"BSD-2-Clause"
] | null | null | null | #---
# Excerpted from "Programming Elixir ≥ 1.6",
# published by The Pragmatic Bookshelf.
# Copyrights apply to this code. It may not be used to create training material,
# courses, books, articles, and the like. Contact us if you are in doubt.
# We make no guarantees that this code is fit for any purpose.
# Visit http://www.pragmaticprogrammer.com/titles/elixir16 for more book information.
#---
defmodule Link3 do
  # Demonstrates trapping the exit signal of a linked process.

  # Sleeps half a second, then terminates abnormally with reason :boom.
  def sad_function do
    :timer.sleep(500)
    exit(:boom)
  end

  # Traps exits, links to a crashing child, and prints whichever arrives
  # first: the child's :EXIT message or a one-second timeout notice.
  def run do
    Process.flag(:trap_exit, true)
    spawn_link(__MODULE__, :sad_function, [])

    receive do
      msg -> IO.puts "MESSAGE RECEIVED: #{inspect msg}"
    after
      1000 -> IO.puts "Nothing happened as far as I am concerned"
    end
  end
end

Link3.run()
| 26.333333 | 85 | 0.683544 |
795b4d5b2a3a23850e6cbaf282825bb79f905216 | 1,768 | ex | Elixir | deps/makeup/lib/makeup/styles/html/pygments/friendly.ex | arduino-man/fona_modern | 61845bbbbc46a61a50e59a97c68709f2722078a6 | [
"MIT"
] | null | null | null | deps/makeup/lib/makeup/styles/html/pygments/friendly.ex | arduino-man/fona_modern | 61845bbbbc46a61a50e59a97c68709f2722078a6 | [
"MIT"
] | null | null | null | deps/makeup/lib/makeup/styles/html/pygments/friendly.ex | arduino-man/fona_modern | 61845bbbbc46a61a50e59a97c68709f2722078a6 | [
"MIT"
] | null | null | null |
defmodule Makeup.Styles.HTML.FriendlyStyle do
  @moduledoc false

  alias Makeup.Styles.HTML.Style

  # Raw Pygments style strings for the "friendly" colour scheme,
  # keyed by Makeup token type.
  @styles %{
    error: "border:#FF0000",
    keyword: "bold #007020",
    keyword_pseudo: "nobold",
    keyword_type: "nobold #902000",
    name_attribute: "#4070a0",
    name_builtin: "#007020",
    name_class: "bold #0e84b5",
    name_constant: "#60add5",
    name_decorator: "bold #555555",
    name_entity: "bold #d55537",
    name_exception: "#007020",
    name_function: "#06287e",
    name_label: "bold #002070",
    name_namespace: "bold #0e84b5",
    name_tag: "bold #062873",
    name_variable: "#bb60d5",
    string: "#4070a0",
    string_doc: "italic",
    string_escape: "bold #4070a0",
    string_interpol: "italic #70a0d0",
    string_other: "#c65d09",
    string_regex: "#235388",
    string_symbol: "#517918",
    number: "#40a070",
    operator: "#666666",
    operator_word: "bold #007020",
    comment: "italic #60a0b0",
    comment_preproc: "noitalic #007020",
    comment_special: "noitalic bg:#fff0f0",
    generic_deleted: "#A00000",
    generic_emph: "italic",
    generic_error: "#FF0000",
    generic_heading: "bold #000080",
    generic_inserted: "#00A000",
    generic_output: "#888",
    generic_prompt: "bold #c65d09",
    generic_strong: "bold",
    generic_subheading: "bold #800080",
    generic_traceback: "#04D"
  }

  # Built once at compile time; `style/0` simply returns this struct.
  @style_struct Style.make_style(
                  short_name: "friendly",
                  long_name: "Friendly Style",
                  background_color: "#f0f0f0",
                  highlight_color: "#ffffcc",
                  styles: @styles
                )

  def style do
    @style_struct
  end
end
795b65a3c6712136c542d3fbd3c8b4314af3a7d0 | 374 | ex | Elixir | lib/myapp/posts/post.ex | joshddunn/graphql-phoenix | 40a8ee9fa5ad74cbd85a64721309934d3091833b | [
"MIT"
] | null | null | null | lib/myapp/posts/post.ex | joshddunn/graphql-phoenix | 40a8ee9fa5ad74cbd85a64721309934d3091833b | [
"MIT"
] | null | null | null | lib/myapp/posts/post.ex | joshddunn/graphql-phoenix | 40a8ee9fa5ad74cbd85a64721309934d3091833b | [
"MIT"
defmodule MyApp.Posts.Post do
  use Ecto.Schema
  import Ecto.Changeset

  # Every post must carry these attributes; used for both cast and
  # validation in `changeset/2`.
  @required_fields [:title, :body, :user_id]

  # Database-backed schema for the `posts` table.
  schema "posts" do
    field :body, :string
    field :title, :string
    belongs_to :user, MyApp.Accounts.User

    timestamps()
  end

  @doc false
  def changeset(post, attrs) do
    post
    |> cast(attrs, @required_fields)
    |> validate_required(@required_fields)
  end
end
| 17.809524 | 51 | 0.652406 |
795b8dfa725831a324ee176fb2051b3dd94f7773 | 7,827 | ex | Elixir | lib/gim/query.ex | jan-sti/gim | 1b8be6c2163577f375825170cc9b01674e59b646 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | lib/gim/query.ex | jan-sti/gim | 1b8be6c2163577f375825170cc9b01674e59b646 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | lib/gim/query.ex | jan-sti/gim | 1b8be6c2163577f375825170cc9b01674e59b646 | [
"ECL-2.0",
"Apache-2.0"
defmodule Gim.Query do
  @moduledoc """
  Defines queries on schemas.

  A `Gim.Query` holds the node `:type` being queried, a `:filter` tree of
  the shape `{:and | :or, conditions}` and an `:expand` keyword tree naming
  the edges to follow when the query is resolved.
  """

  defstruct type: nil,
            filter: {:and, []},
            expand: []

  import Gim.Queryable

  alias Gim.Index

  @doc """
  Builds a query from a queryable, a single node or a list of nodes.

  When nodes are given, the resulting query matches exactly those nodes
  by their `__id__`.
  """
  def query(%__MODULE__{} = query) do
    query
  end

  def query(node) when is_map(node) do
    query([node])
  end

  def query([%type{} | _rest] = nodes) do
    type
    |> to_query()
    |> __query__(nodes)
  end

  def query(queryable) do
    to_query(queryable)
  end

  @doc """
  Builds a query for the nodes reachable from the given node(s) via `edge`.

  Raises `Gim.QueryError` if the nodes' type does not define `edge`.
  """
  def query(node, edge) when is_map(node) do
    query([node], edge)
  end

  def query([%type{} | _rest] = nodes, edge) do
    case type.__schema__(:association, edge) do
      {_, _, type, _} ->
        __query__(to_query(type), edge, nodes)

      _ ->
        raise Gim.QueryError, "No edge #{inspect(edge)} in #{type}"
    end
  end

  @doc false
  # Adds an `:or` id filter for every given node of the query's own type.
  def __query__(query, []) do
    query
  end

  def __query__(%__MODULE__{type: type} = query, [%type{__id__: id} | nodes])
      when not is_nil(id) do
    query
    |> filter(:or, __id__: id)
    |> __query__(nodes)
  end

  @doc false
  # Adds an `:or` id filter for every target the given nodes reference
  # through `edge`; edge values may be raw ids or resolved nodes.
  def __query__(query, _edge, []) do
    query
  end

  def __query__(%__MODULE__{} = query, edge, [%{} = node | nodes]) do
    edge = Map.fetch!(node, edge)

    edge
    |> List.wrap()
    |> Enum.reduce(query, fn node_or_id, query ->
      if is_integer(node_or_id) do
        filter(query, :or, __id__: node_or_id)
      else
        __query__(query, node_or_id)
      end
    end)
    |> __query__(nodes)
  end

  @doc """
  Adds a new filter to the query.

  `op` selects how the new conditions combine with the existing filter
  (`:and` or `:or`); a plain condition list defaults to `:and`.
  """
  def filter(queryable, op \\ nil, filter)

  def filter(%__MODULE__{} = query, nil, {op, _} = filter) when op in [:and, :or] do
    %__MODULE__{query | filter: __join_filter__(query.filter, filter)}
  end

  def filter(%__MODULE__{} = query, op, {op, _} = filter) when op in [:and, :or] do
    %__MODULE__{query | filter: __join_filter__(query.filter, filter)}
  end

  def filter(%__MODULE__{} = query, opx, {op, _} = filter) when op in [:and, :or] do
    %__MODULE__{query | filter: __join_filter__(query.filter, {opx, [filter]})}
  end

  def filter(%__MODULE__{} = query, op, filter) when is_list(filter) do
    %__MODULE__{query | filter: __join_filter__(query.filter, {op || :and, filter})}
  end

  @doc false
  # Merges two filter trees, collapsing empty sides and equal operators.
  def __join_filter__({_, []}, filter) do
    filter
  end

  def __join_filter__(filter, {_, []}) do
    filter
  end

  def __join_filter__({op, filter_left}, {op, filter_right}) do
    {op, filter_left ++ filter_right}
  end

  def __join_filter__(left_filter, {op, filter_right}) do
    {op, [left_filter | filter_right]}
  end

  @doc """
  Adds edges to expand (pre-load) when the query is resolved.

  Accepts a single edge, a list of edges, or a nested keyword tree such
  as `[author: [:posts]]`. Raises `Gim.QueryError` on unknown edges.
  """
  def expand(queryable, edge_or_path)

  def expand(%__MODULE__{type: type, expand: expand} = query, path) do
    %__MODULE__{query | expand: __join_expand__(type, expand, path)}
  end

  @doc false
  # Merges an expand path into an existing expand tree, validating each
  # edge against the node type's schema.
  def __join_expand__(type, expand, edge) when not is_list(edge) do
    __join_expand__(type, expand, [edge])
  end

  def __join_expand__(type, expand, [{edge, nested} | path]) do
    case type.__schema__(:association, edge) do
      {_name, _cardinality, nested_type, _} ->
        nested_expand = Keyword.get(expand, edge, [])
        expand = Keyword.put(expand, edge, __join_expand__(nested_type, nested_expand, nested))
        __join_expand__(type, expand, path)

      nil ->
        raise Gim.QueryError, "No edge #{inspect(edge)} in #{type}"
    end
  end

  def __join_expand__(type, expand, [edge | path]) do
    __join_expand__(type, expand, [{edge, []} | path])
  end

  def __join_expand__(_type, expand, []) do
    expand
  end

  @doc """
  Returns the target nodes following the edges of given label for the given node.
  """
  def edges([%{__repo__: repo} | _] = nodes, assoc) do
    nodes
    |> query(assoc)
    |> repo.resolve!()
  end

  def edges(node, assoc) when is_map(node) do
    edges([node], assoc)
  end

  @doc """
  Returns whether the given node has any outgoing edges.
  """
  def has_edges?(%type{} = node) do
    assocs = type.__schema__(:associations)
    Enum.any?(assocs, &has_edge?(node, &1))
  end

  @doc """
  Returns whether the given node has any outgoing edges for the given label.

  Raises `Gim.UnknownEdgeError` if the node type has no such edge.
  """
  def has_edge?(%type{} = node, assoc) do
    edge = Map.get(node, assoc)

    case type.__schema__(:association, assoc) do
      {_, :one, _, _} ->
        !is_nil(edge)

      {_, :many, _, _} ->
        # O(1) emptiness check instead of computing the full length.
        edge != []

      _ ->
        raise Gim.UnknownEdgeError, "No edge #{inspect(assoc)} in #{inspect(type)}"
    end
  end

  @doc """
  Adds edges labelled `assoc` from `node` to the given target node(s).
  """
  def add_edge(%struct{} = node, assoc, targets) when is_list(targets) do
    assoc = struct.__schema__(:association, assoc)
    Enum.reduce(targets, node, &__add_edge__(&2, assoc, &1))
  end

  def add_edge(%struct{} = node, assoc, target) do
    assoc = struct.__schema__(:association, assoc)
    __add_edge__(node, assoc, target)
  end

  @doc false
  def __add_edge__(node, {assoc, :one, type, _}, %type{__id__: id}) do
    %{node | assoc => id}
  end

  def __add_edge__(node, {assoc, :many, type, _}, %type{__id__: id}) do
    ids = Index.add(Map.fetch!(node, assoc), id)
    %{node | assoc => ids}
  end

  @doc """
  Removes edges labelled `assoc` from `node` to the given target node(s).
  """
  def delete_edge(%struct{} = node, assoc, targets) when is_list(targets) do
    assoc = struct.__schema__(:association, assoc)
    Enum.reduce(targets, node, &__delete_edge__(&2, assoc, &1))
  end

  def delete_edge(%struct{} = node, assoc, target) do
    assoc = struct.__schema__(:association, assoc)
    __delete_edge__(node, assoc, target)
  end

  @doc false
  def __delete_edge__(node, {assoc, :one, type, _}, %type{}) do
    %{node | assoc => nil}
  end

  def __delete_edge__(node, {assoc, :many, type, _}, %type{__id__: id}) do
    ids = Index.remove(Map.fetch!(node, assoc), id)
    %{node | assoc => ids}
  end

  @doc """
  Removes all outgoing edges from the given node.
  """
  def clear_edges(%struct{} = node) do
    assocs = struct.__schema__(:associations)

    Enum.reduce(assocs, node, fn assoc, node ->
      clear_edge(node, assoc)
    end)
  end

  @doc """
  Removes all outgoing edges with the given label from the node.

  Unknown labels are ignored and the node is returned unchanged.
  """
  def clear_edge(%struct{} = node, assoc) do
    case struct.__schema__(:association, assoc) do
      {_, :one, _, _} ->
        Map.put(node, assoc, nil)

      {_, :many, _, _} ->
        Map.put(node, assoc, [])

      _ ->
        node
    end
  end

  # Node set operations

  @doc """
  Returns the nodes of `nodes1` that also appear in `nodes2`, compared
  by `__id__`. Order (and duplicates) of `nodes1` are preserved.
  """
  def intersection(nodes1, nodes2) when is_list(nodes1) and is_list(nodes2) do
    # TODO: check node type
    # Index the second list once for O(n + m) instead of O(n * m).
    ids2 = MapSet.new(nodes2, fn %{__id__: b} -> b end)
    Enum.filter(nodes1, fn %{__id__: a} -> MapSet.member?(ids2, a) end)
  end

  @doc """
  Filters the given node(s) to those that reference `target` through `edge`.
  """
  def reachable(nodes, edge, target) when is_list(nodes) do
    Enum.filter(nodes, fn node ->
      reachable(node, edge, target)
    end)
  end

  def reachable(node, edge, target) do
    # TODO: check node type
    edges = Map.fetch!(node, edge)
    reachable(edges, target)
  end

  defp reachable(edges, target) when is_list(edges) do
    Enum.any?(edges, fn e ->
      reachable(e, target)
    end)
  end

  defp reachable(edge, %{__id__: id}) do
    edge == id
  end

  # Repo operations

  @doc """
  Computes all isolated nodes from a repo, i.e. nodes with neither
  outgoing nor incoming edges.
  """
  def isolated(repo) do
    all_nodes = repo.dump()

    # Start from every node without outgoing edges ...
    lonely =
      all_nodes
      |> Enum.reject(&has_edges?/1)
      |> Enum.map(fn %{__struct__: struct, __id__: id} -> {struct, id} end)
      |> Enum.into(MapSet.new())

    # ... then drop every node referenced by some other node.
    # BUGFIX: the reduction result was previously discarded, so nodes with
    # only incoming edges were wrongly reported as isolated.
    lonely =
      Enum.reduce(all_nodes, lonely, fn %{__struct__: struct} = node, lonely ->
        assocs = struct.__schema__(:associations)

        Enum.reduce(assocs, lonely, fn assoc, lonely ->
          type = struct.__schema__(:type, assoc)
          edges = Map.fetch!(node, assoc)
          set_delete(lonely, type, edges)
        end)
      end)

    Enum.filter(all_nodes, fn %{__struct__: struct, __id__: id} ->
      MapSet.member?(lonely, {struct, id})
    end)
  end

  # Removes one edge id or a list of edge ids of the given type from the set.
  defp set_delete(set, type, edges) when is_list(edges) do
    Enum.reduce(edges, set, fn edge, set ->
      set_delete(set, type, edge)
    end)
  end

  defp set_delete(set, type, edge) do
    MapSet.delete(set, {type, edge})
  end
end
| 24.613208 | 95 | 0.619905 |
795b92eb41d0e3d2f674a04d3df14283888cd03c | 8,359 | ex | Elixir | apps/astarte_realm_management_api/lib/astarte_realm_management_api/rpc/realm_management.ex | Spidey20202022/astarte | 3950855c592b34363af0cf7f8a921762ce64e512 | [
"Apache-2.0"
] | 1 | 2020-02-04T13:15:22.000Z | 2020-02-04T13:15:22.000Z | apps/astarte_realm_management_api/lib/astarte_realm_management_api/rpc/realm_management.ex | Spidey20202022/astarte | 3950855c592b34363af0cf7f8a921762ce64e512 | [
"Apache-2.0"
] | 1 | 2020-01-20T09:52:48.000Z | 2020-01-20T09:52:48.000Z | apps/astarte_realm_management_api/lib/astarte_realm_management_api/rpc/realm_management.ex | Spidey20202022/astarte | 3950855c592b34363af0cf7f8a921762ce64e512 | [
"Apache-2.0"
] | 1 | 2020-02-04T13:15:50.000Z | 2020-02-04T13:15:50.000Z | #
# This file is part of Astarte.
#
# Copyright 2017-2018 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule Astarte.RealmManagement.API.RPC.RealmManagement do
  @moduledoc """
  AMQP RPC client for the Realm Management service.

  Each public function builds the matching protobuf request struct, wraps
  it in a `Call`, sends it to the Realm Management AMQP queue and decodes
  the protobuf reply into `:ok` / `{:ok, result}` / `{:error, reason}`.
  """

  alias Astarte.RPC.Protocol.RealmManagement.{
    Call,
    DeleteInterface,
    DeleteTrigger,
    GenericErrorReply,
    GenericOkReply,
    GetHealth,
    GetHealthReply,
    GetInterfacesList,
    GetInterfacesListReply,
    GetInterfaceSource,
    GetInterfaceSourceReply,
    GetInterfaceVersionsList,
    GetInterfaceVersionsListReply,
    GetInterfaceVersionsListReplyVersionTuple,
    GetJWTPublicKeyPEM,
    GetJWTPublicKeyPEMReply,
    GetTrigger,
    GetTriggerReply,
    GetTriggersList,
    GetTriggersListReply,
    InstallInterface,
    InstallTrigger,
    Reply,
    UpdateInterface,
    UpdateJWTPublicKeyPEM
  }

  alias Astarte.Core.Triggers.SimpleTriggersProtobuf.TaggedSimpleTrigger
  alias Astarte.Core.Triggers.Trigger
  alias Astarte.RealmManagement.API.Config
  require Logger

  # NOTE(review): resolved at compile time, so changing the configured RPC
  # client requires a recompile — confirm this freezing is intended.
  @rpc_client Config.rpc_client()
  # AMQP queue the Realm Management service consumes requests from.
  @destination Astarte.RPC.Protocol.RealmManagement.amqp_queue()

  # Lists the available [major, minor] versions of an interface in a realm.
  def get_interface_versions_list(realm_name, interface_name) do
    %GetInterfaceVersionsList{
      realm_name: realm_name,
      interface_name: interface_name
    }
    |> encode_call(:get_interface_versions_list)
    |> @rpc_client.rpc_call(@destination)
    |> decode_reply()
    |> extract_reply()
  end

  # Lists the names of all interfaces installed in a realm.
  def get_interfaces_list(realm_name) do
    %GetInterfacesList{
      realm_name: realm_name
    }
    |> encode_call(:get_interfaces_list)
    |> @rpc_client.rpc_call(@destination)
    |> decode_reply()
    |> extract_reply()
  end

  # Fetches the JSON source of a specific interface major version.
  def get_interface(realm_name, interface_name, interface_major_version) do
    %GetInterfaceSource{
      realm_name: realm_name,
      interface_name: interface_name,
      interface_major_version: interface_major_version
    }
    |> encode_call(:get_interface_source)
    |> @rpc_client.rpc_call(@destination)
    |> decode_reply()
    |> extract_reply()
  end

  # Installs a new interface; async_operation makes the service reply
  # immediately with {:ok, :started} while the install continues.
  def install_interface(realm_name, interface_json) do
    %InstallInterface{
      realm_name: realm_name,
      interface_json: interface_json,
      async_operation: true
    }
    |> encode_call(:install_interface)
    |> @rpc_client.rpc_call(@destination)
    |> decode_reply()
    |> extract_reply()
  end

  # Updates an existing interface (asynchronously on the service side).
  def update_interface(realm_name, interface_json) do
    %UpdateInterface{
      realm_name: realm_name,
      interface_json: interface_json,
      async_operation: true
    }
    |> encode_call(:update_interface)
    |> @rpc_client.rpc_call(@destination)
    |> decode_reply()
    |> extract_reply()
  end

  # Deletes a specific interface major version (asynchronously).
  def delete_interface(realm_name, interface_name, interface_major_version) do
    %DeleteInterface{
      realm_name: realm_name,
      interface_name: interface_name,
      interface_major_version: interface_major_version,
      async_operation: true
    }
    |> encode_call(:delete_interface)
    |> @rpc_client.rpc_call(@destination)
    |> decode_reply()
    |> extract_reply()
  end

  # Fetches the realm's JWT public key in PEM format.
  def get_jwt_public_key_pem(realm_name) do
    %GetJWTPublicKeyPEM{
      realm_name: realm_name
    }
    |> encode_call(:get_jwt_public_key_pem)
    |> @rpc_client.rpc_call(@destination)
    |> decode_reply()
    |> extract_reply()
  end

  # Replaces the realm's JWT public key.
  def update_jwt_public_key_pem(realm_name, jwt_public_key_pem) do
    %UpdateJWTPublicKeyPEM{
      realm_name: realm_name,
      jwt_public_key_pem: jwt_public_key_pem
    }
    |> encode_call(:update_jwt_public_key_pem)
    |> @rpc_client.rpc_call(@destination)
    |> decode_reply()
    |> extract_reply()
  end

  # Installs a trigger; simple triggers are serialized to protobuf before
  # being embedded in the request.
  def install_trigger(realm_name, trigger_name, action, tagged_simple_triggers) do
    serialized_tagged_simple_triggers =
      Enum.map(tagged_simple_triggers, &TaggedSimpleTrigger.encode/1)

    %InstallTrigger{
      realm_name: realm_name,
      trigger_name: trigger_name,
      action: action,
      serialized_tagged_simple_triggers: serialized_tagged_simple_triggers
    }
    |> encode_call(:install_trigger)
    |> @rpc_client.rpc_call(@destination)
    |> decode_reply()
    |> extract_reply()
  end

  # Fetches a trigger (and its simple triggers) by name.
  def get_trigger(realm_name, trigger_name) do
    %GetTrigger{
      realm_name: realm_name,
      trigger_name: trigger_name
    }
    |> encode_call(:get_trigger)
    |> @rpc_client.rpc_call(@destination)
    |> decode_reply()
    |> extract_reply()
  end

  # Lists the names of all triggers installed in a realm.
  def get_triggers_list(realm_name) do
    %GetTriggersList{
      realm_name: realm_name
    }
    |> encode_call(:get_triggers_list)
    |> @rpc_client.rpc_call(@destination)
    |> decode_reply()
    |> extract_reply()
  end

  # Deletes a trigger by name.
  def delete_trigger(realm_name, trigger_name) do
    %DeleteTrigger{
      realm_name: realm_name,
      trigger_name: trigger_name
    }
    |> encode_call(:delete_trigger)
    |> @rpc_client.rpc_call(@destination)
    |> decode_reply()
    |> extract_reply()
  end

  # Queries the health status of the Realm Management service.
  def get_health do
    %GetHealth{}
    |> encode_call(:get_health)
    |> @rpc_client.rpc_call(@destination)
    |> decode_reply()
    |> extract_reply()
  end

  # Wraps a request struct in a Call protobuf and serializes it.
  defp encode_call(call, callname) do
    %Call{call: {callname, call}}
    |> Call.encode()
  end

  # Deserializes a successful transport reply; transport errors pass through.
  defp decode_reply({:ok, encoded_reply}) when is_binary(encoded_reply) do
    %Reply{reply: reply} = Reply.decode(encoded_reply)

    reply
  end

  defp decode_reply({:error, reason}) do
    {:error, reason}
  end

  # Maps each decoded protobuf reply variant onto the public return values.
  defp extract_reply({:generic_ok_reply, %GenericOkReply{async_operation: async}}) do
    if async do
      {:ok, :started}
    else
      :ok
    end
  end

  defp extract_reply({:get_health_reply, %GetHealthReply{status: status}}) do
    # The protobuf enum uses upper-case atoms; expose lower-case ones.
    lowercase_status =
      case status do
        :READY -> :ready
        :DEGRADED -> :degraded
        :BAD -> :bad
        :ERROR -> :error
      end

    {:ok, %{status: lowercase_status}}
  end

  defp extract_reply({:generic_error_reply, %GenericErrorReply{error_name: name}}) do
    # to_existing_atom prevents remote peers from creating new atoms.
    try do
      reason = String.to_existing_atom(name)
      {:error, reason}
    rescue
      ArgumentError ->
        _ = Logger.warn("Received unknown error: #{inspect(name)}.", tag: "amqp_generic_error")
        {:error, :unknown}
    end
  end

  defp extract_reply(
         {:get_interface_versions_list_reply, %GetInterfaceVersionsListReply{versions: versions}}
       ) do
    result =
      for version <- versions do
        %GetInterfaceVersionsListReplyVersionTuple{
          major_version: major_version,
          minor_version: minor_version
        } = version

        [major_version: major_version, minor_version: minor_version]
      end

    {:ok, result}
  end

  defp extract_reply(
         {:get_interfaces_list_reply, %GetInterfacesListReply{interfaces_names: list}}
       ) do
    {:ok, list}
  end

  defp extract_reply({:get_interface_source_reply, %GetInterfaceSourceReply{source: source}}) do
    {:ok, source}
  end

  defp extract_reply(
         {:get_jwt_public_key_pem_reply, %GetJWTPublicKeyPEMReply{jwt_public_key_pem: pem}}
       ) do
    {:ok, pem}
  end

  defp extract_reply(
         {:get_trigger_reply,
          %GetTriggerReply{
            trigger_data: trigger_data,
            serialized_tagged_simple_triggers: serialized_tagged_simple_triggers
          }}
       ) do
    # The trigger itself and its simple triggers arrive protobuf-encoded.
    %Trigger{
      name: trigger_name,
      action: trigger_action
    } = Trigger.decode(trigger_data)

    tagged_simple_triggers =
      for serialized_tagged_simple_trigger <- serialized_tagged_simple_triggers do
        TaggedSimpleTrigger.decode(serialized_tagged_simple_trigger)
      end

    {
      :ok,
      %{
        trigger_name: trigger_name,
        trigger_action: trigger_action,
        tagged_simple_triggers: tagged_simple_triggers
      }
    }
  end

  defp extract_reply({:get_triggers_list_reply, %GetTriggersListReply{triggers_names: triggers}}) do
    {:ok, triggers}
  end

  defp extract_reply({:error, :rpc_error}) do
    {:error, :rpc_error}
  end
end
| 26.452532 | 100 | 0.695777 |
795bae92ed8b51b218182b2ae795eb03ee0a629b | 15,667 | ex | Elixir | lib/elixir/lib/io.ex | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 1 | 2019-08-13T23:22:33.000Z | 2019-08-13T23:22:33.000Z | lib/elixir/lib/io.ex | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/io.ex | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | null | null | null | defmodule IO do
@moduledoc """
Functions handling input/output (IO).
Many functions in this module expect an IO device as an argument.
An IO device must be a PID or an atom representing a process.
For convenience, Elixir provides `:stdio` and `:stderr` as
shortcuts to Erlang's `:standard_io` and `:standard_error`.
The majority of the functions expect chardata, i.e. strings or
lists of characters and strings. In case another type is given,
functions will convert to string via the `String.Chars` protocol
(as shown in typespecs).
The functions starting with `bin` expect iodata as an argument,
i.e. binaries or lists of bytes and binaries.
## IO devices
An IO device may be an atom or a PID. In case it is an atom,
the atom must be the name of a registered process. In addition,
Elixir provides two shortcuts:
* `:stdio` - a shortcut for `:standard_io`, which maps to
the current `Process.group_leader/0` in Erlang
* `:stderr` - a shortcut for the named process `:standard_error`
provided in Erlang
IO devices maintain their position, which means subsequent calls to any
reading or writing functions will start from the place where the device
was last accessed. The position of files can be changed using the
`:file.position/2` function.
"""
@type device :: atom | pid
@type nodata :: {:error, term} | :eof
@type chardata :: String.t() | maybe_improper_list(char | chardata, String.t() | [])
defguardp is_iodata(data) when is_list(data) or is_binary(data)
@doc """
Reads from the IO `device`.
The `device` is iterated by the given number of characters or line by line if
`:line` is given.
Alternatively, if `:all` is given, then whole `device` is returned.
It returns:
* `data` - the output characters
* `:eof` - end of file was encountered
* `{:error, reason}` - other (rare) error condition;
for instance, `{:error, :estale}` if reading from an
NFS volume
If `:all` is given, `:eof` is never returned, but an
empty string in case the device has reached EOF.
"""
@spec read(device, :all | :line | non_neg_integer) :: chardata | nodata
def read(device \\ :stdio, line_or_chars)
def read(device, :all) do
do_read_all(map_dev(device), "")
end
def read(device, :line) do
:io.get_line(map_dev(device), '')
end
def read(device, count) when is_integer(count) and count >= 0 do
:io.get_chars(map_dev(device), '', count)
end
defp do_read_all(mapped_dev, acc) do
case :io.get_line(mapped_dev, "") do
line when is_binary(line) -> do_read_all(mapped_dev, acc <> line)
:eof -> acc
other -> other
end
end
@doc """
Reads from the IO `device`. The operation is Unicode unsafe.
The `device` is iterated by the given number of bytes or line by line if
`:line` is given.
Alternatively, if `:all` is given, then whole `device` is returned.
It returns:
* `data` - the output bytes
* `:eof` - end of file was encountered
* `{:error, reason}` - other (rare) error condition;
for instance, `{:error, :estale}` if reading from an
NFS volume
If `:all` is given, `:eof` is never returned, but an
empty string in case the device has reached EOF.
Note: do not use this function on IO devices in Unicode mode
as it will return the wrong result.
"""
@spec binread(device, :all | :line | non_neg_integer) :: iodata | nodata
def binread(device \\ :stdio, line_or_chars)
def binread(device, :all) do
do_binread_all(map_dev(device), "")
end
def binread(device, :line) do
case :file.read_line(map_dev(device)) do
{:ok, data} -> data
other -> other
end
end
def binread(device, count) when is_integer(count) and count >= 0 do
case :file.read(map_dev(device), count) do
{:ok, data} -> data
other -> other
end
end
@read_all_size 4096
defp do_binread_all(mapped_dev, acc) do
case :file.read(mapped_dev, @read_all_size) do
{:ok, data} -> do_binread_all(mapped_dev, acc <> data)
:eof -> acc
other -> other
end
end
@doc """
Writes `chardata` to the given `device`.
By default, the `device` is the standard output.
## Examples
IO.write("sample")
#=> sample
IO.write(:stderr, "error")
#=> error
"""
@spec write(device, chardata | String.Chars.t()) :: :ok
def write(device \\ :stdio, chardata) do
:io.put_chars(map_dev(device), to_chardata(chardata))
end
@doc """
Writes `iodata` to the given `device`.
This operation is meant to be used with "raw" devices
that are started without an encoding. The given `iodata`
is written as is to the device, without conversion.
Use `write/2` for devices with encoding.
Important: do **not** use this function on IO devices in
Unicode mode as it will write the wrong data. In particular,
the standard IO device is set to Unicode by default, so writing
to stdio with this function will likely result in the wrong data
being sent down the wire.
"""
@spec binwrite(device, iodata) :: :ok | {:error, term}
def binwrite(device \\ :stdio, iodata) when is_iodata(iodata) do
:file.write(map_dev(device), iodata)
end
@doc """
Writes `item` to the given `device`, similar to `write/2`,
but adds a newline at the end.
By default, the `device` is the standard output. It returns `:ok`
if it succeeds.
## Examples
IO.puts("Hello World!")
#=> Hello World!
IO.puts(:stderr, "error")
#=> error
"""
@spec puts(device, chardata | String.Chars.t()) :: :ok
def puts(device \\ :stdio, item) do
:io.put_chars(map_dev(device), [to_chardata(item), ?\n])
end
@doc """
Writes a `message` to stderr, along with the given `stacktrace`.
This function also notifies the compiler a warning was printed
(in case --warnings-as-errors was enabled). It returns `:ok`
if it succeeds.
An empty list can be passed to avoid stacktrace printing.
## Examples
stacktrace = [{MyApp, :main, 1, [file: 'my_app.ex', line: 4]}]
IO.warn("variable bar is unused", stacktrace)
#=> warning: variable bar is unused
#=> my_app.ex:4: MyApp.main/1
"""
@spec warn(chardata | String.Chars.t(), Exception.stacktrace()) :: :ok
def warn(message, []) do
:elixir_errors.io_warn(nil, nil, [to_chardata(message), ?\n])
end
def warn(message, [{_, _, _, opts} | _] = stacktrace) do
formatted_trace = Enum.map_join(stacktrace, "\n ", &Exception.format_stacktrace_entry(&1))
message = [to_chardata(message), ?\n, " ", formatted_trace, ?\n]
line = opts[:line]
file = opts[:file]
:elixir_errors.io_warn(line, file && List.to_string(file), message)
end
@doc """
Writes a `message` to stderr, along with the current stacktrace.
It returns `:ok` if it succeeds.
## Examples
IO.warn("variable bar is unused")
#=> warning: variable bar is unused
#=> (iex) evaluator.ex:108: IEx.Evaluator.eval/4
"""
@spec warn(chardata | String.Chars.t()) :: :ok
def warn(message) do
{:current_stacktrace, stacktrace} = Process.info(self(), :current_stacktrace)
warn(message, Enum.drop(stacktrace, 2))
end
@doc """
Inspects and writes the given `item` to the device.
It's important to note that it returns the given `item` unchanged.
This makes it possible to "spy" on values by inserting an
`IO.inspect/2` call almost anywhere in your code, for example,
in the middle of a pipeline.
It enables pretty printing by default with width of
80 characters. The width can be changed by explicitly
passing the `:width` option.
The output can be decorated with a label, by providing the `:label`
option to easily distinguish it from other `IO.inspect/2` calls.
The label will be printed before the inspected `item`.
See `Inspect.Opts` for a full list of remaining formatting options.
## Examples
IO.inspect(<<0, 1, 2>>, width: 40)
Prints:
<<0, 1, 2>>
We can use the `:label` option to decorate the output:
IO.inspect(1..100, label: "a wonderful range")
Prints:
a wonderful range: 1..100
The `:label` option is especially useful with pipelines:
[1, 2, 3]
|> IO.inspect(label: "before")
|> Enum.map(&(&1 * 2))
|> IO.inspect(label: "after")
|> Enum.sum()
Prints:
before: [1, 2, 3]
after: [2, 4, 6]
"""
@spec inspect(item, keyword) :: item when item: var
def inspect(item, opts \\ []) do
inspect(:stdio, item, opts)
end
@doc """
Inspects `item` according to the given options using the IO `device`.
See `inspect/2` for a full list of options.
"""
@spec inspect(device, item, keyword) :: item when item: var
def inspect(device, item, opts) when is_list(opts) do
label = if label = opts[:label], do: [to_chardata(label), ": "], else: []
opts = struct(Inspect.Opts, opts)
doc = Inspect.Algebra.group(Inspect.Algebra.to_doc(item, opts))
chardata = Inspect.Algebra.format(doc, opts.width)
puts(device, [label, chardata])
item
end
@doc """
Gets a number of bytes from IO device `:stdio`.
If `:stdio` is a Unicode device, `count` implies
the number of Unicode code points to be retrieved.
Otherwise, `count` is the number of raw bytes to be retrieved.
See `IO.getn/3` for a description of return values.
"""
@spec getn(chardata | String.Chars.t(), pos_integer) :: chardata | nodata
@spec getn(device, chardata | String.Chars.t()) :: chardata | nodata
def getn(prompt, count \\ 1)
def getn(prompt, count) when is_integer(count) and count > 0 do
getn(:stdio, prompt, count)
end
def getn(device, prompt) when not is_integer(prompt) do
getn(device, prompt, 1)
end
@doc """
Gets a number of bytes from the IO `device`.

If the IO `device` is a Unicode device, `count` implies
the number of Unicode code points to be retrieved.
Otherwise, `count` is the number of raw bytes to be retrieved.

It returns:

  * `data` - the input characters

  * `:eof` - end of file was encountered

  * `{:error, reason}` - other (rare) error condition;
    for instance, `{:error, :estale}` if reading from an
    NFS volume

"""
@spec getn(device, chardata | String.Chars.t(), pos_integer) :: chardata | nodata
def getn(device, prompt, count) when is_integer(count) and count > 0 do
  erl_device = map_dev(device)
  :io.get_chars(erl_device, to_chardata(prompt), count)
end
@doc ~S"""
Reads a line from the IO `device`.

It returns:

  * `data` - the characters in the line terminated
    by a line-feed (LF) or end of file (EOF)

  * `:eof` - end of file was encountered

  * `{:error, reason}` - other (rare) error condition;
    for instance, `{:error, :estale}` if reading from an
    NFS volume

## Examples

To display "What is your name?" as a prompt and await user input:

    IO.gets("What is your name?\n")

"""
@spec gets(device, chardata | String.Chars.t()) :: chardata | nodata
def gets(device \\ :stdio, prompt) do
  device
  |> map_dev()
  |> :io.get_line(to_chardata(prompt))
end
@doc """
Converts the IO `device` into an `IO.Stream`.

An `IO.Stream` implements both `Enumerable` and
`Collectable`, allowing it to be used for both read
and write.

The `device` is iterated by the given number of characters or line by line if
`:line` is given.

This reads from the IO as UTF-8. Check out
`IO.binstream/2` to handle the IO as a raw binary.

Note that an IO stream has side effects and every time
you go over the stream you may get different results.

## Examples

Here is an example on how we mimic an echo server
from the command line:

    Enum.each(IO.stream(:stdio, :line), &IO.write(&1))

"""
@spec stream(device, :line | pos_integer) :: Enumerable.t()
def stream(device, line_or_codepoints)
    when line_or_codepoints == :line
    when is_integer(line_or_codepoints) and line_or_codepoints > 0 do
  # `false` marks the stream as a character (UTF-8) stream, not raw bytes.
  device
  |> map_dev()
  |> IO.Stream.__build__(false, line_or_codepoints)
end
@doc """
Converts the IO `device` into an `IO.Stream`. The operation is Unicode unsafe.

An `IO.Stream` implements both `Enumerable` and
`Collectable`, allowing it to be used for both read
and write.

The `device` is iterated by the given number of bytes or line by line if
`:line` is given.
This reads from the IO device as a raw binary.

Note that an IO stream has side effects and every time
you go over the stream you may get different results.

Finally, do not use this function on IO devices in Unicode
mode as it will return the wrong result.
"""
@spec binstream(device, :line | pos_integer) :: Enumerable.t()
def binstream(device, line_or_bytes)
    when line_or_bytes == :line
    when is_integer(line_or_bytes) and line_or_bytes > 0 do
  # `true` marks the stream as a raw-byte stream.
  device
  |> map_dev()
  |> IO.Stream.__build__(true, line_or_bytes)
end
@doc """
Converts chardata (a list of integers representing code points,
lists and strings) into a string.

In case the conversion fails, it raises an `UnicodeConversionError`.
If a string is given, it returns the string itself.

## Examples

    iex> IO.chardata_to_string([0x00E6, 0x00DF])
    "æß"

    iex> IO.chardata_to_string([0x0061, "bc"])
    "abc"

    iex> IO.chardata_to_string("string")
    "string"

"""
@spec chardata_to_string(chardata) :: String.t()
# A binary is already a UTF-8 string, so it passes through untouched.
def chardata_to_string(chardata) when is_binary(chardata), do: chardata
# Lists are flattened and UTF-8 encoded by the standard library.
def chardata_to_string(chardata) when is_list(chardata), do: List.to_string(chardata)
@doc """
Converts iodata (a list of integers representing bytes, lists
and binaries) into a binary.
The operation is Unicode unsafe.

Notice that this function treats lists of integers as raw bytes
and does not perform any kind of encoding conversion. If you want
to convert from a charlist to a string (UTF-8 encoded), please
use `chardata_to_string/1` instead.

If this function receives a binary, the same binary is returned.

Inlined by the compiler.

## Examples

    iex> bin1 = <<1, 2, 3>>
    iex> bin2 = <<4, 5>>
    iex> bin3 = <<6>>
    iex> IO.iodata_to_binary([bin1, 1, [2, 3, bin2], 4 | bin3])
    <<1, 2, 3, 1, 2, 3, 4, 5, 4, 6>>

    iex> bin = <<1, 2, 3>>
    iex> IO.iodata_to_binary(bin)
    <<1, 2, 3>>

"""
@spec iodata_to_binary(iodata) :: binary
defdelegate iodata_to_binary(iodata), to: :erlang, as: :iolist_to_binary
@doc """
Returns the size of an iodata.

Inlined by the compiler.

## Examples

    iex> IO.iodata_length([1, 2 | <<3, 4>>])
    4

"""
@spec iodata_length(iodata) :: non_neg_integer
defdelegate iodata_length(iodata), to: :erlang, as: :iolist_size
@doc false
# Enumerable step function for `stream/2`: reads one chunk and returns
# the `{items, acc}` / `{:halt, acc}` shape Stream.resource expects.
def each_stream(device, line_or_codepoints) do
  chunk = read(device, line_or_codepoints)

  case chunk do
    :eof -> {:halt, device}
    {:error, reason} -> raise IO.StreamError, reason: reason
    data -> {[data], device}
  end
end
@doc false
# Enumerable step function for `binstream/2`: raw-byte counterpart
# of `each_stream/2`.
def each_binstream(device, line_or_chars) do
  chunk = binread(device, line_or_chars)

  case chunk do
    :eof -> {:halt, device}
    {:error, reason} -> raise IO.StreamError, reason: reason
    data -> {[data], device}
  end
end
@compile {:inline, map_dev: 1, to_chardata: 1}

# Map the Elixir names for standard IO and error to Erlang names
defp map_dev(:stdio), do: :standard_io
defp map_dev(:stderr), do: :standard_error
# Any other atom (a registered name), pid, or Erlang io-device tuple is
# passed through untouched for :io to resolve.
defp map_dev(other) when is_atom(other) or is_pid(other) or is_tuple(other), do: other

# Prompts/labels may be chardata (kept as-is) or anything implementing
# String.Chars (converted to a binary).
defp to_chardata(list) when is_list(list), do: list
defp to_chardata(other), do: to_string(other)
end
| 28.178058 | 95 | 0.662411 |
795bbfdd9b65c7a3324f5c2fc9c9c22fced2c395 | 135 | exs | Elixir | test/ums_pay_test.exs | leozhang37/ums_pay | f36345452388d228bc7f5f1f0106ec5dee63c382 | [
"MIT"
] | null | null | null | test/ums_pay_test.exs | leozhang37/ums_pay | f36345452388d228bc7f5f1f0106ec5dee63c382 | [
"MIT"
] | null | null | null | test/ums_pay_test.exs | leozhang37/ums_pay | f36345452388d228bc7f5f1f0106ec5dee63c382 | [
"MIT"
] | null | null | null | defmodule UmsPayTest do
use ExUnit.Case
doctest UmsPay
test "greets the world" do
assert UmsPay.hello() == :world
end
end
| 15 | 35 | 0.703704 |
795bf92c3c069d4c27d60c5bb8dd7c2388bfff21 | 27,461 | ex | Elixir | lib/ecto/adapters/postgres/connection.ex | DavidAlphaFox/ecto | a3eae38d1d05c440893f724c2f04a8332e32d7ac | [
"Apache-2.0"
] | 1 | 2019-05-07T15:05:52.000Z | 2019-05-07T15:05:52.000Z | lib/ecto/adapters/postgres/connection.ex | DavidAlphaFox/ecto | a3eae38d1d05c440893f724c2f04a8332e32d7ac | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/postgres/connection.ex | DavidAlphaFox/ecto | a3eae38d1d05c440893f724c2f04a8332e32d7ac | [
"Apache-2.0"
] | null | null | null | if Code.ensure_loaded?(Postgrex.Connection) do
defmodule Ecto.Adapters.Postgres.Connection do
@moduledoc false
@default_port 5432
@behaviour Ecto.Adapters.Connection
@behaviour Ecto.Adapters.SQL.Query
## Connection
# Starts a Postgrex connection process for the given repo options.
#
# Installs the adapter's datetime extension plus JSON support (backed by
# the configured :json_library) and normalizes string port numbers.
def connect(opts) do
  json = Application.get_env(:ecto, :json_library)

  extensions = [
    {Ecto.Adapters.Postgres.DateTime, []},
    {Postgrex.Extensions.JSON, library: json}
  ]

  opts
  |> Keyword.update(:extensions, extensions, &(&1 ++ extensions))
  |> Keyword.update(:port, @default_port, &normalize_port/1)
  |> Postgrex.Connection.start_link()
end
def query(conn, sql, params, opts) do
  # Unwrap tagged parameters so only raw values reach the driver.
  params = Enum.map params, fn
    %Ecto.Query.Tagged{value: value} -> value
    value -> value
  end

  # Run the query through the Postgrex connection process.
  case Postgrex.Connection.query(conn, sql, params, opts) do
    {:ok, res} -> {:ok, Map.from_struct(res)}
    {:error, _} = err -> err
  end
end
# Accept the port either as an integer or as a decimal string
# (e.g. parsed from a database URL).
defp normalize_port(port) when is_integer(port), do: port
defp normalize_port(port) when is_binary(port), do: String.to_integer(port)
# Translate Postgrex constraint-violation errors into Ecto's
# `[{type, constraint_name}]` keyword format.
def to_constraints(%Postgrex.Error{postgres: %{code: :unique_violation, constraint: constraint}}),
  do: [unique: constraint]
def to_constraints(%Postgrex.Error{postgres: %{code: :foreign_key_violation, constraint: constraint}}),
  do: [foreign_key: constraint]
def to_constraints(%Postgrex.Error{postgres: %{code: :exclusion_violation, constraint: constraint}}),
  do: [exclude: constraint]

# Postgres 9.2 and earlier does not provide the constraint field,
# so the quoted constraint name is scraped from the error message.
def to_constraints(%Postgrex.Error{postgres: %{code: :unique_violation, message: message}}) do
  case :binary.split(message, " unique constraint ") do
    [_, quoted] -> [unique: strip_quotes(quoted)]
    _ -> []
  end
end
def to_constraints(%Postgrex.Error{postgres: %{code: :foreign_key_violation, message: message}}) do
  case :binary.split(message, " foreign key constraint ") do
    [_, quoted] -> [foreign_key: strip_quotes(quoted)]
    _ -> []
  end
end
def to_constraints(%Postgrex.Error{postgres: %{code: :exclusion_violation, message: message}}) do
  case :binary.split(message, " exclude constraint ") do
    [_, quoted] -> [exclude: strip_quotes(quoted)]
    _ -> []
  end
end

# Any other error carries no constraint information.
def to_constraints(%Postgrex.Error{}),
  do: []
# Drops the surrounding double quotes from a quoted identifier,
# e.g. ~s("posts_pkey") becomes "posts_pkey".
defp strip_quotes(quoted) do
  inner_size = byte_size(quoted) - 2
  <<_open, inner::binary-size(inner_size), _close>> = quoted
  inner
end
## Transaction

# Raw SQL snippets used to drive the adapter's transaction lifecycle.

def begin_transaction, do: "BEGIN"

def rollback, do: "ROLLBACK"

def commit, do: "COMMIT"

def savepoint(savepoint), do: "SAVEPOINT " <> savepoint

def rollback_to_savepoint(savepoint), do: "ROLLBACK TO SAVEPOINT " <> savepoint
## Query
alias Ecto.Query
alias Ecto.Query.SelectExpr
alias Ecto.Query.QueryExpr
alias Ecto.Query.JoinExpr
# Builds a complete SELECT statement for the given query. Clauses that
# come back empty are dropped by assemble/1 (it flattens empty lists).
def all(query) do
  sources = create_names(query)
  distinct_exprs = distinct_exprs(query, sources)

  from = from(sources)
  select = select(query, distinct_exprs, sources)
  join = join(query, sources)
  where = where(query, sources)
  group_by = group_by(query, sources)
  having = having(query, sources)
  # DISTINCT ON expressions must also lead ORDER BY, hence the shared exprs.
  order_by = order_by(query, distinct_exprs, sources)
  limit = limit(query, sources)
  offset = offset(query, sources)
  lock = lock(query.lock)

  assemble([select, from, join, where, group_by, having, order_by, limit, offset, lock])
end
# Builds an UPDATE statement covering every row matched by the query.
def update_all(query) do
  sources = create_names(query)
  {table, name, _model} = elem(sources, 0)

  fields = update_fields(query, sources)
  # Joins become a FROM list; their ON expressions are folded into WHERE.
  {join, wheres} = update_join(query, sources)
  where = where(%{query | wheres: wheres ++ query.wheres}, sources)

  assemble(["UPDATE #{table} AS #{name} SET", fields, join, where])
end
# Builds a DELETE statement covering every row matched by the query.
def delete_all(query) do
  sources = create_names(query)
  {table, name, _model} = elem(sources, 0)

  # Joined sources are expressed with USING (inner joins only, see using/2).
  join = using(query, sources)
  where = delete_all_where(query.joins, query, sources)

  assemble(["DELETE FROM #{table} AS #{name}", join, where])
end
# Builds an INSERT statement with positional placeholders ($1..$n).
# An empty field list inserts a row of column defaults.
def insert(prefix, table, fields, returning) do
  values =
    case fields do
      [] ->
        "DEFAULT VALUES"

      _ ->
        columns = Enum.map_join(fields, ", ", &quote_name/1)
        placeholders = Enum.map_join(1..length(fields), ", ", &"$#{&1}")
        "(" <> columns <> ") VALUES (" <> placeholders <> ")"
    end

  "INSERT INTO #{quote_table(prefix, table)} " <> values <> returning(returning)
end
# Builds an UPDATE-by-filters statement. SET clauses take the leading
# placeholders ($1..$n); the WHERE filters continue the numbering.
def update(prefix, table, fields, filters, returning) do
  {set_clauses, next} = numbered_assignments(fields, 1)
  {where_clauses, _next} = numbered_assignments(filters, next)

  "UPDATE #{quote_table(prefix, table)} SET " <> Enum.join(set_clauses, ", ") <>
    " WHERE " <> Enum.join(where_clauses, " AND ") <>
    returning(returning)
end

# Builds "field = $n" fragments, numbering placeholders from `start`.
defp numbered_assignments(fields, start) do
  Enum.map_reduce(fields, start, fn field, ix ->
    {"#{quote_name(field)} = $#{ix}", ix + 1}
  end)
end
# Builds a DELETE-by-filters statement with positional placeholders.
def delete(prefix, table, filters, returning) do
  {clauses, _ix} =
    Enum.map_reduce(filters, 1, fn field, ix ->
      {"#{quote_name(field)} = $#{ix}", ix + 1}
    end)

  "DELETE FROM #{quote_table(prefix, table)} WHERE " <>
    Enum.join(clauses, " AND ") <> returning(returning)
end
## Query generation
binary_ops =
[==: "=", !=: "!=", <=: "<=", >=: ">=", <: "<", >: ">",
and: "AND", or: "OR",
ilike: "ILIKE", like: "LIKE"]
@binary_ops Keyword.keys(binary_ops)
Enum.map(binary_ops, fn {op, str} ->
defp handle_call(unquote(op), 2), do: {:binary_op, unquote(str)}
end)
defp handle_call(fun, _arity), do: {:fun, Atom.to_string(fun)}
defp select(%Query{select: %SelectExpr{fields: fields}, distinct: distinct} = query,
distinct_exprs, sources) do
"SELECT " <>
distinct(distinct, distinct_exprs) <>
Enum.map_join(fields, ", ", &expr(&1, sources, query))
end
defp distinct_exprs(%Query{distinct: %QueryExpr{expr: exprs}} = query, sources)
when is_list(exprs) do
Enum.map_join(exprs, ", ", &expr(&1, sources, query))
end
defp distinct_exprs(_, _), do: ""
defp distinct(nil, _sources), do: ""
defp distinct(%QueryExpr{expr: true}, _exprs), do: "DISTINCT "
defp distinct(%QueryExpr{expr: false}, _exprs), do: ""
defp distinct(_query, exprs), do: "DISTINCT ON (" <> exprs <> ") "
defp from(sources) do
{table, name, _model} = elem(sources, 0)
"FROM #{table} AS #{name}"
end
defp using(%Query{joins: []}, _sources), do: []
defp using(%Query{joins: joins} = query, sources) do
Enum.map_join(joins, " ", fn
%JoinExpr{qual: :inner, on: %QueryExpr{expr: expr}, ix: ix} ->
{table, name, _model} = elem(sources, ix)
where = expr(expr, sources, query)
"USING #{table} AS #{name} WHERE " <> where
%JoinExpr{qual: qual} ->
error!(query, "PostgreSQL supports only inner joins on delete_all, got: `#{qual}`")
end)
end
defp update_fields(%Query{updates: updates} = query, sources) do
for(%{expr: expr} <- updates,
{op, kw} <- expr,
{key, value} <- kw,
do: update_op(op, key, value, sources, query)) |> Enum.join(", ")
end
defp update_op(:set, key, value, sources, query) do
quote_name(key) <> " = " <> expr(value, sources, query)
end
defp update_op(:inc, key, value, sources, query) do
quoted = quote_name(key)
quoted <> " = " <> quoted <> " + " <> expr(value, sources, query)
end
defp update_op(:push, key, value, sources, query) do
quoted = quote_name(key)
quoted <> " = array_append(" <> quoted <> ", " <> expr(value, sources, query) <> ")"
end
defp update_op(:pull, key, value, sources, query) do
quoted = quote_name(key)
quoted <> " = array_remove(" <> quoted <> ", " <> expr(value, sources, query) <> ")"
end
defp update_op(command, _key, _value, _sources, query) do
error!(query, "Unknown update operation #{inspect command} for PostgreSQL")
end
defp update_join(%Query{joins: []}, _sources), do: {[], []}
defp update_join(%Query{joins: joins} = query, sources) do
froms =
"FROM " <> Enum.map_join(joins, ", ", fn
%JoinExpr{qual: :inner, ix: ix, source: source} ->
{join, name, _model} = elem(sources, ix)
join = join || "(" <> expr(source, sources, query) <> ")"
join <> " AS " <> name
%JoinExpr{qual: qual} ->
error!(query, "PostgreSQL supports only inner joins on update_all, got: `#{qual}`")
end)
wheres =
for %JoinExpr{on: %QueryExpr{expr: value} = expr} <- joins,
value != true,
do: expr
{froms, wheres}
end
defp join(%Query{joins: []}, _sources), do: []
defp join(%Query{joins: joins} = query, sources) do
Enum.map_join(joins, " ", fn
%JoinExpr{on: %QueryExpr{expr: expr}, qual: qual, ix: ix, source: source} ->
{join, name, _model} = elem(sources, ix)
qual = join_qual(qual)
join = join || "(" <> expr(source, sources, query) <> ")"
"#{qual} JOIN " <> join <> " AS " <> name <> " ON " <> expr(expr, sources, query)
end)
end
defp join_qual(:inner), do: "INNER"
defp join_qual(:left), do: "LEFT OUTER"
defp join_qual(:right), do: "RIGHT OUTER"
defp join_qual(:full), do: "FULL OUTER"
defp delete_all_where([], query, sources), do: where(query, sources)
defp delete_all_where(_joins, %Query{wheres: wheres} = query, sources) do
boolean("AND", wheres, sources, query)
end
defp where(%Query{wheres: wheres} = query, sources) do
boolean("WHERE", wheres, sources, query)
end
defp having(%Query{havings: havings} = query, sources) do
boolean("HAVING", havings, sources, query)
end
defp group_by(%Query{group_bys: group_bys} = query, sources) do
exprs =
Enum.map_join(group_bys, ", ", fn
%QueryExpr{expr: expr} ->
Enum.map_join(expr, ", ", &expr(&1, sources, query))
end)
case exprs do
"" -> []
_ -> "GROUP BY " <> exprs
end
end
defp order_by(%Query{order_bys: order_bys} = query, distinct_exprs, sources) do
exprs =
Enum.map_join(order_bys, ", ", fn
%QueryExpr{expr: expr} ->
Enum.map_join(expr, ", ", &order_by_expr(&1, sources, query))
end)
case {distinct_exprs, exprs} do
{_, ""} ->
[]
{"", _} ->
"ORDER BY " <> exprs
{_, _} ->
"ORDER BY " <> distinct_exprs <> ", " <> exprs
end
end
defp order_by_expr({dir, expr}, sources, query) do
str = expr(expr, sources, query)
case dir do
:asc -> str
:desc -> str <> " DESC"
end
end
defp limit(%Query{limit: nil}, _sources), do: []
defp limit(%Query{limit: %QueryExpr{expr: expr}} = query, sources) do
"LIMIT " <> expr(expr, sources, query)
end
defp offset(%Query{offset: nil}, _sources), do: []
defp offset(%Query{offset: %QueryExpr{expr: expr}} = query, sources) do
"OFFSET " <> expr(expr, sources, query)
end
defp lock(nil), do: []
defp lock(lock_clause), do: lock_clause
defp boolean(_name, [], _sources, _query), do: []
defp boolean(name, query_exprs, sources, query) do
name <> " " <>
Enum.map_join(query_exprs, " AND ", fn
%QueryExpr{expr: expr} ->
"(" <> expr(expr, sources, query) <> ")"
end)
end
defp expr({:^, [], [ix]}, _sources, _query) do
"$#{ix+1}"
end
defp expr({{:., _, [{:&, _, [idx]}, field]}, _, []}, sources, _query) when is_atom(field) do
{_, name, _} = elem(sources, idx)
"#{name}.#{quote_name(field)}"
end
defp expr({:&, _, [idx]}, sources, query) do
{table, name, model} = elem(sources, idx)
unless model do
error!(query, "PostgreSQL requires a model when using selector " <>
"#{inspect name} but only the table #{inspect table} was given. " <>
"Please specify a model or specify exactly which fields from " <>
"#{inspect name} you desire")
end
fields = model.__schema__(:fields)
Enum.map_join(fields, ", ", &"#{name}.#{quote_name(&1)}")
end
defp expr({:in, _, [_left, []]}, _sources, _query) do
"false"
end
defp expr({:in, _, [left, right]}, sources, query) when is_list(right) do
args = Enum.map_join right, ",", &expr(&1, sources, query)
expr(left, sources, query) <> " IN (" <> args <> ")"
end
defp expr({:in, _, [left, {:^, _, [ix, length]}]}, sources, query) do
args = Enum.map_join ix+1..ix+length, ",", &"$#{&1}"
expr(left, sources, query) <> " IN (" <> args <> ")"
end
defp expr({:in, _, [left, right]}, sources, query) do
expr(left, sources, query) <> " = ANY(" <> expr(right, sources, query) <> ")"
end
defp expr({:is_nil, _, [arg]}, sources, query) do
"#{expr(arg, sources, query)} IS NULL"
end
defp expr({:not, _, [expr]}, sources, query) do
"NOT (" <> expr(expr, sources, query) <> ")"
end
defp expr({:fragment, _, [kw]}, _sources, query) when is_list(kw) or tuple_size(kw) == 3 do
error!(query, "PostgreSQL adapter does not support keyword or interpolated fragments")
end
defp expr({:fragment, _, parts}, sources, query) do
Enum.map_join(parts, "", fn
{:raw, part} -> part
{:expr, expr} -> expr(expr, sources, query)
end)
end
defp expr({:datetime_add, _, [datetime, count, interval]}, sources, query) do
"(" <> expr(datetime, sources, query) <> "::timestamp + "
<> interval(count, interval, sources, query) <> ")::timestamp"
end
defp expr({:date_add, _, [date, count, interval]}, sources, query) do
"(" <> expr(date, sources, query) <> "::date + "
<> interval(count, interval, sources, query) <> ")::date"
end
defp expr({fun, _, args}, sources, query) when is_atom(fun) and is_list(args) do
{modifier, args} =
case args do
[rest, :distinct] -> {"DISTINCT ", [rest]}
_ -> {"", args}
end
case handle_call(fun, length(args)) do
{:binary_op, op} ->
[left, right] = args
op_to_binary(left, sources, query) <>
" #{op} "
<> op_to_binary(right, sources, query)
{:fun, fun} ->
"#{fun}(" <> modifier <> Enum.map_join(args, ", ", &expr(&1, sources, query)) <> ")"
end
end
defp expr(list, sources, query) when is_list(list) do
"ARRAY[" <> Enum.map_join(list, ",", &expr(&1, sources, query)) <> "]"
end
defp expr(%Decimal{} = decimal, _sources, _query) do
Decimal.to_string(decimal, :normal)
end
defp expr(%Ecto.Query.Tagged{value: binary, type: :binary}, _sources, _query)
when is_binary(binary) do
hex = Base.encode16(binary, case: :lower)
"'\\x#{hex}'::bytea"
end
defp expr(%Ecto.Query.Tagged{value: other, type: type}, sources, query) do
expr(other, sources, query) <> "::" <> ecto_to_db(type)
end
defp expr(nil, _sources, _query), do: "NULL"
defp expr(true, _sources, _query), do: "TRUE"
defp expr(false, _sources, _query), do: "FALSE"
defp expr(literal, _sources, _query) when is_binary(literal) do
"'#{escape_string(literal)}'"
end
defp expr(literal, _sources, _query) when is_integer(literal) do
String.Chars.Integer.to_string(literal)
end
defp expr(literal, _sources, _query) when is_float(literal) do
String.Chars.Float.to_string(literal) <> "::float"
end
defp interval(count, interval, _sources, _query) when is_integer(count) do
"interval '" <> String.Chars.Integer.to_string(count) <> " " <> interval <> "'"
end
defp interval(count, interval, _sources, _query) when is_float(count) do
count = :erlang.float_to_binary(count, [:compact, decimals: 16])
"interval '" <> count <> " " <> interval <> "'"
end
defp interval(count, interval, sources, query) do
"(" <> expr(count, sources, query) <> "::numeric * "
<> interval(1, interval, sources, query) <> ")"
end
defp op_to_binary({op, _, [_, _]} = expr, sources, query) when op in @binary_ops do
"(" <> expr(expr, sources, query) <> ")"
end
defp op_to_binary(expr, sources, query) do
expr(expr, sources, query)
end
# Appends a RETURNING clause listing the quoted fields, if any.
defp returning([]),
  do: ""
defp returning(returning),
  do: " RETURNING " <> Enum.map_join(returning, ", ", &quote_name/1)
# Builds a tuple of {quoted_table, alias, model} triples, one per query
# source; e.g. the "posts" table at position 0 gets the alias "p0".
defp create_names(%{prefix: prefix, sources: sources}) do
  create_names(prefix, sources, 0, tuple_size(sources)) |> List.to_tuple()
end

defp create_names(prefix, sources, pos, limit) when pos < limit do
  current =
    case elem(sources, pos) do
      {table, model} ->
        # Alias is the table's first letter plus the source index.
        name = String.first(table) <> Integer.to_string(pos)
        {quote_table(prefix, table), name, model}
      {:fragment, _, _} ->
        # Fragment sources have no table; alias them f0, f1, ...
        {nil, "f" <> Integer.to_string(pos), nil}
    end

  [current|create_names(prefix, sources, pos + 1, limit)]
end

# Base case: past the last source.
defp create_names(_prefix, _sources, pos, pos) do
  []
end
# DDL
alias Ecto.Migration.Table
alias Ecto.Migration.Index
alias Ecto.Migration.Reference
@drops [:drop, :drop_if_exists]
def execute_ddl({command, %Table{}=table, columns}) when command in [:create, :create_if_not_exists] do
options = options_expr(table.options)
if_not_exists = if command == :create_if_not_exists, do: " IF NOT EXISTS", else: ""
"CREATE TABLE" <> if_not_exists <>
" #{quote_table(table.prefix, table.name)} (#{column_definitions(table, columns)})" <> options
end
def execute_ddl({command, %Table{}=table}) when command in @drops do
if_exists = if command == :drop_if_exists, do: " IF EXISTS", else: ""
"DROP TABLE" <> if_exists <> " #{quote_table(table.prefix, table.name)}"
end
def execute_ddl({:alter, %Table{}=table, changes}) do
"ALTER TABLE #{quote_table(table.prefix, table.name)} #{column_changes(table, changes)}"
end
def execute_ddl({:create, %Index{}=index}) do
fields = Enum.map_join(index.columns, ", ", &index_expr/1)
assemble(["CREATE",
if_do(index.unique, "UNIQUE"),
"INDEX",
if_do(index.concurrently, "CONCURRENTLY"),
quote_name(index.name),
"ON",
quote_table(index.prefix, index.table),
if_do(index.using, "USING #{index.using}"),
"(#{fields})"])
end
def execute_ddl({:create_if_not_exists, %Index{}=index}) do
assemble(["DO $$",
"BEGIN",
execute_ddl({:create, index}) <> ";",
"EXCEPTION WHEN duplicate_table THEN END; $$;"])
end
def execute_ddl({command, %Index{}=index}) when command in @drops do
if_exists = if command == :drop_if_exists, do: "IF EXISTS", else: []
assemble(["DROP",
"INDEX",
if_do(index.concurrently, "CONCURRENTLY"),
if_exists,
quote_table(index.prefix, index.name)])
end
def execute_ddl({:rename, %Table{}=current_table, %Table{}=new_table}) do
"ALTER TABLE #{quote_table(current_table.prefix, current_table.name)} RENAME TO #{quote_table(new_table.prefix, new_table.name)}"
end
def execute_ddl({:rename, %Table{}=table, current_column, new_column}) do
"ALTER TABLE #{quote_table(table.prefix, table.name)} RENAME #{quote_name(current_column)} TO #{quote_name(new_column)}"
end
def execute_ddl(string) when is_binary(string), do: string
def execute_ddl(keyword) when is_list(keyword),
do: error!(nil, "PostgreSQL adapter does not support keyword lists in execute")
defp column_definitions(table, columns) do
Enum.map_join(columns, ", ", &column_definition(table, &1))
end
defp column_definition(table, {:add, name, %Reference{} = ref, opts}) do
assemble([
quote_name(name), reference_column_type(ref.type, opts),
column_options(ref.type, opts), reference_expr(ref, table, name)
])
end
defp column_definition(_table, {:add, name, type, opts}) do
assemble([quote_name(name), column_type(type, opts), column_options(type, opts)])
end
defp column_changes(table, columns) do
Enum.map_join(columns, ", ", &column_change(table, &1))
end
defp column_change(table, {:add, name, %Reference{} = ref, opts}) do
assemble([
"ADD COLUMN", quote_name(name), reference_column_type(ref.type, opts),
column_options(ref.type, opts), reference_expr(ref, table, name)
])
end
defp column_change(_table, {:add, name, type, opts}) do
assemble(["ADD COLUMN", quote_name(name), column_type(type, opts), column_options(type, opts)])
end
defp column_change(table, {:modify, name, %Reference{} = ref, opts}) do
assemble([
"ALTER COLUMN", quote_name(name), "TYPE", reference_column_type(ref.type, opts),
constraint_expr(ref, table, name), modify_null(name, opts), modify_default(name, ref.type, opts)
])
end
defp column_change(_table, {:modify, name, type, opts}) do
assemble(["ALTER COLUMN", quote_name(name), "TYPE",
column_type(type, opts), modify_null(name, opts), modify_default(name, type, opts)])
end
defp column_change(_table, {:remove, name}), do: "DROP COLUMN #{quote_name(name)}"
defp modify_null(name, opts) do
case Keyword.get(opts, :null) do
true -> ", ALTER COLUMN #{quote_name(name)} DROP NOT NULL"
false -> ", ALTER COLUMN #{quote_name(name)} SET NOT NULL"
nil -> []
end
end
defp modify_default(name, type, opts) do
case Keyword.fetch(opts, :default) do
{:ok, val} -> ", ALTER COLUMN #{quote_name(name)} SET #{default_expr({:ok, val}, type)}"
:error -> []
end
end
defp column_options(type, opts) do
default = Keyword.fetch(opts, :default)
null = Keyword.get(opts, :null)
pk = Keyword.get(opts, :primary_key)
[default_expr(default, type), null_expr(null), pk_expr(pk)]
end
defp pk_expr(true), do: "PRIMARY KEY"
defp pk_expr(_), do: []
defp null_expr(false), do: "NOT NULL"
defp null_expr(true), do: "NULL"
defp null_expr(_), do: []
defp default_expr({:ok, nil}, _type),
do: "DEFAULT NULL"
defp default_expr({:ok, []}, type),
do: "DEFAULT ARRAY[]::#{ecto_to_db(type)}"
defp default_expr({:ok, literal}, _type) when is_binary(literal),
do: "DEFAULT '#{escape_string(literal)}'"
defp default_expr({:ok, literal}, _type) when is_number(literal) or is_boolean(literal),
do: "DEFAULT #{literal}"
defp default_expr({:ok, {:fragment, expr}}, _type),
do: "DEFAULT #{expr}"
defp default_expr({:ok, expr}, type),
do: raise(ArgumentError, "unknown default `#{inspect expr}` for type `#{inspect type}`. " <>
":default may be a string, number, boolean, empty list or a fragment(...)")
defp default_expr(:error, _),
do: []
defp index_expr(literal) when is_binary(literal),
do: literal
defp index_expr(literal),
do: quote_name(literal)
defp options_expr(nil),
do: ""
defp options_expr(keyword) when is_list(keyword),
do: error!(nil, "PostgreSQL adapter does not support keyword lists in :options")
defp options_expr(options),
do: " #{options}"
defp column_type({:array, type}, opts),
do: column_type(type, opts) <> "[]"
defp column_type(type, opts) do
size = Keyword.get(opts, :size)
precision = Keyword.get(opts, :precision)
scale = Keyword.get(opts, :scale)
type_name = ecto_to_db(type)
cond do
size -> "#{type_name}(#{size})"
precision -> "#{type_name}(#{precision},#{scale || 0})"
type == :string -> "#{type_name}(255)"
true -> "#{type_name}"
end
end
defp reference_expr(%Reference{} = ref, table, name),
do: "CONSTRAINT #{reference_name(ref, table, name)} REFERENCES " <>
"#{quote_table(table.prefix, ref.table)}(#{quote_name(ref.column)})" <>
reference_on_delete(ref.on_delete)
defp constraint_expr(%Reference{} = ref, table, name),
do: ", ADD CONSTRAINT #{reference_name(ref, table, name)} " <>
"FOREIGN KEY (#{quote_name(name)}) " <>
"REFERENCES #{quote_table(table.prefix, ref.table)}(#{quote_name(ref.column)})" <>
reference_on_delete(ref.on_delete)
# A reference pointing to a serial column becomes integer in postgres
defp reference_name(%Reference{name: nil}, table, column),
do: quote_name("#{table.name}_#{column}_fkey")
defp reference_name(%Reference{name: name}, _table, _column),
do: quote_name(name)
defp reference_column_type(:serial, _opts), do: "integer"
defp reference_column_type(type, opts), do: column_type(type, opts)
defp reference_on_delete(:nilify_all), do: " ON DELETE SET NULL"
defp reference_on_delete(:delete_all), do: " ON DELETE CASCADE"
defp reference_on_delete(_), do: ""
## Helpers
# Wraps a field identifier in double quotes. Names that already contain
# a double quote are rejected (no escaping is attempted).
defp quote_name(name)
defp quote_name(name) when is_atom(name),
  do: quote_name(Atom.to_string(name))
defp quote_name(name) do
  if String.contains?(name, "\"") do
    error!(nil, "bad field name #{inspect name}")
  end

  <<?", name::binary, ?">>
end

# Quotes a table reference, optionally schema-qualified:
# quote_table(nil, "posts") and quote_table("blog", "posts").
defp quote_table(nil, name), do: quote_table(name)
defp quote_table(prefix, name), do: quote_table(prefix) <> "." <> quote_table(name)

defp quote_table(name) when is_atom(name),
  do: quote_table(Atom.to_string(name))
defp quote_table(name) do
  if String.contains?(name, "\"") do
    error!(nil, "bad table name #{inspect name}")
  end

  <<?", name::binary, ?">>
end
# Join a (possibly nested) list of SQL fragments into one space-separated
# statement; empty-list elements disappear when flattened.
defp assemble(parts), do: parts |> List.flatten() |> Enum.join(" ")

# Include `value` only when `condition` is truthy; the empty list is
# later dropped by assemble/1.
defp if_do(condition, value) do
  if condition, do: value, else: []
end

# Escape single quotes for safe embedding in SQL string literals.
defp escape_string(value) when is_binary(value),
  do: :binary.replace(value, "'", "''", [:global])
# Maps Ecto primitive types onto PostgreSQL column type names.
# Types without a special mapping use their atom name verbatim.
@simple_db_types %{
  id: "integer",
  binary_id: "uuid",
  string: "varchar",
  datetime: "timestamp",
  binary: "bytea",
  map: "jsonb"
}

defp ecto_to_db({:array, t}), do: ecto_to_db(t) <> "[]"
defp ecto_to_db(type), do: Map.get(@simple_db_types, type, Atom.to_string(type))
# Raises an ArgumentError when no query is available for context,
# otherwise an Ecto.QueryError carrying the offending query.
defp error!(nil, message), do: raise(ArgumentError, message)

defp error!(query, message),
  do: raise(Ecto.QueryError, query: query, message: message)
end
end
| 34.198007 | 135 | 0.590874 |
795c0df4c7f1d6c1f08d8970a168cf1f7ab6f16e | 385 | ex | Elixir | lib/waveschainex/consensus.ex | cyberpunk-ventures/waveschainex | a75190fc4b61f6351abc08f894310bcc05630251 | [
"MIT"
] | 2 | 2018-07-27T13:13:01.000Z | 2018-07-29T00:07:08.000Z | lib/waveschainex/consensus.ex | cyberpunk-ventures/waveschainex | a75190fc4b61f6351abc08f894310bcc05630251 | [
"MIT"
] | null | null | null | lib/waveschainex/consensus.ex | cyberpunk-ventures/waveschainex | a75190fc4b61f6351abc08f894310bcc05630251 | [
"MIT"
] | null | null | null | defmodule Waveschainex.Consensus do
use Tesla, docs: false, only: ~w(get)a
@doc """
Shows which consensus algo being using
## Parameters
- client : Tesla client
- opts: Optional parameters
"""
@spec algo(Tesla.Env.client(), keyword()) :: {:ok, Tesla.Env.t()} | {:error, Tesla.Env.t()}
def algo(client, _opts \\ []) do
get(client, "/consensus/algo")
end
end
| 21.388889 | 93 | 0.636364 |
795c972f7ceeb262e79fe7668812a5ee9bb843b3 | 4,503 | ex | Elixir | lib/game/command/give.ex | jgsmith/ex_venture | 546adaa8fe80d45a72fde6de8d8d6906902c12d4 | [
"MIT"
] | 2 | 2019-05-14T11:36:44.000Z | 2020-07-01T08:54:04.000Z | lib/game/command/give.ex | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | null | null | null | lib/game/command/give.ex | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | 1 | 2021-01-29T14:12:40.000Z | 2021-01-29T14:12:40.000Z | defmodule Game.Command.Give do
@moduledoc """
The "give" command
"""
use Game.Command
use Game.Currency
use Game.Zone
import Game.Room.Helpers, only: [find_character: 2]
alias Game.Character
alias Game.Events.CurrencyReceived
alias Game.Events.ItemReceived
alias Game.Format
alias Game.Item
alias Game.Items
alias Game.Player
commands(["give"], parse: false)
@impl Game.Command
def help(:topic), do: "Give"
def help(:short), do: "Give items to players"
def help(:full) do
"""
#{help(:short)}. Give an item to a player or NPC in your room.
[ ] > {command}give potion to guard{/command}
"""
end
@impl true
# Two-arity variant required by the command behaviour; context is unused.
def parse(command, _context), do: parse(command)

@impl Game.Command
@doc """
Parse the command into arguments

    iex> Game.Command.Give.parse("give potion to guard")
    {"potion", :to, "guard"}

    iex> Game.Command.Give.parse("give potion guard")
    {:error, :bad_parse, "give potion guard"}

    iex> Game.Command.Give.parse("give extra")
    {:error, :bad_parse, "give extra"}

    iex> Game.Command.Give.parse("unknown hi")
    {:error, :bad_parse, "unknown hi"}
"""
# NOTE(review): the `{atom}` return spec looks too narrow — the function
# returns three-element tuples; confirm against other commands' specs.
@spec parse(String.t()) :: {atom}
def parse(command)
# Strips the verb and parses the remainder. The "unknown hi" doctest above
# falls through here, so a catch-all clause is presumably injected by
# `use Game.Command` — verify in Game.Command.
def parse("give " <> item_to_character), do: _parse_give_command(item_to_character)
@doc """
Handle the common parsing code for an item name and then the character

Returns `{item_name, :to, character_name}` on success (the item name is
trimmed, the character name is kept verbatim), or
`{:error, :bad_parse, "give " <> string}` when no " to " separator is found.
"""
# Fix: the previous @spec declared a return type of `:ok`, which this
# function never returns — it always yields one of the tuples below.
@spec _parse_give_command(String.t()) ::
        {String.t(), :to, String.t()} | {:error, :bad_parse, String.t()}
def _parse_give_command(string) do
  case Regex.run(~r/(?<item>.+) to (?<character>.+)/i, string, capture: :all) do
    nil ->
      # Reattach the verb so the error mirrors the original player input.
      {:error, :bad_parse, "give " <> string}

    [_string, item_name, character_name] ->
      {String.trim(item_name), :to, character_name}
  end
end
@impl Game.Command
@doc """
Give items to another character
"""
def run(command, state)
def run({item_name, :to, character_name}, state = %{save: save}) do
{:ok, room} = Environment.look(save.room_id)
case find_item_or_currency(state.save, item_name) do
{:error, :not_found} ->
message = "\"#{item_name}\" could not be found."
state |> Socket.echo(message)
{:ok, instance, item} ->
state |> maybe_give_to_character(room, instance, item, character_name)
end
end
defp find_item_or_currency(save, item_name) do
case Regex.match?(~r/^\d+ #{@currency}$/, item_name) do
false ->
find_item(save, item_name)
true ->
[currency | _] = String.split(item_name, " ")
{:ok, String.to_integer(currency), :currency}
end
end
defp find_item(save, item_name) do
items = Items.items_keep_instance(save.items)
item =
Enum.find(items, fn {_instance, item} ->
Item.matches_lookup?(item, item_name)
end)
case item do
nil ->
{:error, :not_found}
{instance, item} ->
{:ok, instance, item}
end
end
defp maybe_give_to_character(state, room, instance, item, character_name) do
case find_character(room, character_name) do
{:error, :not_found} ->
message = "\"#{character_name}\" could not be found."
state |> Socket.echo(message)
{:ok, character} ->
send_item_to_character(state, instance, item, character)
end
end
defp send_item_to_character(state = %{save: save}, currency, :currency, character) do
case save.currency >= currency do
false ->
name = Format.name(character)
message = "You do not have enough #{currency()} to give to #{name}."
state |> Socket.echo(message)
true ->
name = Format.name(character)
message = "Gave #{Format.currency(currency)} to #{name}."
Socket.echo(state, message)
event = %CurrencyReceived{character: Character.to_simple(state.character), amount: currency}
Character.notify(character, event)
state = Player.update_save(state, %{save | currency: save.currency - currency})
{:update, state}
end
end
defp send_item_to_character(state = %{save: save}, instance, item, character) do
item = Format.item_name(item)
name = Format.name(character)
message = "Gave #{item} to #{name}."
state |> Socket.echo(message)
event = %ItemReceived{character: Character.to_simple(state.character), instance: instance}
Character.notify(character, event)
items = List.delete(save.items, instance)
state = Player.update_save(state, %{save | items: items})
{:update, state}
end
end
| 26.964072 | 100 | 0.636465 |
795cafb3c14808ae87bb4ac92ac636cd541f9e79 | 219 | exs | Elixir | samples/phoenix_wobserver/test/web/controllers/page_controller_test.exs | IanLuites/wobserver-elixirconf-2017 | 86a56a392a5877d2d9a51dc7fbd7e0d8b576c711 | [
"MIT"
] | 11 | 2017-05-05T12:28:35.000Z | 2020-02-26T09:16:10.000Z | samples/phoenix_wobserver/test/web/controllers/page_controller_test.exs | IanLuites/wobserver-elixirconf-2017 | 86a56a392a5877d2d9a51dc7fbd7e0d8b576c711 | [
"MIT"
] | null | null | null | samples/phoenix_wobserver/test/web/controllers/page_controller_test.exs | IanLuites/wobserver-elixirconf-2017 | 86a56a392a5877d2d9a51dc7fbd7e0d8b576c711 | [
"MIT"
] | null | null | null | defmodule PhoenixWobserver.Web.PageControllerTest do
use PhoenixWobserver.Web.ConnCase
test "GET /", %{conn: conn} do
conn = get conn, "/"
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 24.333333 | 60 | 0.703196 |
795cd31c125b5c22b69c2a0cde5f9adcc89ed49f | 87 | exs | Elixir | config/dev.exs | Around25/bitcoin_rpc | 22fcf139b300dbf08b5a16d327a09ea99ba4ca29 | [
"MIT"
] | 1 | 2020-11-23T16:22:44.000Z | 2020-11-23T16:22:44.000Z | config/dev.exs | Around25/bitcoin_rpc | 22fcf139b300dbf08b5a16d327a09ea99ba4ca29 | [
"MIT"
] | 1 | 2018-05-10T07:55:56.000Z | 2018-05-10T07:55:56.000Z | config/dev.exs | Around25/bitcoin_rpc | 22fcf139b300dbf08b5a16d327a09ea99ba4ca29 | [
"MIT"
] | null | null | null | use Mix.Config
config :logger, :console, format: "[$level] $message\n", level: :debug
| 21.75 | 70 | 0.689655 |
795cd6c7e282e2a9653944179b5d56bb2381e766 | 4,408 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/api/sponsors.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/api/sponsors.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/api/sponsors.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.YouTube.V3.Api.Sponsors do
  @moduledoc """
  API calls for all endpoints tagged `Sponsors`.
  """

  alias GoogleApi.YouTube.V3.Connection
  alias GoogleApi.YouTube.V3.Model.SponsorListResponse
  alias GoogleApi.Gax.{Request, Response}

  # Client library version, stamped onto every outgoing request.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  # Supported optional parameters for youtube_sponsors_list/4 and where each
  # one is placed on the request (all are query parameters).
  @sponsors_list_optional_params %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :filter => :query,
    :maxResults => :query,
    :pageToken => :query
  }

  @doc """
  Retrieves a list of sponsors that match the request criteria for a channel.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.YouTube.V3.Connection.t`) - Connection to server
  *   `part` (*type:* `list(String.t)`) - The *part* parameter specifies the sponsor resource parts
      that the API response will include. Supported values are id and snippet.
  *   `optional_params` (*type:* `keyword()`) - Optional query parameters such as `:filter`,
      `:maxResults` and `:pageToken`; see `@sponsors_list_optional_params` for the full set.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.YouTube.V3.Model.SponsorListResponse{}}` on success
  *   `{:error, info}` on failure
  """
  @spec youtube_sponsors_list(Tesla.Env.client(), list(String.t()), keyword(), keyword()) ::
          {:ok, SponsorListResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def youtube_sponsors_list(connection, part, optional_params \\ [], opts \\ []) do
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/youtube/v3/sponsors", %{})
      |> Request.add_param(:query, :part, part)
      |> Request.add_optional_params(@sponsors_list_optional_params, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %SponsorListResponse{}])
  end
end
| 47.913043 | 254 | 0.659936 |
795cf96ae759468e81b7ba869a421d0805a2cd87 | 2,254 | ex | Elixir | lib/codes/codes_f20.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_f20.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_f20.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_F20 do
alias IcdCode.ICDCode
def _F200 do
%ICDCode{full_code: "F200",
category_code: "F20",
short_code: "0",
full_name: "Paranoid schizophrenia",
short_name: "Paranoid schizophrenia",
category_name: "Paranoid schizophrenia"
}
end
def _F201 do
%ICDCode{full_code: "F201",
category_code: "F20",
short_code: "1",
full_name: "Disorganized schizophrenia",
short_name: "Disorganized schizophrenia",
category_name: "Disorganized schizophrenia"
}
end
def _F202 do
%ICDCode{full_code: "F202",
category_code: "F20",
short_code: "2",
full_name: "Catatonic schizophrenia",
short_name: "Catatonic schizophrenia",
category_name: "Catatonic schizophrenia"
}
end
def _F203 do
%ICDCode{full_code: "F203",
category_code: "F20",
short_code: "3",
full_name: "Undifferentiated schizophrenia",
short_name: "Undifferentiated schizophrenia",
category_name: "Undifferentiated schizophrenia"
}
end
def _F205 do
%ICDCode{full_code: "F205",
category_code: "F20",
short_code: "5",
full_name: "Residual schizophrenia",
short_name: "Residual schizophrenia",
category_name: "Residual schizophrenia"
}
end
def _F2081 do
%ICDCode{full_code: "F2081",
category_code: "F20",
short_code: "81",
full_name: "Schizophreniform disorder",
short_name: "Schizophreniform disorder",
category_name: "Schizophreniform disorder"
}
end
def _F2089 do
%ICDCode{full_code: "F2089",
category_code: "F20",
short_code: "89",
full_name: "Other schizophrenia",
short_name: "Other schizophrenia",
category_name: "Other schizophrenia"
}
end
def _F209 do
%ICDCode{full_code: "F209",
category_code: "F20",
short_code: "9",
full_name: "Schizophrenia, unspecified",
short_name: "Schizophrenia, unspecified",
category_name: "Schizophrenia, unspecified"
}
end
end
| 28.531646 | 57 | 0.598935 |
795cfb827e72c3355057f552402ef09ca65ad128 | 134 | exs | Elixir | docs.exs | vkatsuba/hex_core | 455054d0952d63baaafd7b97e38af2d40e81881e | [
"Apache-2.0"
] | null | null | null | docs.exs | vkatsuba/hex_core | 455054d0952d63baaafd7b97e38af2d40e81881e | [
"Apache-2.0"
] | null | null | null | docs.exs | vkatsuba/hex_core | 455054d0952d63baaafd7b97e38af2d40e81881e | [
"Apache-2.0"
] | null | null | null | [
source_url: "https://github.com/hexpm/hex_core",
extras: ["README.md", "CHANGELOG.md"],
main: "readme",
proglang: :erlang
]
| 19.142857 | 50 | 0.641791 |
795cfdefd59241a57abde99d57a9ca64e51df667 | 2,322 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/developer_metadata_location.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/developer_metadata_location.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/developer_metadata_location.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Sheets.V4.Model.DeveloperMetadataLocation do
  @moduledoc """
  A location where metadata may be associated in a spreadsheet.

  ## Attributes

  *   `dimensionRange` (*type:* `GoogleApi.Sheets.V4.Model.DimensionRange.t`, *default:* `nil`) - Represents the row or column when metadata is associated with
      a dimension. The specified DimensionRange must represent a single row
      or column; it cannot be unbounded or span multiple rows or columns.
  *   `locationType` (*type:* `String.t`, *default:* `nil`) - The type of location this object represents. This field is read-only.
  *   `sheetId` (*type:* `integer()`, *default:* `nil`) - The ID of the sheet when metadata is associated with an entire sheet.
  *   `spreadsheet` (*type:* `boolean()`, *default:* `nil`) - True when metadata is associated with an entire spreadsheet.
  """

  # Provides the `field/1,2` macro used below; presumably it also generates
  # the struct and the decode/2 the Poison defimpls delegate to — confirm
  # against GoogleApi.Gax.ModelBase.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :dimensionRange => GoogleApi.Sheets.V4.Model.DimensionRange.t(),
          :locationType => String.t(),
          :sheetId => integer(),
          :spreadsheet => boolean()
        }

  # `as:` points nested-object fields at their model module for decoding.
  field(:dimensionRange, as: GoogleApi.Sheets.V4.Model.DimensionRange)
  field(:locationType)
  field(:sheetId)
  field(:spreadsheet)
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.DeveloperMetadataLocation do
  # Decoding is delegated to the model module's own decode/2.
  def decode(value, options),
    do: GoogleApi.Sheets.V4.Model.DeveloperMetadataLocation.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.DeveloperMetadataLocation do
  # Encoding uses the generic ModelBase encoder shared by all models.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 40.034483 | 159 | 0.726529 |
795d18a5395f9425413d72dc9bf8aacbe376bf14 | 20,043 | exs | Elixir | lib/mix/test/mix/dep_test.exs | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/dep_test.exs | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/dep_test.exs | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | null | null | null | Code.require_file "../test_helper.exs", __DIR__
defmodule Mix.DepTest do
use MixTest.Case
defmodule DepsApp do
def project do
[deps: [
{:ok, "0.1.0", path: "deps/ok"},
{:invalidvsn, "0.2.0", path: "deps/invalidvsn"},
{:invalidapp, "0.1.0", path: "deps/invalidapp"},
{:noappfile, "0.1.0", path: "deps/noappfile"},
{:uncloned, git: "https://github.com/elixir-lang/uncloned.git"},
{:optional, git: "https://github.com/elixir-lang/optional.git", optional: true}
]]
end
end
defmodule ProcessDepsApp do
def project do
[app: :process_deps_app, deps: Process.get(:mix_deps)]
end
end
defp with_deps(deps, fun) do
Process.put(:mix_deps, deps)
Mix.Project.push ProcessDepsApp
fun.()
after
Mix.Project.pop
end
defp assert_wrong_dependency(deps) do
with_deps deps, fn ->
assert_raise Mix.Error, ~r"Dependency specified in the wrong format", fn ->
Mix.Dep.loaded([])
end
end
end
test "respects the MIX_NO_DEPS flag" do
Mix.Project.push DepsApp
in_fixture "deps_status", fn ->
deps = Mix.Dep.cached()
assert length(deps) == 6
System.put_env("MIX_NO_DEPS", "1")
deps = Mix.Dep.cached()
assert length(deps) == 0
end
after
System.delete_env("MIX_NO_DEPS")
end
test "extracts all dependencies from the given project" do
Mix.Project.push DepsApp
in_fixture "deps_status", fn ->
deps = Mix.Dep.loaded([])
assert length(deps) == 6
assert Enum.find deps, &match?(%Mix.Dep{app: :ok, status: {:ok, _}}, &1)
assert Enum.find deps, &match?(%Mix.Dep{app: :invalidvsn, status: {:invalidvsn, :ok}}, &1)
assert Enum.find deps, &match?(%Mix.Dep{app: :invalidapp, status: {:invalidapp, _}}, &1)
assert Enum.find deps, &match?(%Mix.Dep{app: :noappfile, status: {:noappfile, _}}, &1)
assert Enum.find deps, &match?(%Mix.Dep{app: :uncloned, status: {:unavailable, _}}, &1)
assert Enum.find deps, &match?(%Mix.Dep{app: :optional, status: {:unavailable, _}}, &1)
end
end
test "extracts all dependencies paths from the given project" do
Mix.Project.push DepsApp
in_fixture "deps_status", fn ->
paths = Mix.Project.deps_paths
assert map_size(paths) == 6
assert paths[:ok] =~ "deps/ok"
assert paths[:uncloned] =~ "deps/uncloned"
end
end
test "fails on invalid dependencies" do
assert_wrong_dependency [{:ok}]
assert_wrong_dependency [{:ok, nil}]
assert_wrong_dependency [{:ok, nil, []}]
end
test "use requirements for dependencies" do
with_deps [{:ok, "~> 0.1", path: "deps/ok"}], fn ->
in_fixture "deps_status", fn ->
deps = Mix.Dep.loaded([])
assert Enum.find deps, &match?(%Mix.Dep{app: :ok, status: {:ok, _}}, &1)
end
end
end
test "raises when no SCM is specified" do
with_deps [{:ok, "~> 0.1", not_really: :ok}], fn ->
in_fixture "deps_status", fn ->
send self(), {:mix_shell_input, :yes?, false}
msg = "Could not find an SCM for dependency :ok from Mix.DepTest.ProcessDepsApp"
assert_raise Mix.Error, msg, fn -> Mix.Dep.loaded([]) end
end
end
end
test "does not set the manager before the dependency was loaded" do
# It is important to not eagerly set the manager because the dependency
# needs to be loaded (i.e. available in the filesystem) in order to get
# the proper manager.
Mix.Project.push DepsApp
{_, true, _} =
Mix.Dep.Converger.converge(false, [], nil, fn dep, acc, lock ->
assert is_nil(dep.manager)
{dep, acc or true, lock}
end)
end
test "raises on invalid deps req" do
with_deps [{:ok, "+- 0.1.0", path: "deps/ok"}], fn ->
in_fixture "deps_status", fn ->
assert_raise Mix.Error, ~r"Invalid requirement", fn ->
Mix.Dep.loaded([])
end
end
end
end
test "nested deps come first" do
with_deps [{:deps_repo, "0.1.0", path: "custom/deps_repo"}], fn ->
in_fixture "deps_status", fn ->
assert Enum.map(Mix.Dep.loaded([]), &(&1.app)) == [:git_repo, :deps_repo]
end
end
end
test "nested optional deps are never added" do
with_deps [{:deps_repo, "0.1.0", path: "custom/deps_repo"}], fn ->
in_fixture "deps_status", fn ->
File.write! "custom/deps_repo/mix.exs", """
defmodule DepsRepo do
use Mix.Project
def project do
[app: :deps_repo,
version: "0.1.0",
deps: [{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), optional: true}]]
end
end
"""
assert Enum.map(Mix.Dep.loaded([]), &(&1.app)) == [:deps_repo]
end
end
end
test "nested deps with convergence" do
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"},
{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo")}]
with_deps deps, fn ->
in_fixture "deps_status", fn ->
assert Enum.map(Mix.Dep.loaded([]), &(&1.app)) == [:git_repo, :deps_repo]
end
end
end
test "nested deps with convergence and managers" do
Process.put(:custom_deps_git_repo_opts, [manager: :make])
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo", manager: :rebar},
{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo")}]
with_deps deps, fn ->
in_fixture "deps_status", fn ->
[dep1, dep2] = Mix.Dep.loaded([])
assert dep1.manager == nil
assert dep2.manager == :rebar
end
end
end
test "nested deps with convergence and optional dependencies" do
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"},
{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo")}]
with_deps deps, fn ->
in_fixture "deps_status", fn ->
File.write! "custom/deps_repo/mix.exs", """
defmodule DepsRepo do
use Mix.Project
def project do
[app: :deps_repo,
version: "0.1.0",
deps: [{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), optional: true}]]
end
end
"""
assert Enum.map(Mix.Dep.loaded([]), &(&1.app)) == [:git_repo, :deps_repo]
end
end
end
test "nested deps with optional dependencies and cousin conflict" do
with_deps [{:deps_repo1, "0.1.0", path: "custom/deps_repo1"},
{:deps_repo2, "0.1.0", path: "custom/deps_repo2"}], fn ->
in_fixture "deps_status", fn ->
File.mkdir_p!("custom/deps_repo1")
File.write! "custom/deps_repo1/mix.exs", """
defmodule DepsRepo1 do
use Mix.Project
def project do
[app: :deps_repo1,
version: "0.1.0",
deps: [{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), optional: true}]]
end
end
"""
File.mkdir_p!("custom/deps_repo2")
File.write! "custom/deps_repo2/mix.exs", """
defmodule DepsRepo2 do
use Mix.Project
def project do
[app: :deps_repo2,
version: "0.1.0",
deps: [{:git_repo, "0.2.0", path: "somewhere"}]]
end
end
"""
Mix.Tasks.Deps.run([])
assert_received {:mix_shell, :info, ["* git_repo" <> _]}
assert_received {:mix_shell, :info, [msg]}
assert msg =~ "different specs were given for the git_repo"
end
end
end
## Remove converger
defmodule IdentityRemoteConverger do
@behaviour Mix.RemoteConverger
def remote?(%Mix.Dep{app: :deps_repo}), do: false
def remote?(%Mix.Dep{}), do: true
def deps(_dep, _lock), do: []
def post_converge, do: :ok
def converge(deps, lock) do
Process.put(:remote_converger, deps)
lock
end
end
test "remote converger" do
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"},
{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo")}]
with_deps deps, fn ->
Mix.RemoteConverger.register(IdentityRemoteConverger)
in_fixture "deps_status", fn ->
Mix.Tasks.Deps.Get.run([])
message = "* Getting git_repo (#{fixture_path("git_repo")})"
assert_received {:mix_shell, :info, [^message]}
assert Process.get(:remote_converger)
end
end
after
Mix.RemoteConverger.register(nil)
end
test "pass dependencies to remote converger in defined order" do
deps = [
{:ok, "0.1.0", path: "deps/ok"},
{:invalidvsn, "0.2.0", path: "deps/invalidvsn"},
{:deps_repo, "0.1.0", path: "custom/deps_repo"},
{:invalidapp, "0.1.0", path: "deps/invalidapp"},
{:noappfile, "0.1.0", path: "deps/noappfile"}
]
with_deps deps, fn ->
Mix.RemoteConverger.register(IdentityRemoteConverger)
in_fixture "deps_status", fn ->
Mix.Tasks.Deps.Get.run([])
deps = Process.get(:remote_converger) |> Enum.map(& &1.app)
assert deps == [:ok, :invalidvsn, :deps_repo, :invalidapp, :noappfile, :git_repo]
end
end
after
Mix.RemoteConverger.register(nil)
end
defmodule RaiseRemoteConverger do
@behaviour Mix.RemoteConverger
def remote?(_app), do: false
def deps(_dep, _lock), do: :ok
def post_converge, do: :ok
def converge(_deps, lock) do
Process.put(:remote_converger, true)
lock
end
end
test "remote converger is not invoked if deps diverge" do
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"},
{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), only: :test}]
with_deps deps, fn ->
Mix.RemoteConverger.register(RaiseRemoteConverger)
in_fixture "deps_status", fn ->
assert_raise Mix.Error, fn ->
Mix.Tasks.Deps.Get.run([])
end
assert_received {:mix_shell, :error, ["Dependencies have diverged:"]}
refute Process.get(:remote_converger)
end
end
after
Mix.RemoteConverger.register(nil)
end
## Only handling
test "only extract deps matching environment" do
with_deps [{:foo, github: "elixir-lang/foo"},
{:bar, github: "elixir-lang/bar", only: :other_env}], fn ->
in_fixture "deps_status", fn ->
deps = Mix.Dep.loaded([env: :other_env])
assert length(deps) == 2
deps = Mix.Dep.loaded([])
assert length(deps) == 2
assert [dep] = Mix.Dep.loaded([env: :prod])
assert dep.app == :foo
end
end
end
test "only fetch parent deps matching specified env" do
with_deps [{:only, github: "elixir-lang/only", only: [:dev]}], fn ->
in_fixture "deps_status", fn ->
Mix.Tasks.Deps.Get.run(["--only", "prod"])
refute_received {:mix_shell, :info, ["* Getting" <> _]}
assert_raise Mix.Error, "Can't continue due to errors on dependencies", fn ->
Mix.Tasks.Deps.Loadpaths.run([])
end
Mix.ProjectStack.clear_cache()
Mix.env(:prod)
Mix.Tasks.Deps.Loadpaths.run([])
end
end
end
test "nested deps selects only prod dependencies" do
Process.put(:custom_deps_git_repo_opts, [only: :test])
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"}]
with_deps deps, fn ->
in_fixture "deps_status", fn ->
loaded = Mix.Dep.loaded([])
assert [:deps_repo] = Enum.map(loaded, &(&1.app))
loaded = Mix.Dep.loaded([env: :test])
assert [:deps_repo] = Enum.map(loaded, &(&1.app))
end
end
end
test "nested deps on only matching" do
# deps_repo wants git_repo for test, git_repo is restricted to only test
# We assert the dependencies match as expected, happens in umbrella apps
Process.put(:custom_deps_git_repo_opts, [only: :test])
# We need to pass env: :test so the child dependency is loaded
# in the first place (otherwise only :prod deps are loaded)
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo", env: :test},
{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), only: :test}]
with_deps deps, fn ->
in_fixture "deps_status", fn ->
loaded = Mix.Dep.loaded([])
assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app))
assert [unavailable: _, noappfile: _] = Enum.map(loaded, &(&1.status))
loaded = Mix.Dep.loaded([env: :dev])
assert [:deps_repo] = Enum.map(loaded, &(&1.app))
assert [noappfile: _] = Enum.map(loaded, &(&1.status))
loaded = Mix.Dep.loaded([env: :test])
assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app))
assert [unavailable: _, noappfile: _] = Enum.map(loaded, &(&1.status))
end
end
end
test "nested deps on only conflict" do
# deps_repo wants all git_repo, git_repo is restricted to only test
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"},
{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), only: :test}]
with_deps deps, fn ->
in_fixture "deps_status", fn ->
loaded = Mix.Dep.loaded([])
assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app))
assert [divergedonly: _, noappfile: _] = Enum.map(loaded, &(&1.status))
loaded = Mix.Dep.loaded([env: :dev])
assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app))
assert [divergedonly: _, noappfile: _] = Enum.map(loaded, &(&1.status))
loaded = Mix.Dep.loaded([env: :test])
assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app))
assert [divergedonly: _, noappfile: _] = Enum.map(loaded, &(&1.status))
Mix.Tasks.Deps.run([])
assert_received {:mix_shell, :info, ["* git_repo" <> _]}
assert_received {:mix_shell, :info, [msg]}
assert msg =~ "Remove the :only restriction from your dep"
end
end
end
test "nested deps on only conflict does not happen with optional deps" do
Process.put(:custom_deps_git_repo_opts, [optional: true])
# deps_repo wants all git_repo, git_repo is restricted to only test
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"},
{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), only: :test}]
with_deps deps, fn ->
in_fixture "deps_status", fn ->
loaded = Mix.Dep.loaded([])
assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app))
assert [unavailable: _, noappfile: _] = Enum.map(loaded, &(&1.status))
loaded = Mix.Dep.loaded([env: :dev])
assert [:deps_repo] = Enum.map(loaded, &(&1.app))
assert [noappfile: _] = Enum.map(loaded, &(&1.status))
loaded = Mix.Dep.loaded([env: :test])
assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app))
assert [unavailable: _, noappfile: _] = Enum.map(loaded, &(&1.status))
end
end
end
test "nested deps with valid only subset" do
# deps_repo wants git_repo for prod, git_repo is restricted to only prod and test
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo", only: :prod},
{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), only: [:prod, :test]}]
with_deps deps, fn ->
in_fixture "deps_status", fn ->
loaded = Mix.Dep.loaded([])
assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app))
assert [unavailable: _, noappfile: _] = Enum.map(loaded, &(&1.status))
loaded = Mix.Dep.loaded([env: :dev])
assert [] = Enum.map(loaded, &(&1.app))
loaded = Mix.Dep.loaded([env: :test])
assert [:git_repo] = Enum.map(loaded, &(&1.app))
assert [unavailable: _] = Enum.map(loaded, &(&1.status))
loaded = Mix.Dep.loaded([env: :prod])
assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app))
assert [unavailable: _, noappfile: _] = Enum.map(loaded, &(&1.status))
end
end
end
test "nested deps with invalid only subset" do
# deps_repo wants git_repo for dev, git_repo is restricted to only test
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo", only: :dev},
{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), only: [:test]}]
with_deps deps, fn ->
in_fixture "deps_status", fn ->
loaded = Mix.Dep.loaded([])
assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app))
assert [divergedonly: _, noappfile: _] = Enum.map(loaded, &(&1.status))
loaded = Mix.Dep.loaded([env: :dev])
assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app))
assert [divergedonly: _, noappfile: _] = Enum.map(loaded, &(&1.status))
loaded = Mix.Dep.loaded([env: :test])
assert [:git_repo] = Enum.map(loaded, &(&1.app))
assert [unavailable: _] = Enum.map(loaded, &(&1.status))
Mix.Tasks.Deps.run([])
assert_received {:mix_shell, :info, ["* git_repo" <> _]}
assert_received {:mix_shell, :info, [msg]}
assert msg =~ "Ensure you specify at least the same environments in :only in your dep"
end
end
end
test "nested deps with valid only in both parent and child" do
Process.put(:custom_deps_git_repo_opts, [only: :test])
# deps_repo has environment set to test so it loads the deps_git_repo set to test too
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo", env: :test, only: [:dev, :test]},
{:git_repo, "0.1.0", git: MixTest.Case.fixture_path("git_repo"), only: :test}]
with_deps deps, fn ->
in_fixture "deps_status", fn ->
loaded = Mix.Dep.loaded([])
assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app))
assert [unavailable: _, noappfile: _] = Enum.map(loaded, &(&1.status))
loaded = Mix.Dep.loaded([env: :dev])
assert [:deps_repo] = Enum.map(loaded, &(&1.app))
assert [noappfile: _] = Enum.map(loaded, &(&1.status))
loaded = Mix.Dep.loaded([env: :test])
assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app))
assert [unavailable: _, noappfile: _] = Enum.map(loaded, &(&1.status))
loaded = Mix.Dep.loaded([env: :prod])
assert [] = Enum.map(loaded, &(&1.app))
end
end
end
test "nested deps converge and diverge when only is not in_upper" do
loaded_only = fn deps ->
with_deps deps, fn ->
in_fixture "deps_status", fn ->
File.mkdir_p! "custom/other_repo"
File.write! "custom/other_repo/mix.exs", """
defmodule OtherRepo do
use Mix.Project
def project do
[app: :deps_repo,
version: "0.1.0",
deps: [{:git_repo, "0.1.0", git: MixTest.Case.fixture_path("git_repo")}]]
end
end
"""
Mix.ProjectStack.clear_cache
loaded = Mix.Dep.loaded([])
assert [:git_repo, _, _] = Enum.map(loaded, &(&1.app))
hd(loaded).opts[:only]
end
end
end
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo", only: :prod},
{:other_repo, "0.1.0", path: "custom/other_repo", only: :test}]
assert loaded_only.(deps) == [:test, :prod]
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"},
{:other_repo, "0.1.0", path: "custom/other_repo", only: :test}]
refute loaded_only.(deps)
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo", only: :prod},
{:other_repo, "0.1.0", path: "custom/other_repo"}]
refute loaded_only.(deps)
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"},
{:other_repo, "0.1.0", path: "custom/other_repo"}]
refute loaded_only.(deps)
Process.put(:custom_deps_git_repo_opts, [optional: true])
deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo", only: :prod},
{:other_repo, "0.1.0", path: "custom/other_repo", only: :test}]
assert loaded_only.(deps) == :test
end
end
| 33.856419 | 102 | 0.591279 |
795d2fd54c8da3589f9b02010ba1ca0107d6b415 | 963 | ex | Elixir | clients/cloud_shell/lib/google_api/cloud_shell/v1/request_builder.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/cloud_shell/lib/google_api/cloud_shell/v1/request_builder.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/cloud_shell/lib/google_api/cloud_shell/v1/request_builder.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CloudShell.V1.RequestBuilder do
  @moduledoc """
  Helper functions for building Tesla requests.

  This module is no longer used. Please use GoogleApi.Gax.Request instead.
  """

  # Intentionally empty — presumably retained only so previously generated
  # code that references it keeps compiling; verify before removing.
end
| 37.038462 | 77 | 0.764278 |
795d4519f55f018d44790dc472c7c37b19d838cb | 17,295 | ex | Elixir | lib/rihanna/job.ex | zven21/rihanna | 5c78ef3aaecf1624bbef1ba7cf87f224ff98e416 | [
"MIT"
] | null | null | null | lib/rihanna/job.ex | zven21/rihanna | 5c78ef3aaecf1624bbef1ba7cf87f224ff98e416 | [
"MIT"
] | null | null | null | lib/rihanna/job.ex | zven21/rihanna | 5c78ef3aaecf1624bbef1ba7cf87f224ff98e416 | [
"MIT"
] | null | null | null | defmodule Rihanna.Job do
require Logger

# Types used by the callback specs below.
@type result :: any
@type reason :: any
@type arg :: any
@type t :: %__MODULE__{}

# Required: runs the job; the return value decides success/failure.
@callback perform(arg :: any) :: :ok | {:ok, result} | :error | {:error, reason}
# Optional: runs before a failed job is placed on the failed-job queue.
@callback after_error({:error, reason} | :error | Exception.t(), arg) :: any()
# Optional: decides whether/when a failed job is retried ({:ok, due_at} | :noop).
@callback retry_at({:error, reason} | :error | Exception.t(), arg, pos_integer) ::
            {:ok, DateTime.t()} | :noop
# Optional: default priority for the module (lower number = higher priority).
@callback priority() :: pos_integer()
@optional_callbacks after_error: 2, retry_at: 3, priority: 0

@moduledoc """
A behaviour for Rihanna jobs.
You must implement `c:Rihanna.Job.perform/1` in your job, and it must return
one of the following values:
- `:ok`
- `{:ok, result}`
- `:error`
- `{:error, reason}`
You can define your job like the example below:
```
defmodule MyApp.MyJob do
  @behaviour Rihanna.Job
  # NOTE: `perform/1` is a required callback. It takes exactly one argument. To
  # pass multiple arguments, wrap them in a list and destructure in the
  # function head as in this example
  def perform([arg1, arg2]) do
    success? = do_some_work(arg1, arg2)
    if success? do
      # job completed successfully
      :ok
    else
      # job execution failed
      {:error, :failed}
    end
  end
end
```
This behaviour allows you to tailor what you'd like to happen after your
job either fails, or raises an exception.
You can define an `after_error/2` method which will run before the job is
placed on the failed job queue.
If you don't define this callback, it will add it to the failed job queue
without running anything.
If you wish to re-enqueue a job to run at a different time, you can simply
return `{:reenqueue, due_at}` where `due_at` is some DateTime timestamp.
```
def after_error(failure_reason, args) do
  notify_someone(__MODULE__, failure_reason, args)
end
```
You can define a `retry_at/3` callback function. Returning `{:ok, due_at}`
will schedule the job to run again at that time. Returning :noop (the default)
proceeds with normal job failure behavior. The value of `attempts` counts up
from 0, to allow backing off `due_at` to be calculated.
```
def retry_at(_failure_reason, _args, attempts) when attempts < 3 do
  due_at = DateTime.add(DateTime.utc_now(), attempts * 5, :second)
  {:ok, due_at}
end
def retry_at(_failure_reason, _args, _attempts) do
  warn("Job failed after 3 attempts")
  :noop
end
```
You can define a `priority/0` function which will be called if no priority
is set when a job is enqueued. It should return a single integer. A priority
of 1 will be the highest priority, with larger numbers being run after.
If you don't define this callback it will default to a priority of 50, in an
attempt to have it run at a lower priority.
```
def priority(), do: 2
```
"""

# Column order matters: it must match the positional destructuring in
# from_sql/1 and the SELECT/RETURNING lists built from it below.
@fields [
  :id,
  :term,
  :enqueued_at,
  :due_at,
  :failed_at,
  :fail_reason,
  :rihanna_internal_meta,
  :priority
]

defstruct @fields

# "id, term, enqueued_at, ..." — spliced into RETURNING/SELECT clauses.
@sql_fields @fields
            |> Enum.map(&to_string/1)
            |> Enum.join(", ")

# "(j).id, (j).term, ..." — projects fields out of the row alias `j` inside
# the recursive locking query (see lock/3).
@select_fields_for_recursive_lock_query @fields
                                        |> Enum.map(fn field ->
                                          "(j).#{field}"
                                        end)
                                        |> Enum.join(", ")

# Priority used when neither the enqueue opts nor the job module set one
# (see priority/1): deliberately low so explicit priorities win.
@default_priority 50
@doc false
# Hands a locked job to the JobManager for execution (synchronous call).
def start(job) do
  GenServer.call(Rihanna.JobManager, job)
end
@doc false
# Serializes `term` with term_to_binary/1 and inserts it as a new job row.
# Options: :priority (falls back to the job module's priority/0 via
# priority/1), :due_at (schedule for later), :producer_postgres_connection.
# Emits [:rihanna, :job, :enqueued] telemetry on success.
def enqueue(term, opts \\ []) do
  serialized_term = :erlang.term_to_binary(term)

  # Fetch job module if it is a Rihanna.Job
  # NOTE(review): only 2-tuples `{mod, arg}` are treated as behaviour jobs;
  # MFA 3-tuples fall through to `nil` and get the default priority.
  job_module =
    case term do
      {m, _args} -> m
      _ -> nil
    end

  priority = opts[:priority] || priority(job_module)
  now = DateTime.utc_now()

  result =
    producer_query(
      """
      INSERT INTO "#{table()}" (term, enqueued_at, due_at, priority)
      VALUES ($1, $2, $3, $4)
      RETURNING #{@sql_fields}
      """,
      [serialized_term, now, opts[:due_at], priority],
      opts
    )

  case result do
    {:ok, %Postgrex.Result{rows: [row]}} ->
      job = from_sql(row)
      :telemetry.execute([:rihanna, :job, :enqueued], %{}, telemetry_metadata(job))
      {:ok, job}

    {:error, %Postgrex.Error{postgres: %{pg_code: "42P01"}}} ->
      # Undefined table error (e.g. `rihanna_jobs` table missing), warn user
      # to create their Rihanna jobs table
      Rihanna.Migration.raise_jobs_table_missing!()

    {:error, %Postgrex.Error{postgres: %{pg_code: "42703"}}} ->
      # Undefined column error (e.g. `due_at` missing), warn user to upgrade
      # their Rihanna jobs table
      Rihanna.Migration.raise_upgrade_required!()

    {:error, err} ->
      raise err
  end
end
@doc false
# Converts a list of raw result rows into a list of %Rihanna.Job{} structs.
# The `is_list(row)` guard distinguishes "list of rows" from a single row.
def from_sql(rows = [row | _]) when is_list(rows) and is_list(row) do
  for row <- rows, do: from_sql(row)
end

@doc false
# Converts a single row into a struct; positional order must match @fields.
def from_sql([
      id,
      serialized_term,
      enqueued_at,
      due_at,
      failed_at,
      fail_reason,
      rihanna_internal_meta,
      priority
    ]) do
  %__MODULE__{
    id: id,
    # Term was stored with :erlang.term_to_binary/1 in enqueue/2.
    term: :erlang.binary_to_term(serialized_term),
    enqueued_at: enqueued_at,
    due_at: due_at,
    failed_at: failed_at,
    fail_reason: fail_reason,
    rihanna_internal_meta: rihanna_internal_meta,
    priority: priority
  }
end

@doc false
# Empty result set maps to an empty list of jobs.
def from_sql([]), do: []
@doc false
# Clears the failure columns of a failed job and re-enqueues it "now".
# Only rows with failed_at set are updated, so a non-failed (or missing) id
# yields {:error, :job_not_found}.
def retry_failed(job_id, opts \\ []) when is_integer(job_id) do
  now = DateTime.utc_now()

  {:ok, result} =
    producer_query(
      """
      UPDATE "#{table()}"
      SET
        failed_at = NULL,
        fail_reason = NULL,
        enqueued_at = $1
      WHERE
        failed_at IS NOT NULL AND id = $2
      """,
      [now, job_id],
      opts
    )

  # id is the primary key, so num_rows can only be 0 or 1.
  case result.num_rows do
    0 ->
      {:error, :job_not_found}

    1 ->
      {:ok, :retried}
  end
end
@doc false
# Deletes every job whose deserialized term matches the mod/fun filter in
# `args` (see filter_term_list/2). Emits [:rihanna, :job, :deleted] telemetry
# with the number of rows removed.
# NOTE(review): the interpolated ids come straight from the table's id column
# (integers selected in retrieve_all_jobs/1), which is why building the IN
# list by string interpolation is acceptable here.
def delete_by(args, opts) do
  ids_to_delete =
    args
    |> filter_term_list(opts)
    |> Enum.join(",")

  # An empty IN () list is invalid SQL, so short-circuit when nothing matched.
  if ids_to_delete != "" do
    case producer_query(
           """
           DELETE FROM "#{table()}"
           WHERE id IN (#{ids_to_delete})
           """,
           [],
           opts
         ) do
      {:ok, %Postgrex.Result{num_rows: 0}} ->
        {:error, :job_not_found}

      {:ok, %Postgrex.Result{num_rows: n}} ->
        :telemetry.execute([:rihanna, :job, :deleted], %{}, %{count: n})
        {:ok, :deleted}

      error ->
        error
    end
  else
    {:error, :job_not_found}
  end
end
# The three clauses below differed only in which elements of the job term had
# to match; the shared fetch/deserialize/filter loop now lives in
# matching_job_ids/2 instead of being triplicated.

# Jobs whose term is {mod, fun, _} or {mod, fun}.
defp filter_term_list([mod: mod, fun: fun], opts) when not is_nil(mod) and not is_nil(fun) do
  matching_job_ids(opts, fn
    {^mod, ^fun, _} -> true
    {^mod, ^fun} -> true
    _ -> false
  end)
end

# Jobs whose term is {mod, _, _} or {mod, _}.
defp filter_term_list([mod: mod], opts) when not is_nil(mod) do
  matching_job_ids(opts, fn
    {^mod, _, _} -> true
    {^mod, _} -> true
    _ -> false
  end)
end

# Jobs whose term is {_, fun, _} or {_, fun}.
defp filter_term_list([fun: fun], opts) when not is_nil(fun) do
  matching_job_ids(opts, fn
    {_, ^fun, _} -> true
    {_, ^fun} -> true
    _ -> false
  end)
end

# Returns the ids of all stored jobs whose deserialized term satisfies
# `matcher`. Loads the whole table via retrieve_all_jobs/1, as before.
defp matching_job_ids(opts, matcher) do
  for [id, binary] <- retrieve_all_jobs(opts),
      matcher.(:erlang.binary_to_term(binary)),
      do: id
end
# Loads [id, serialized_term] pairs for every job in the table.
# NOTE(review): this pulls the whole table into memory; acceptable for the
# administrative delete_by/2 path, but avoid on hot paths.
defp retrieve_all_jobs(opts) do
  {:ok, result} =
    producer_query(
      """
      SELECT id, term
      FROM "#{table()}"
      """,
      [],
      opts
    )

  result.rows
end
# Deletes a single job by id and returns the deleted row as a struct.
# Emits [:rihanna, :job, :deleted] telemetry on success.
# NOTE(review): the case only handles {:ok, ...} results — a query error tuple
# raises CaseClauseError here rather than returning {:error, _}.
def delete(job_id, opts \\ []) do
  result =
    producer_query(
      """
      DELETE FROM "#{table()}"
      WHERE
        id = $1
      RETURNING #{@sql_fields}
      """,
      [job_id],
      opts
    )

  case result do
    {:ok, %Postgrex.Result{rows: [row]}} ->
      job = from_sql(row)
      :telemetry.execute([:rihanna, :job, :deleted], %{}, telemetry_metadata(job))
      {:ok, job}

    {:ok, %Postgrex.Result{num_rows: 0}} ->
      {:error, :job_not_found}
  end
end
@doc false
# Locks a single runnable job for this session, or returns nil.
def lock(pg) when is_pid(pg) do
  lock(pg, [])
end

@doc false
# Like lock/1, but skips jobs whose ids are already being worked by this
# dispatcher (exclude_ids).
def lock(pg, exclude_ids) when is_pid(pg) and is_list(exclude_ids) do
  case lock(pg, 1, exclude_ids) do
    [job] ->
      job

    [] ->
      nil
  end
end

# This query is at the heart of the how the dispatcher pulls jobs from the queue.
#
# It is heavily inspired by a similar query in Que: https://github.com/chanks/que/blob/0.x/lib/que/sql.rb#L5
#
# There are some minor additions:
#
# I could not find any easy way to check if one particular advisory lock is
# already held by the current session, so each dispatcher must pass in a list
# of ids for jobs which are currently already working so those can be excluded.
#
# We also use a FOR UPDATE SKIP LOCKED since this causes the query to skip
# jobs that were completed (and deleted) by another session in the time since
# the table snapshot was taken. In rare cases under high concurrency levels,
# leaving this out can result in double executions.
@doc false
def lock(pg, n) do
  lock(pg, n, [])
end

@doc false
# Asking for zero jobs short-circuits without touching the database.
def lock(_, 0, _) do
  []
end
@doc false
# Attempts to lock up to `n` runnable jobs (not excluded, due, not failed)
# using pg_try_advisory_lock(classid, job_id) inside a recursive CTE, and
# returns them as structs. Emits [:rihanna, :job, :locked] telemetry per job.
def lock(pg, n, exclude_ids)
    when is_pid(pg) and is_integer(n) and n > 0 and is_list(exclude_ids) do
  table = table()

  # `due_at` NULLS FIRST ensures `enqueued_at` is respected when `due_at` is unset
  lock_jobs = """
  WITH RECURSIVE jobs AS (
    SELECT #{@select_fields_for_recursive_lock_query}, pg_try_advisory_lock($1::integer, (j).id) AS locked
    FROM (
      SELECT j
      FROM #{table} AS j
      WHERE NOT (id = ANY($3))
      AND (due_at IS NULL OR due_at <= now())
      AND failed_at IS NULL
      ORDER BY priority, due_at NULLS FIRST, enqueued_at, j.id
      FOR UPDATE OF j SKIP LOCKED
      LIMIT 1
    ) AS t1
    UNION ALL (
      SELECT #{@select_fields_for_recursive_lock_query}, pg_try_advisory_lock($1::integer, (j).id) AS locked
      FROM (
        SELECT (
          SELECT j
          FROM #{table} AS j
          WHERE NOT (id = ANY($3))
          AND (due_at IS NULL OR due_at <= now())
          AND failed_at IS NULL
          AND (j.enqueued_at, j.id) > (jobs.enqueued_at, jobs.id)
          ORDER BY priority, due_at NULLS FIRST, enqueued_at, j.id
          FOR UPDATE OF j SKIP LOCKED
          LIMIT 1
        ) AS j
        FROM jobs
        WHERE jobs.id IS NOT NULL
        LIMIT 1
      ) AS t1
    )
  )
  SELECT #{@sql_fields}
  FROM jobs
  WHERE locked
  LIMIT $2
  """

  %{rows: rows, num_rows: _num_rows} =
    Postgrex.query!(pg, lock_jobs, [classid(), n, exclude_ids])

  rows
  |> Rihanna.Job.from_sql()
  |> Enum.map(&track_job_locked/1)
end

# Emits the per-job "locked" telemetry event, passing the job through unchanged.
defp track_job_locked(job) do
  :telemetry.execute([:rihanna, :job, :locked], %{}, telemetry_metadata(job))
  # return job for mapping
  job
end
@doc false
# Deletes the completed job, always releasing its advisory lock afterwards
# (the `after` clause runs even if the delete raises).
# NOTE(review): the delete goes through the shared producer connection
# (delete/1), not the dispatcher connection `pg` — only the unlock uses `pg`.
def mark_successful(pg, %{id: job_id} = job) when is_pid(pg) and is_integer(job_id) do
  :telemetry.execute([:rihanna, :job, :succeeded], %{}, telemetry_metadata(job))

  try do
    # Normalize delete/1's result into {:ok, rows_affected}.
    job_id
    |> delete()
    |> case do
      {:ok, _job} -> {:ok, 1}
      {:error, _message} -> {:ok, 0}
    end
  after
    release_lock(pg, job)
  end
end
@doc false
# Records the failure time/reason on the job row, then releases the advisory
# lock. Returns {:ok, rows_affected}.
def mark_failed(pg, %{id: job_id} = job, now, fail_reason)
    when is_pid(pg) and is_integer(job_id) do
  :telemetry.execute([:rihanna, :job, :failed], %{}, telemetry_metadata(job))

  %{num_rows: num_rows} =
    Postgrex.query!(
      pg,
      """
      UPDATE "#{table()}"
      SET
        failed_at = $1,
        fail_reason = $2
      WHERE
        id = $3
      """,
      [now, fail_reason, job_id]
    )

  release_lock(pg, job)
  {:ok, num_rows}
end
# Reschedules the job to run at `due_at` (used for {:reenqueue, due_at}
# results), then releases the advisory lock. Returns {:ok, rows_affected}.
def mark_reenqueued(pg, %{id: job_id} = job, due_at) when is_pid(pg) and is_integer(job_id) do
  :telemetry.execute([:rihanna, :job, :reenqueued], %{}, telemetry_metadata(job))

  %{num_rows: num_rows} =
    Postgrex.query!(
      pg,
      """
      UPDATE "#{table()}"
      SET
        due_at = $1
      WHERE
        id = $2
      """,
      [due_at, job_id]
    )

  release_lock(pg, job)
  {:ok, num_rows}
end
@doc """
Bumps the job's internal attempt counter and schedules it to run again at
`due_at`, then releases the advisory lock. Returns `{:ok, rows_affected}`.
"""
def mark_retried(pg, %{id: job_id} = job, due_at) when is_pid(pg) and is_integer(job_id) do
  :telemetry.execute([:rihanna, :job, :retried], %{}, telemetry_metadata(job))

  %{num_rows: num_rows} =
    Postgrex.query!(
      pg,
      """
      UPDATE "#{table()}"
      SET
        due_at = $1,
        rihanna_internal_meta = jsonb_set(rihanna_internal_meta, '{attempts}', (
          COALESCE(rihanna_internal_meta->>'attempts','0')::int + 1
        )::text::jsonb)
      WHERE
        id = $2
      """,
      [due_at, job_id]
    )

  release_lock(pg, job)
  {:ok, num_rows}
end
@doc """
The name of the jobs table.
"""
@spec table() :: String.t()
def table() do
  Rihanna.Config.jobs_table_name()
end

@doc false
# Advisory-lock "classid" namespace; paired with the job id to form the
# two-integer lock key used in lock/3 and release_lock/2.
def classid() do
  Rihanna.Config.pg_advisory_lock_class_id()
end
# Releases the session-level advisory lock (classid, job_id) for this job.
# The %{rows: [[true]]} match deliberately crashes if the lock was not
# actually held (pg_advisory_unlock would return false).
defp release_lock(pg, %{id: job_id} = job) when is_pid(pg) and is_integer(job_id) do
  :telemetry.execute([:rihanna, :job, :released], %{}, telemetry_metadata(job))

  %{rows: [[true]]} =
    Postgrex.query!(
      pg,
      """
      SELECT pg_advisory_unlock($1, $2);
      """,
      [classid(), job_id]
    )
end
@doc """
Checks if a job implements `priority` callback and runs it.

A lower value means a higher job priority. Has a default of 50,
a very low priority, to prevent new jobs from running before
higher priority jobs, when no priority is set.
"""
# Jobs enqueued with a bare (non-module) term have no module to ask.
def priority(nil), do: @default_priority

def priority(job_module) do
  # Kernel.function_exported?/3 is the idiomatic spelling of
  # :erlang.function_exported/3 (identical semantics: module must be loaded).
  if function_exported?(job_module, :priority, 0) do
    job_module.priority()
  else
    @default_priority
  end
end
@doc """
Checks whether a job implemented the `after_error` callback and runs it if it
does.
"""
def after_error(job_module, reason, arg) do
  # Idiomatic Kernel.function_exported?/3 instead of the bare :erlang call.
  if function_exported?(job_module, :after_error, 2) do
    # If they implemented the behaviour, there will only ever be one arg
    try do
      job_module.after_error(reason, arg)
    rescue
      exception ->
        # A crashing callback must never take down job processing: log the
        # problem and continue with the normal failure path.
        Logger.warn(
          """
          [Rihanna] After error callback failed
          Got an unexpected error while trying to run the `after_error` callback.
          Check your `#{inspect(job_module)}.after_error/2` callback and make sure it doesn’t raise.
          Exception: #{inspect(exception)}
          Arg1: #{inspect(reason)}
          Arg2: #{inspect(arg)}
          """,
          exception: exception,
          job_arguments: arg,
          job_failure_reason: reason,
          job_module: job_module
        )

        :noop
    end
  end
end
@doc """
Checks when a job should be retried at.

Runs the job module's optional `retry_at/3` callback (attempts defaults to 0);
returns `{:ok, due_at}` or `:noop`.
"""
def retry_at(job_module, reason, arg, attempts) do
  # Idiomatic Kernel.function_exported?/3 instead of the bare :erlang call.
  if function_exported?(job_module, :retry_at, 3) do
    try do
      # `attempts` may be nil before the first retry; the callback sees 0.
      job_module.retry_at(reason, arg, attempts || 0)
    rescue
      exception ->
        # BUG FIX: the old message referenced `retry_at/4` and `.retry_at/2`;
        # the optional callback is retry_at/3 (see @optional_callbacks).
        Logger.warn(
          """
          [Rihanna] retry_at/3 callback failed
          Got an unexpected error while trying to run the `retry_at` callback.
          Check your `#{inspect(job_module)}.retry_at/3` callback and make sure it doesn’t raise.
          Exception: #{inspect(exception)}
          Arg1: #{inspect(reason)}
          Arg2: #{inspect(arg)}
          """,
          exception: exception,
          job_arguments: arg,
          job_failure_reason: reason,
          job_module: job_module
        )

        :noop
    end
  else
    :noop
  end
end
# Some operations can use the shared database connection as they don't use locks
defp producer_query(query, args, opts) do
  opts
  |> adapter()
  |> producer_do_query(query, args)
end

# BUG FIX: `Code.ensure_compiled/1` returns `{:module, mod}` or
# `{:error, reason}` — both truthy tuples — so the old
# `if Code.ensure_compiled(Ecto)` guard was always true.
# `Code.ensure_loaded?/1` returns a boolean, so the Ecto clause is only
# compiled in when Ecto is actually available.
if Code.ensure_loaded?(Ecto) do
  defp producer_do_query({Ecto, repo}, query, args) do
    Ecto.Adapters.SQL.query(repo, query, args)
  end
end

defp producer_do_query({Postgrex, conn}, query, args) do
  Postgrex.query(conn, query, args)
end

# Per-call option takes precedence over the application-wide configuration.
defp adapter(opts) do
  opts[:producer_postgres_connection] ||
    Rihanna.Config.producer_postgres_connection()
end
# Builds the shared telemetry metadata for a single job. Two clauses cover
# the two supported term shapes: a behaviour job `{module, arg}` and an
# MFA job `{module, fun, args}`.
defp telemetry_metadata(%{id: job_id, term: {job_module, _arg}}),
  do: %{job_id: job_id, job_module: job_module, count: 1}

defp telemetry_metadata(%{id: job_id, term: {job_module, _fun, _args}}),
  do: %{job_id: job_id, job_module: job_module, count: 1}
end
| 26.085973 | 113 | 0.576178 |
795d58499bd0c031e192ad34e70beab55ccaf7a1 | 4,221 | ex | Elixir | fixtures/elixir_output/post_data_binary_with_equals.ex | martinsirbe/curlconverter | c5324e85d2ca24ef4743fb2bb36139d23367e293 | [
"MIT"
] | 4,955 | 2015-01-02T09:04:20.000Z | 2021-10-06T03:54:43.000Z | fixtures/elixir_output/post_data_binary_with_equals.ex | martinsirbe/curlconverter | c5324e85d2ca24ef4743fb2bb36139d23367e293 | [
"MIT"
] | 242 | 2015-03-27T05:59:11.000Z | 2021-10-03T08:36:05.000Z | fixtures/elixir_output/post_data_binary_with_equals.ex | martinsirbe/curlconverter | c5324e85d2ca24ef4743fb2bb36139d23367e293 | [
"MIT"
] | 504 | 2015-01-02T16:04:36.000Z | 2021-10-01T03:43:55.000Z | request = %HTTPoison.Request{
method: :post,
url: "https://localhost/api/service.svc",
options: [hackney: [cookies: [~s|X-BackEndCookie=S-1-5-21-1234556-56678-12345-2345=alphanumericstring12345/anotheralphanumericstring12345/scsiAdf/P; ClientId=LoremIupsum; PrivateComputer=true; PBack=0; cadata=bx88rrCBehITlBWSozO2l2hlFGu//JjT1/k6dewX5shV32jANUZSMU6GR+M25B6YpBODEgXzxfIHDnvxNC6SJoaE/d8RWX3uDnbkd+m91jNhMXNSYIRYTJHVFdPG06AE; cadataTTL=NfDhBViTJMUdC+ir+6BYvg==; cadataKey=qUY+OLTD9V14CFK6/CUPyrJWMxl1FFqZFjB8/qcS0/q55eqGvP9bWvX+XuSYVv3hIGAn9QNPhIDK6NP9LwCBdu25f2BUFDUWJruGO8MW02izSWzRUnni00xWQq3Y3nNKvpvO+OIR641BPHVZ0+lzCw2Wt8uzEnryCWAjlleozF/XWjpTN4/AaTmcIjEZUDN+fo4494rD0mADtEHv2gmd5mhLe+iyii/L9nAB3UuiJomwbRbKgy22Tj8cyavmLC4ZaViqW9E102NOLU4FYLgdZVET+mbdg==; cadataIV=bTM88YL1zmz7FsBEB0y3nI2SrdSTy+KLxCpx2FRfIZYFo2spN1IHQMSCT76OXrg79sVPhyqXk+N9rOj6M9KsQl4KqMNVBcoXgp24POpgoTwd4FBmKtAYbd9SDErna3jrMO168ML9PDG18K3CnBf6YG1tsIs0gXOEP9LzHVmUPF7KCKqUFiOiZGWuwmPhl85eo77BbEpVN2JkPnzuQWn6tC0cY4f2cJDlr3Z23SrAUVwwXmgRg2DXfOF5MIEkpwYiiI6sABCD9rsSnE6zTXlvZg33hjiD/ywUV1ZWjI2M/4zBixa4s150+dOnMmvtEFs/nOMnvMJui4PEDlTA==; cadataSig=WL3hB+av7sO3bzjL+Efe5b4exnvQxSInH3U5jDvfnPcttSp0XUF3y/NB573C0CTBYuOH/40smFssXlrKhT9tG2ITivdSIIamOmarmC8XwFOv9qQIFMHofcO/jjRDMqF0qRk7WBAC2FgBQrf2Tvq7wk5IX/JHn6zhlgKALAAqH9L9JNC244etnjj9YNaMDYEHV2M2jVTu3FsELqw1rSSqp0hEBlh+aFBvYCBg5hS1mVI76ZCHZVa0OUejiH2yiZyJIKHUI+Sv0rpU3iiQNtIFmGEdwhoo/rga4s4Dc2UsJLQ8c0yGlZgflYs+7Q5gPr74/mTUin60ej/w3M0roUl3FQ==; UC=d8be544621964f3c9865b3ee872fd432; AppcacheVer=15.0.1236.3:en-usbase; X-OWA-CANARY=VOXQP6xtGkiNnv7E4rFt8TrmclqVFtQI4IJqZflrR7Wz9AMPkMsFoyAlquw1YGsTUxIkVouAcvk.|]]],
headers: [
{~s|Origin|, ~s|https://nih.mail.edu.fr|},
{~s|Accept-Encoding|, ~s|gzip, deflate, br|},
{~s|X-EWS-TargetVersion|, ~s|2.5|},
{~s|Accept-Language|, ~s|en-US,en;q=0.8|},
{~s|User-Agent|, ~s|Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.99 Safari/537.36|},
{~s|Action|, ~s|CreateItem|},
{~s|X-Requested-With|, ~s|XMLHttpRequest|},
{~s|Connection|, ~s|keep-alive|},
{~s|X-OWA-CANARY|, ~s|VOXQP6xtGkiNnv7E4rFt8TrmclqVFtQI4IJqZflrR7Wz9AMPkMsFoyAlquw1YGsTUxIkVouAcvk.|},
{~s|X-OWA-ActionName|, ~s|CreateMessageForComposeSend|},
{~s|X-OWA-ActionId|, ~s|-37|},
{~s|X-OWA-ServiceUnavailableOnTransientError|, ~s|true|},
{~s|Content-Type|, ~s|application/json; charset=UTF-8|},
{~s|Accept|, ~s|*/*|},
{~s|Referer|, ~s|https://localhost/api/|},
{~s|X-OWA-ClientBuildVersion|, ~s|15.0.1236.3|},
{~s|X-OWA-CorrelationId|, ~s|2f11f8fb-f6c6-43a5-881d-8a1b242a4e70_148023102251337|},
{~s|DNT|, ~s|1|},
{~s|X-OWA-ClientBegin|, ~s|2016-11-27T07:17:02.513|},
{~s|X-OWA-Attempt|, ~s|1|},
],
params: [
{~s|action|, ~s|CreateItem|},
{~s|ID|, ~s|-37|},
{~s|AC|, ~s|1|},
],
body: ~s|{"__type":"CreateItemJsonRequest:#Exchange","Header":{"__type":"JsonRequestHeaders:#Exchange","RequestServerVersion":"Exchange2013","TimeZoneContext":{"__type":"TimeZoneContext:#Exchange","TimeZoneDefinition":{"__type":"TimeZoneDefinitionType:#Exchange","Id":"France Standard Time"}}},"Body":{"__type":"CreateItemRequest:#Exchange","Items":[{"__type":"Message:#Exchange","Subject":"API","Body":{"__type":"BodyContentType:#Exchange","BodyType":"HTML","Value":"<html><head><meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\"><style type=\"text/css\" style=\"display:none\"><!-- p { margin-top: 0px; margin-bottom: 0px; }--></style></head><body dir=\"ltr\" style=\"font-size:12pt;color:#000000;background-color:#FFFFFF;font-family:Calibri,Arial,Helvetica,sans-serif;\"><p>API Test for NickC<br></p></body></html>"},"Importance":"Normal","From":null,"ToRecipients":[{"Name":"George LUCAS","EmailAddress":"[email protected]","RoutingType":"SMTP","MailboxType":"Mailbox","OriginalDisplayName":"[email protected]","SipUri":" "}],"CcRecipients":[],"BccRecipients":[],"Sensitivity":"Normal","IsDeliveryReceiptRequested":false,"IsReadReceiptRequested":false}],"ClientSupportsIrm":true,"OutboundCharset":"AutoDetect","MessageDisposition":"SendAndSaveCopy","ComposeOperation":"newMail"}}|
}
response = HTTPoison.request(request)
| 117.25 | 1,547 | 0.762142 |
795d6b6e79c7e19af561f4bc2ba09b1a5dda4a0b | 15 | exs | Elixir | .iex.exs | ballpointcarrot/advent-of-code-2021 | a1adde8a13069efedfc7c190d6c71b06c2d15173 | [
"MIT"
] | null | null | null | .iex.exs | ballpointcarrot/advent-of-code-2021 | a1adde8a13069efedfc7c190d6c71b06c2d15173 | [
"MIT"
] | null | null | null | .iex.exs | ballpointcarrot/advent-of-code-2021 | a1adde8a13069efedfc7c190d6c71b06c2d15173 | [
"MIT"
] | null | null | null | import AOC.IEx
| 7.5 | 14 | 0.8 |
795db734c94958227b4c973aaff0c7b48151ad95 | 1,622 | ex | Elixir | lib/credo/check/warning/expensive_empty_enum_check.ex | jlgeering/credo | b952190ed758c262aa0d9bbee01227f9b1f0c63b | [
"MIT"
] | null | null | null | lib/credo/check/warning/expensive_empty_enum_check.ex | jlgeering/credo | b952190ed758c262aa0d9bbee01227f9b1f0c63b | [
"MIT"
] | null | null | null | lib/credo/check/warning/expensive_empty_enum_check.ex | jlgeering/credo | b952190ed758c262aa0d9bbee01227f9b1f0c63b | [
"MIT"
] | null | null | null | defmodule Credo.Check.Warning.ExpensiveEmptyEnumCheck do
@moduledoc """
Checking if the size of the enum is `0` can be very expensive, since you are
determining the exact count of elements.
Checking if an enum is empty should be done by using
Enum.empty?(enum)
or
list == []
"""
@explanation [check: @moduledoc]
use Credo.Check, base_priority: :high
@doc false
def run(source_file, params \\ []) do
issue_meta = IssueMeta.for(source_file, params)
Credo.Code.prewalk(source_file, &traverse(&1, &2, issue_meta))
end
@enum_count_pattern quote do: {
{:., _, [{:__aliases__, _, [:Enum]}, :count]},
_,
_
}
@length_pattern quote do: {:length, _, _}
@comparisons [
{@enum_count_pattern, 0},
{0, @enum_count_pattern},
{@length_pattern, 0},
{0, @length_pattern}
]
for {lhs, rhs} <- @comparisons do
defp traverse(
{:==, meta, [unquote(lhs), unquote(rhs)]} = ast,
issues,
issue_meta
) do
{ast, issues_for_call(meta, issues, issue_meta, ast)}
end
end
defp traverse(ast, issues, _issue_meta) do
{ast, issues}
end
def issues_for_call(meta, issues, issue_meta, ast) do
[issue_for(issue_meta, meta[:line], Macro.to_string(ast)) | issues]
end
defp issue_for(issue_meta, line_no, trigger) do
format_issue(
issue_meta,
message: "#{trigger} is expensive. Prefer Enum.empty?/1 or list == []",
trigger: trigger,
line_no: line_no
)
end
end
| 24.208955 | 78 | 0.590012 |
795de77da42b09a2320032fe6ff8ed18b5a2b3cc | 492 | exs | Elixir | blue_sky/config/test.exs | Rumel/bluesky | 9c68cad09e90d98411117ea7334f41c88c82ab19 | [
"MIT"
] | 2 | 2016-03-11T00:50:04.000Z | 2016-04-05T02:17:20.000Z | blue_sky/config/test.exs | Rumel/bluesky | 9c68cad09e90d98411117ea7334f41c88c82ab19 | [
"MIT"
] | null | null | null | blue_sky/config/test.exs | Rumel/bluesky | 9c68cad09e90d98411117ea7334f41c88c82ab19 | [
"MIT"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :blue_sky, BlueSky.Endpoint,
http: [port: 4001],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
# Configure your database
config :blue_sky, BlueSky.Repo,
adapter: Ecto.Adapters.Postgres,
username: "bluesky",
password: "bluesky",
database: "blue_sky_test",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
| 24.6 | 56 | 0.73374 |
795e02365a3e925233882ce23b3be07512059f52 | 78 | exs | Elixir | elixir-generate-dockerfile/src/test/test_helper.exs | kayodeosagbemi/elixir-runtime | 1746adf362444e3e0cc2daa5e461be24f1cb624a | [
"Apache-2.0"
] | 170 | 2017-08-25T06:40:14.000Z | 2022-01-10T22:18:51.000Z | elixir-generate-dockerfile/src/test/test_helper.exs | kayodeosagbemi/elixir-runtime | 1746adf362444e3e0cc2daa5e461be24f1cb624a | [
"Apache-2.0"
] | 27 | 2017-09-07T05:57:37.000Z | 2022-03-22T13:40:47.000Z | elixir-generate-dockerfile/src/test/test_helper.exs | kayodeosagbemi/elixir-runtime | 1746adf362444e3e0cc2daa5e461be24f1cb624a | [
"Apache-2.0"
] | 16 | 2017-11-14T01:45:00.000Z | 2021-10-09T03:26:39.000Z | System.put_env("DISABLE_GCP_METADATA_FOR_UNIT_TESTS", "true")
ExUnit.start()
| 19.5 | 61 | 0.807692 |
795e41a2e85183671dac98ff1a65fa9cf25c27a1 | 3,240 | ex | Elixir | clients/sts/lib/google_api/sts/v1/model/google_type_expr.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/sts/lib/google_api/sts/v1/model/google_type_expr.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/sts/lib/google_api/sts/v1/model/google_type_expr.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE(review): generated client model (see header: "auto generated by the
# elixir code generator") — avoid hand-editing beyond comments.
defmodule GoogleApi.STS.V1.Model.GoogleTypeExpr do
  @moduledoc """
  Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec. Example (Comparison): title: "Summary size limit" description: "Determines if a summary is less than 100 chars" expression: "document.summary.size() < 100" Example (Equality): title: "Requestor is owner" description: "Determines if requestor is the document owner" expression: "document.owner == request.auth.claims.email" Example (Logic): title: "Public documents" description: "Determine whether the document should be publicly visible" expression: "document.type != 'private' && document.type != 'internal'" Example (Data Manipulation): title: "Notification string" description: "Create a notification string with a timestamp." expression: "'New message received at ' + string(document.create_time)" The exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information.

  ## Attributes

  *   `description` (*type:* `String.t`, *default:* `nil`) - Optional. Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
  *   `expression` (*type:* `String.t`, *default:* `nil`) - Textual representation of an expression in Common Expression Language syntax.
  *   `location` (*type:* `String.t`, *default:* `nil`) - Optional. String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
  *   `title` (*type:* `String.t`, *default:* `nil`) - Optional. Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :description => String.t() | nil,
          :expression => String.t() | nil,
          :location => String.t() | nil,
          :title => String.t() | nil
        }

  field(:description)
  field(:expression)
  field(:location)
  field(:title)
end

# JSON decoding/encoding delegated to the shared Gax model machinery.
defimpl Poison.Decoder, for: GoogleApi.STS.V1.Model.GoogleTypeExpr do
  def decode(value, options) do
    GoogleApi.STS.V1.Model.GoogleTypeExpr.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.STS.V1.Model.GoogleTypeExpr do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 57.857143 | 1,092 | 0.734259 |
795e4872c86a7e16525032420ab3add658e5b5f0 | 4,348 | ex | Elixir | lib/Helpers.ex | chriscaragianis/poly_bisector | 00c0feef268db1c8bace619dd3e0cc4db4ae1e5f | [
"MIT"
] | null | null | null | lib/Helpers.ex | chriscaragianis/poly_bisector | 00c0feef268db1c8bace619dd3e0cc4db4ae1e5f | [
"MIT"
] | null | null | null | lib/Helpers.ex | chriscaragianis/poly_bisector | 00c0feef268db1c8bace619dd3e0cc4db4ae1e5f | [
"MIT"
] | null | null | null | defmodule PolyPartition.Helpers do
alias PolyPartition.Geometry
@moduledoc """
Helper functions for PolyPartition
"""
@doc """
Calculates the determinant of two points provided as a segment

## Examples

    iex> PolyPartition.Helpers.det_seg([ [2, 3], [1, -1] ])
    -5

"""
# 2x2 determinant (cross product) of the segment's endpoints [x1,y1], [x2,y2].
def det_seg(seg) do
  [[x1, y1], [x2, y2]] = seg
  x1 * y2 - x2 * y1
end
@doc """
Takes two numbers, returns true if their signs differ, false otherwise

## Examples

    iex> PolyPartition.Helpers.sgn_to_bool(-1, 2)
    true

    iex> PolyPartition.Helpers.sgn_to_bool(3, 3)
    false

    iex> PolyPartition.Helpers.sgn_to_bool(3, 0)
    false

"""
# Opposite signs iff the product is strictly negative (zero counts as "same").
def sgn_to_bool(a, b), do: a * b < 0
@doc """
Find the index of the vertex to use to split the polygon

`split_coord` assumes there is a vertex that is

- not a neighbor of the first vertex, and
- is "line of sight" from the first vertex.

I believe this is always the case for polygons with more than three vertices,
but haven't proven it.

`split_coord` will start by testing the vertex farthest away (in circular order)
from the first vertex and step out one vertex at a time alternating left and right.

The `step` parameter is incremented by a private function `next`.

## Examples

    iex> poly = [[0,1], [1, 0], [2, 0], [3,1], [2,2], [1,2]]
    iex> PolyPartition.Helpers.split_coord(poly, 0)
    3

    iex> poly = [[0,1], [1, 0], [2, 0], [3,1], [2,2], [2,0.5]]
    iex> PolyPartition.Helpers.split_coord(poly, 0)
    2

"""
def split_coord(poly, step) do
  # Start at the vertex "opposite" the first one, offset by the current step.
  opp_index = round(:math.floor(length(poly) / 2)) + step

  # NOTE(review): recursion terminates only if Geometry.good_cut?/2 eventually
  # succeeds for some step — relies on the assumption stated in the doc above.
  case Geometry.good_cut?(poly, opp_index) do
    false -> split_coord(poly, next(step))
    _ -> opp_index
  end
end

# Produces the step sequence 0, -1, 1, -2, 2, ... alternating sides around
# the opposite vertex.
defp next(n) do
  cond do
    n < 0 -> (-1 * n)
    true -> -1 * (n + 1)
  end
end
@doc """
Add a vertex at the midpoint of a polygon's longest side

If we have a triangle, we need to add a vertex to make a split. We choose the
longest side to keep the polygon as "fat" as possible

The generated point will have float coordinates, regardless of input

## Examples

    iex> poly = [[0,1], [1,0], [2,1]]
    iex> PolyPartition.Helpers.split_side(poly)
    [[1.0, 1.0], [0,1], [1,0], [2,1],]

"""
def split_side(poly) do
  # For each side (vertex -> next vertex, wrapping), compute its squared
  # length and midpoint, then keep the side with the maximum squared length.
  {_, pt, ind} = poly
  |> Stream.with_index
  |> Enum.map(fn(x) ->
    {point, index} = x
    next = Enum.at(poly, rem((index + 1), length(poly)))
    {Geometry.sq_length([point, next]), Geometry.midpoint([point, next]), index}
  end)
  |> List.foldr({0.0, 0, 0}, fn(x, acc) ->
    {length, _, _} = x
    {a_length, _, _} = acc
    cond do
      length > a_length -> x
      true -> acc
    end
  end)
  # Insert the winning midpoint after the side's starting vertex.
  List.insert_at(poly, rem((ind + 1), length(poly)), pt)
end
@doc """
Takes a polygon and returns a list of two polygons forming a partition of the first

If any degenerate polygons are created, we retry with a different initial vertex

## Examples

    iex> poly = [[0,1], [1, 0], [2, 0], [3,1], [2,2], [1,2]]
    iex> PolyPartition.Helpers.split(poly, 0)
    [[[0,1], [1,0], [2,0], [3,1]], [[3,1], [2,2], [1,2], [0,1]]]

"""
def split(poly, retries) do
  cond do
    # All start vertices exhausted: give the (unsplit) polygon back.
    retries >= length(poly) -> poly
    true -> p = case length(poly) do
      # Triangles get an extra midpoint vertex first (see split_side/1).
      3 -> split_side(poly)
      _ -> poly
    end
    opp_index = split_coord(p, 0)
    result = []
    # First half: vertices 0..opp_index; second half: opp_index..end closed
    # back to the first vertex.
    # NOTE(review): rotate_list/1 on a one-element list is a no-op, so this
    # is equivalent to [Enum.slice(p, 0..opp_index)] — TODO confirm intent.
    r = result ++ rotate_list([Enum.slice(p, 0..opp_index)]) ++ [Enum.slice(p, opp_index..length(p)) ++ [hd(p)]]
    # Smallest area of the two pieces; 0.0 means the cut was degenerate.
    t = r
    |> Enum.map(fn(x) -> Geometry.area(x) end)
    |> List.foldr(1, fn(x, acc) -> cond do
      x < acc -> x
      true -> acc
    end
    end)
    case {t, retries >= length(poly) - 1} do
      {0.0, false} -> split(rotate_list(poly), retries + 1) #split failed, try another vertex
      _ -> r
    end
  end
end

# Cyclically shifts the list left by one (element i takes element i+1's value,
# wrapping at the end).
defp rotate_list(list) do
  list
  |> Stream.with_index
  |> Enum.map(fn(x) ->
    {_, index} = x
    Enum.at(list, rem((index + 1), length(list)))
  end)
end
end
| 26.192771 | 120 | 0.555428 |
795e5be55072264d29f835a61e688a1e98ef6cb0 | 1,077 | ex | Elixir | lib/elixtagram/api/media.ex | Zensavona/elixtagram | 248682e23ac416c59d37e964e0a29afda625ece3 | [
"MIT"
] | 84 | 2015-10-02T08:17:54.000Z | 2021-01-25T10:44:00.000Z | lib/elixtagram/api/media.ex | Zensavona/elixtagram | 248682e23ac416c59d37e964e0a29afda625ece3 | [
"MIT"
] | 24 | 2015-10-29T14:53:25.000Z | 2019-06-06T19:12:50.000Z | lib/elixtagram/api/media.ex | Zensavona/elixtagram | 248682e23ac416c59d37e964e0a29afda625ece3 | [
"MIT"
] | 22 | 2015-11-15T04:06:52.000Z | 2020-11-15T18:41:26.000Z | defmodule Elixtagram.API.Media do
@moduledoc """
Provides access to the `/media/` area of the Instagram API (for internal use).
"""
import Elixtagram.API.Base
import Elixtagram.Parser
@doc """
Fetches a media item from the Instagram API by id.
"""
def media(media_id, token \\ :global) do
get("/media/#{media_id}", token).data |> parse_media
end
@doc """
Fetches a media item from the Instagram API by shortcode.
"""
def shortcode(code, token \\ :global) do
get("/media/shortcode/#{code}", token).data |> parse_media
end
@doc """
Searches media items based on some passed params.
"""
def search(params, token \\ :global) do
accepted_params = [:distance, :count, :min_timestamp, :max_timestamp, :lat, :lng]
params = parse_request_params(params, accepted_params)
get("/media/search", token, params).data |> Enum.map(&parse_media/1)
end
@doc """
Fetches popular media
"""
def popular(count, token \\ :global) do
get("/media/popular", token, [["count", count]]).data |> Enum.map(&parse_media/1)
end
end
| 28.342105 | 85 | 0.663881 |
795e68d57f4bd15b63889412b8babd18aa91ac79 | 624 | ex | Elixir | testData/org/elixir_lang/parser_definition/matched_dot_call_operation_parsing_test_case/AtNonNumericOperation.ex | ArtemGordinsky/intellij-elixir | e2d9b4dfc65651b293d499043edeaad606cf5652 | [
"Apache-2.0"
] | null | null | null | testData/org/elixir_lang/parser_definition/matched_dot_call_operation_parsing_test_case/AtNonNumericOperation.ex | ArtemGordinsky/intellij-elixir | e2d9b4dfc65651b293d499043edeaad606cf5652 | [
"Apache-2.0"
] | null | null | null | testData/org/elixir_lang/parser_definition/matched_dot_call_operation_parsing_test_case/AtNonNumericOperation.ex | ArtemGordinsky/intellij-elixir | e2d9b4dfc65651b293d499043edeaad606cf5652 | [
"Apache-2.0"
] | null | null | null | @one.()
@one.(function positional, key: value)
@one.(key_one: value_one, key_two: value_two)
@one.(
&one,
one <- two,
one when two,
one | two,
one = two,
one or two,
one || two,
one and two,
one && two,
one != two,
one < two,
one |> two,
one in two,
one ++ two,
one..two,
one + two,
one ^^^ two,
!one,
not one,
one.(),
Two.Three,
@one,
one,
@1,
&1,
!1,
(;),
1,
[],
"StringLine",
"""
String
Heredoc
""",
'CharListLine',
'''
CharList
Heredoc
''',
~x{sigil}modifiers,
nil,
:atom,
Alias
)
@one.(
one,
key: value
)
@one.(
one
)(
two
)
| 10.758621 | 45 | 0.496795 |
795e9648aec3505d69956bf43f77b9438a63dc50 | 1,187 | ex | Elixir | lib/blue_jet_web/controllers/identity/user_controller.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 44 | 2018-05-09T01:08:57.000Z | 2021-01-19T07:25:26.000Z | lib/blue_jet_web/controllers/identity/user_controller.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 36 | 2018-05-08T23:59:54.000Z | 2018-09-28T13:50:30.000Z | lib/blue_jet_web/controllers/identity/user_controller.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 9 | 2018-05-09T14:09:19.000Z | 2021-03-21T21:04:04.000Z | defmodule BlueJetWeb.UserController do
use BlueJetWeb, :controller
alias BlueJet.Identity
action_fallback BlueJetWeb.FallbackController
plug :scrub_params, "data" when action in [:create, :update]
def create(conn, %{"data" => %{"type" => "User"}}) do
request = build_context_request(conn, :create, normalize: ["role"])
case Identity.create_user(request) do
{:ok, %{data: %{account_id: nil}}} ->
send_resp(conn, :no_content, "")
{:ok, %{data: user, meta: meta}} ->
conn
|> put_status(:created)
|> render("show.json-api", data: user, opts: [meta: camelize_map(meta), include: conn.query_params["include"]])
{:error, %ContextResponse{ errors: errors }} ->
conn
|> put_status(:unprocessable_entity)
|> render(:errors, data: extract_errors(errors))
other ->
other
end
end
def show(conn, _),
do: default(conn, :show, &Identity.get_user/1)
def update(conn, %{"data" => %{"type" => "User"}}),
do: default(conn, :update, &Identity.update_user/1, normalize: ["role"])
def delete(conn, %{"id" => _}),
do: default(conn, :delete, &Identity.delete_user/1)
end
| 28.95122 | 119 | 0.618366 |
795eb2bde090c2c5bef168ddcc24baed125c620c | 31,189 | exs | Elixir | test/date_time_parser_test.exs | dbernheisel/date_time_parser | 53378ccfb5938c304a18fd62b1f288ab94dd5446 | [
"MIT"
] | null | null | null | test/date_time_parser_test.exs | dbernheisel/date_time_parser | 53378ccfb5938c304a18fd62b1f288ab94dd5446 | [
"MIT"
] | null | null | null | test/date_time_parser_test.exs | dbernheisel/date_time_parser | 53378ccfb5938c304a18fd62b1f288ab94dd5446 | [
"MIT"
] | null | null | null | defmodule DateTimeParserTest do
use ExUnit.Case, async: true
import DateTimeParserTestMacros
import ExUnit.CaptureLog
alias DateTimeParser
alias DateTimeParser.Parser
  # Doctests are only compiled on Elixir >= 1.5, where the doctest syntax the
  # library's docs rely on is supported.
  if Version.match?(System.version(), ">= 1.5.0") do
    doctest DateTimeParser
  end
describe "config" do
test "parse/2, can turn off parsers" do
assert {:error, _} = DateTimeParser.parse("000", parsers: [])
assert {:ok, _} = DateTimeParser.parse("000", parsers: [Parser.Serial])
assert {:error, _} = DateTimeParser.parse("0000000001", parsers: [])
assert {:ok, _} = DateTimeParser.parse("0000000001", parsers: [Parser.Epoch])
assert {:error, _} = DateTimeParser.parse("2019-01-01", parsers: [])
assert {:ok, _} = DateTimeParser.parse("2019-01-01", parsers: [Parser.Tokenizer])
assert capture_log(fn ->
assert {:ok, _} = DateTimeParser.parse("000", parsers: [:serial])
end) =~ "Using :serial is deprecated"
assert capture_log(fn ->
assert {:ok, _} = DateTimeParser.parse("0000000001", parsers: [:epoch])
end) =~ "Using :epoch is deprecated"
assert capture_log(fn ->
assert {:ok, _} = DateTimeParser.parse("2019-01-01", parsers: [:tokenizer])
end) =~ "Using :tokenizer is deprecated"
assert {:error, _} = DateTimeParser.parse("2019-01-01", parsers: [])
end
test "parse_date/2, can turn off parsers" do
assert {:error, _} = DateTimeParser.parse_date("000", parsers: [])
assert {:ok, %Date{}} = DateTimeParser.parse_date("000", parsers: [Parser.Serial])
assert {:error, _} = DateTimeParser.parse_date("0000000001", parsers: [])
assert {:ok, %Date{}} = DateTimeParser.parse_date("0000000001", parsers: [Parser.Epoch])
assert {:error, _} = DateTimeParser.parse_date("2019-01-01", parsers: [])
assert {:ok, %Date{}} = DateTimeParser.parse_date("2019-01-01", parsers: [Parser.Tokenizer])
assert capture_log(fn ->
assert {:ok, %Date{}} = DateTimeParser.parse_date("000", parsers: [:serial])
end) =~ "Using :serial is deprecated"
assert capture_log(fn ->
assert {:ok, %Date{}} = DateTimeParser.parse_date("0000000001", parsers: [:epoch])
end) =~ "Using :epoch is deprecated"
assert capture_log(fn ->
assert {:ok, %Date{}} =
DateTimeParser.parse_date("2019-01-01", parsers: [:tokenizer])
end) =~ "Using :tokenizer is deprecated"
end
test "parse_time/2, can turn off parsers" do
assert {:error, _} = DateTimeParser.parse_time("000.0", parsers: [])
assert {:ok, %Time{}} = DateTimeParser.parse_time("000.0", parsers: [Parser.Serial])
assert {:error, _} = DateTimeParser.parse_time("0000000001", parsers: [])
assert {:ok, %Time{}} = DateTimeParser.parse_time("0000000001", parsers: [Parser.Epoch])
assert {:error, _} = DateTimeParser.parse_time("10:30", parsers: [])
assert {:ok, %Time{}} = DateTimeParser.parse_time("10:30", parsers: [Parser.Tokenizer])
assert capture_log(fn ->
assert {:ok, %Time{}} = DateTimeParser.parse_time("10:30", parsers: [:tokenizer])
end) =~ "Using :tokenizer is deprecated"
assert capture_log(fn ->
assert {:ok, %Time{}} = DateTimeParser.parse_time("000.0", parsers: [:serial])
end) =~ "Using :serial is deprecated"
assert capture_log(fn ->
assert {:ok, %Time{}} = DateTimeParser.parse_time("0000000001", parsers: [:epoch])
end) =~ "Using :epoch is deprecated"
end
test "parse_datetime/2, can turn off parsers" do
assert {:error, _} = DateTimeParser.parse_datetime("100.0", parsers: [])
assert {:ok, %NaiveDateTime{}} =
DateTimeParser.parse_datetime("100.0", parsers: [Parser.Serial])
assert {:error, _} = DateTimeParser.parse_datetime("0000000001", parsers: [])
assert {:ok, %DateTime{}} =
DateTimeParser.parse_datetime("0000000001", parsers: [Parser.Epoch])
assert {:error, _} = DateTimeParser.parse_datetime("2019-01-01T10:30:00", parsers: [])
assert {:ok, %NaiveDateTime{}} =
DateTimeParser.parse_datetime("2019-01-01T10:30:00", parsers: [Parser.Tokenizer])
assert capture_log(fn ->
assert {:ok, %NaiveDateTime{}} =
DateTimeParser.parse_datetime("100.0", parsers: [:serial])
end) =~ "Using :serial is deprecated"
assert capture_log(fn ->
assert {:ok, %DateTime{}} =
DateTimeParser.parse_datetime("0000000001", parsers: [:epoch])
end) =~ "Using :epoch is deprecated"
assert capture_log(fn ->
assert {:ok, %NaiveDateTime{}} =
DateTimeParser.parse_datetime("2019-01-01T10:30:00", parsers: [:tokenizer])
end) =~ "Using :tokenizer is deprecated"
end
end
describe "compare with Ruby/Rails datetime parsing" do
test_parsing(" 01 Feb 2013", "2013-02-01")
test_parsing(" 03 Jan 2013 10:15:26 -0800", "2013-01-03T18:15:26Z", to_utc: true)
test_parsing(" 10/1/2018 :: AM", "2018-10-01")
test_parsing(" 11 Feb 2013", "2013-02-11")
test_parsing(" 11 Jan 2013 13:26:55 -0800", "2013-01-11T21:26:55Z", to_utc: true)
test_parsing(" 12/26/2016", "2016-12-26")
test_parsing(" 24 Sep 2013", "2013-09-24")
test_parsing("01-01-2018", "2018-01-01")
test_parsing("01-Feb-18", "2018-02-01")
test_parsing("01-Jul", "2019-07-01", assume_date: ~D[2019-01-05])
test_parsing("01-Jul-18", "2018-07-01")
test_parsing("01.09.2018", "2018-09-01")
test_parsing("01.11.2018", "2018-11-01")
test_parsing("01/01/17", "2017-01-01")
test_parsing("01/01/2017", "2017-01-01")
test_parsing("01/01/2018 - 17:06", "2018-01-01T17:06:00")
test_parsing("01/01/2018 01:21PM", "2018-01-01T13:21:00")
test_parsing("01/01/2018 14:44", "2018-01-01T14:44:00")
test_parsing("01/01/2018 6:22", "2018-01-01T06:22:00")
test_parsing("01/02/16", "2016-01-02")
test_parsing("01/02/18 01:02 AM", "2018-01-02T01:02:00")
test_parsing("01/02/2015", "2015-01-02")
test_parsing("01/Jun./2018", "2018-06-01")
test_parsing("02-05-2018", "2018-05-02")
test_parsing("02-Oct-17", "2017-10-02")
test_parsing("02/01/17", "2017-02-01")
test_parsing("02/01/2018", "2018-02-01")
test_parsing("02/21/2018 9:37:42 AM", "2018-02-21T09:37:42")
test_parsing("03/5/2018", "2018-03-05")
test_parsing("2010/01/01", "2010-01-01")
test_parsing("05/01/2018 0:00", "2018-05-01T00:00:00")
test_parsing("06/14/2018 09:42:08 PM-0500", "2018-06-15T02:42:08Z", to_utc: true)
test_parsing("06/28/18 1:25", "2018-06-28T01:25:00")
test_parsing("1-Apr", "2019-04-01", assume_date: ~D[2019-01-13])
test_error("1-Apr")
# Ruby parses this next one incorrectly
test_parsing("1//1/17", "2017-01-01")
test_parsing("1/1/0117", "0117-01-01")
test_parsing("1/1/17 19:12", "2017-01-01T19:12:00")
test_parsing("1/1/18 00:01", "2018-01-01T00:01:00")
test_parsing("1/1/18 3:24 PM", "2018-01-01T15:24:00")
test_parsing("1/1/19 10:39 AM", "2019-01-01T10:39:00")
test_parsing("1/1/2013", "2013-01-01")
test_parsing("1/10/2018 8:38pM", "2018-01-10T20:38:00")
test_parsing("1/17/2018 0:00:00", "2018-01-17T00:00:00")
test_parsing("1/2/2018 18:06:26", "2018-01-02T18:06:26")
test_parsing("1/3/2019 12:00:00 AM", "2019-01-03T12:00:00")
test_parsing("1/31/2018 0:00:00 UTC", "2018-01-31T00:00:00Z")
test_parsing("5/12/2019 12:21:58 PM", "2019-05-12T12:21:58")
test_parsing("2011-01-01 04:19:20 -0:00", "2011-01-01T04:19:20Z")
test_parsing("2012-11-23T22:42:25-05:00", "2012-11-24T03:42:25Z", to_utc: true)
test_parsing("2013-12-31T22:18:50+00:00", "2013-12-31T22:18:50Z")
test_parsing("10/2/2017 - 23:14", "2017-10-02T23:14:00")
test_parsing("10/5/2017 23:52", "2017-10-05T23:52:00")
test_parsing("18-07-2018 20:38:34 +00:00", "2018-07-18T20:38:34Z")
test_parsing("18-12-29", "2018-12-29")
test_parsing("19-Dec-19", "2019-12-19")
test_parsing("2012-10-30 09:52:00", "2012-10-30T09:52:00")
test_parsing("2013-04-26 11:25:03 UTC", "2013-04-26T11:25:03Z")
test_parsing("2013-09-10 22:14:56.717", "2013-09-10T22:14:56.717")
test_parsing("2016-11-17 10:36:34.81", "2016-11-17T10:36:34.81")
test_parsing("2015-09-28 10:57:11 -0700", "2015-09-28T17:57:11Z", to_utc: true)
test_parsing("2015/12/1 1:16", "2015-12-01T01:16:00")
test_parsing("2016-04-30", "2016-04-30")
test_parsing("2016-05-02T01:10:06+00:00", "2016-05-02T01:10:06Z")
test_parsing("2016-06-11 15:50:43", "2016-06-11T15:50:43")
test_parsing("2016-06-16 06:06:06", "2016-06-16T06:06:06")
test_parsing("2016-07-01 01:51:34+00", "2016-07-01T01:51:34Z")
test_parsing("2016-07-31 18:42:46-07:00", "2016-08-01T01:42:46Z", to_utc: true)
test_parsing("2016-08-04T07:00:25Z", "2016-08-04T07:00:25Z")
test_parsing("2016-08-19 09:34:51.0", "2016-08-19T09:34:51.0")
test_parsing("2016-11-23T16:25:33.971897", "2016-11-23T16:25:33.971897")
test_parsing("2016/1/9", "2016-01-09")
test_parsing("2017-09-29+00:00", "2017-09-29T00:00:00")
test_parsing("2017-10-06+03:45:16", "2017-10-06T03:45:16")
test_parsing("2017-10-24 04:00:10 PDT", "2017-10-24T11:00:10Z", to_utc: true)
test_parsing("2017-12-01 03:52", "2017-12-01T03:52:00")
test_parsing("2017/08/08", "2017-08-08")
test_parsing("2019/01/31 0:01", "2019-01-31T00:01:00")
# Ruby gets the time wrong
test_parsing("20190118 949 CST", "2019-01-18T15:49:00Z", to_utc: true)
test_parsing("29/Aug./2018", "2018-08-29")
test_parsing("29/Sep./2018", "2018-09-29")
test_parsing("9/10/2018 11:08:13 AM", "2018-09-10T11:08:13")
test_parsing("9/19/2018 20:38", "2018-09-19T20:38:00")
test_parsing("9/20/2017 18:57:24 UTC", "2017-09-20T18:57:24Z")
test_parsing(~s|"=\""10/1/2018\"""|, "2018-10-01")
test_parsing(~s|"=\""9/5/2018\"""|, "2018-09-05")
test_parsing(~s|"Apr 1, 2016 12:02:53 AM PDT"|, "2016-04-01T19:02:53Z", to_utc: true)
test_parsing(~s|"Apr 1, 2017 2:21:25 AM PDT"|, "2017-04-01T09:21:25Z", to_utc: true)
test_parsing(~s|"Dec 1, 2018 7:39:53 AM PST"|, "2018-12-01T15:39:53Z", to_utc: true)
test_parsing("Fri Mar 2 09:01:57 2018", "2018-03-02T09:01:57")
test_parsing("Sun Jul 1 00:31:18 2018", "2018-07-01T00:31:18")
test_parsing("Fri Mar 31 2017 21:41:40 GMT+0000 (UTC)", "2017-03-31T21:41:40Z")
test_parsing("Friday 02 February 2018 10:42:21 AM", "2018-02-02T10:42:21")
test_parsing(~s|"Jan 1, 2013 06:34:31 PM PST"|, "2013-01-02T02:34:31Z", to_utc: true)
test_parsing(~s|"Jan 1, 2014 6:44:47 AM PST"|, "2014-01-01T14:44:47Z", to_utc: true)
test_parsing(~s|"Mar 28, 2014 6:44:47 AM PDT"|, "2014-03-28T13:44:47Z", to_utc: true)
test_parsing("Jan-01-19", "2019-01-01")
test_parsing("Jan-01-19", "2019-01-01T00:00:00", assume_time: true)
test_parsing("Jan-01-19", "2019-01-01T10:13:15", assume_time: ~T[10:13:15])
test_parsing("Jan-01-2018", "2018-01-01")
test_parsing("Monday 01 October 2018 06:34:19 AM", "2018-10-01T06:34:19")
test_parsing("Monday 02 October 2017 9:04:49 AM", "2017-10-02T09:04:49")
test_parsing(~s|"Nov 16, 2017 9:41:28 PM PST"|, "2017-11-17T05:41:28Z", to_utc: true)
# This isn't a valid time with PM specified
test_parsing(~s|"Nov 20, 2016 22:09:23 PM"|, "2016-11-20T22:09:23")
test_parsing(~s|"Sat, 29 Sep 2018 21:36:28 -0400"|, "2018-09-30T01:36:28Z", to_utc: true)
test_parsing(~s|"September 28, 2016"|, "2016-09-28")
test_parsing("Sun Jan 08 2017 04:28:42 GMT+0000 (UTC)", "2017-01-08T04:28:42Z")
test_parsing("Sunday 01 January 2017 09:22:46 AM", "2017-01-01T09:22:46")
test_parsing("Sunday 01 January 2017 10:11:02 PM", "2017-01-01T22:11:02")
test_parsing("Thu Aug 09 2018 17:13:43 GMT+0000 (UTC)", "2018-08-09T17:13:43Z")
test_parsing("Thu Feb 08 00:24:33 2018", "2018-02-08T00:24:33")
test_parsing("Thu Jul 5 12:19:56 2018", "2018-07-05T12:19:56")
test_parsing("Tue Jul 31 06:44:39 2018", "2018-07-31T06:44:39")
test_parsing("Thursday 30 August 2018 11:31:18 AM", "2018-08-30T11:31:18")
test_parsing("Tuesday 11 July 2017 1:43:46 PM", "2017-07-11T13:43:46")
test_parsing(~s|"Tuesday, November 29, 2016"|, "2016-11-29")
test_parsing("jul-10-18", "2018-07-10")
end
describe "parse_datetime/1 - serial" do
test_datetime_parsing("41261.6013888889", ~N[2012-12-18T14:26:00])
test_datetime_parsing("-45103.1454398148", ~N[1776-07-04T20:30:34])
test_datetime_parsing("-363.0", ~N[1899-01-01T00:00:00])
test_datetime_parsing("2.0", ~N[1900-01-01T00:00:00])
test_datetime_parsing("62.0", ~N[1900-03-02T00:00:00])
end
describe "parse/1 - serial options" do
test_parsing("62", ~N[1900-03-02T00:00:00], assume_time: true)
test_parsing("62", ~N[1904-03-03T00:00:00], assume_time: true, use_1904_date_system: true)
test_parsing("62.0", ~N[1900-03-02T00:00:00])
test_parsing("62.0", ~N[1904-03-03T00:00:00], use_1904_date_system: true)
test_datetime_parsing("62.0", ~N[1904-03-03T00:00:00], use_1904_date_system: true)
end
describe "parse_datetime/1 - epoch" do
test_datetime_parsing("99999999999", DateTime.from_naive!(~N[5138-11-16T09:46:39], "Etc/UTC"))
test_datetime_parsing("9999999999", DateTime.from_naive!(~N[2286-11-20T17:46:39], "Etc/UTC"))
test_datetime_parsing(
"9999999999.009",
DateTime.from_naive!(~N[2286-11-20T17:46:39.009], "Etc/UTC")
)
test_datetime_parsing(
"9999999999.090",
DateTime.from_naive!(~N[2286-11-20T17:46:39.090], "Etc/UTC")
)
test_datetime_parsing(
"9999999999.900",
DateTime.from_naive!(~N[2286-11-20T17:46:39.900], "Etc/UTC")
)
test_datetime_parsing(
"9999999999.999",
DateTime.from_naive!(~N[2286-11-20T17:46:39.999], "Etc/UTC")
)
test_datetime_parsing(
"9999999999.999999",
DateTime.from_naive!(~N[2286-11-20T17:46:39.999999], "Etc/UTC")
)
test_datetime_parsing(
"9999999999.0000000009",
DateTime.from_naive!(~N[2286-11-20T17:46:39.000000], "Etc/UTC")
)
test_datetime_parsing(
"9999999999.0000009000",
DateTime.from_naive!(~N[2286-11-20T17:46:39.000000], "Etc/UTC")
)
test_datetime_parsing(
"9999999999.9999999999",
DateTime.from_naive!(~N[2286-11-20T17:46:39.999999], "Etc/UTC")
)
test_datetime_parsing("0000000000", DateTime.from_naive!(~N[1970-01-01T00:00:00], "Etc/UTC"))
test_datetime_parsing("-0000000001", DateTime.from_naive!(~N[1969-12-31T23:59:59], "Etc/UTC"))
test_datetime_parsing(
"-0000000001.0000000001",
DateTime.from_naive!(~N[1969-12-31T23:59:58.000000], "Etc/UTC")
)
# example from the Wikipedia article
test_datetime_parsing("-0386380800", DateTime.from_naive!(~N[1957-10-04T00:00:00], "Etc/UTC"))
test_datetime_parsing("-9999999999", DateTime.from_naive!(~N[1653-02-10T06:13:21], "Etc/UTC"))
if Version.match?(System.version(), ">= 1.7.0") do
test_datetime_parsing(
"-99999999999",
NaiveDateTime.new(-1199, 2, 15, 14, 13, 21) |> elem(1) |> DateTime.from_naive!("Etc/UTC")
)
end
test_datetime_parsing(
"-9999999999.9999999999",
DateTime.from_naive!(~N[1653-02-10T06:13:20.000001], "Etc/UTC")
)
end
describe "parse_datetime/1 - MDY" do
test_datetime_parsing("02/06/2019", ~N[2019-02-06 00:00:00], assume_time: true)
test_datetime_parsing("1/9/34", ~N[2034-01-09 00:00:00], assume_time: true)
test_datetime_parsing("1/9/2034", ~N[2034-01-09 00:00:00], assume_time: true)
test_datetime_parsing("01/09/2034", ~N[2034-01-09 00:00:00], assume_time: true)
test_datetime_parsing("9/4/2018 0:00", ~N[2018-09-04 00:00:00])
test_datetime_parsing("9/1/2018 10:26", ~N[2018-09-01 10:26:00], assume_time: true)
test_datetime_parsing("1/13/2019", ~N[2019-01-13 00:00:00], assume_time: true)
test_datetime_parsing(~s|""=\""9/5/2018\"""|, ~N[2018-09-05 00:00:00], assume_time: true)
test_datetime_parsing("1/13/19", ~N[2019-01-13 00:00:00], assume_time: true)
test_datetime_parsing("1/15/2019 3:06", ~N[2019-01-15 03:06:00])
test_datetime_parsing("4/24/2019 0:00:00", ~N[2019-04-24 00:00:00])
test_datetime_parsing("5/2/2019 0:00:00", ~N[2019-05-02 00:00:00])
test_datetime_parsing("5/31/2019 12:00:00 AM", ~N[2019-05-31 12:00:00])
test_datetime_parsing("5/2/2019 12:00:00 AM", ~N[2019-05-02 12:00:00])
end
describe "parse_date/1 - MDY" do
test_date_parsing("02/06/2019", ~D[2019-02-06])
test_date_parsing("1/9/34", ~D[2034-01-09])
test_date_parsing("1/9/2034", ~D[2034-01-09])
test_date_parsing("01/09/2034", ~D[2034-01-09])
test_date_parsing("9/4/2018 0:00", ~D[2018-09-04])
test_date_parsing("9/1/2018 10:26", ~D[2018-09-01])
test_date_parsing("1/13/2019", ~D[2019-01-13])
test_date_parsing(~s|""=\""9/5/2018\"""|, ~D[2018-09-05])
test_date_parsing("1/13/19", ~D[2019-01-13])
test_date_parsing("1/15/2019 3:06", ~D[2019-01-15])
test_date_parsing("4/24/2019 0:00:00", ~D[2019-04-24])
test_date_parsing("5/2/2019 0:00:00", ~D[2019-05-02])
test_date_parsing("5/31/2019 12:00:00 AM", ~D[2019-05-31])
test_date_parsing("5/2/2019 12:00:00 AM", ~D[2019-05-02])
test_date_parsing("2/5", ~D[2021-02-05], assume_date: ~D[2021-01-01])
test_date_parsing("12/5", ~D[2021-12-05], assume_date: ~D[2021-01-01])
test_date_parsing("13/5", ~D[2021-05-13], assume_date: ~D[2021-01-01])
end
describe "parse_datetime/1 - DMY" do
test_datetime_parsing("23-05-2019 @ 10:01", ~N[2019-05-23 10:01:00], assume_time: true)
test_datetime_parsing("9-Feb-18", ~N[2018-02-09 00:00:00], assume_time: true)
test_datetime_parsing("9-2-32", ~N[2032-02-09 00:00:00], assume_time: true)
end
describe "parse_date/1 - DMY" do
test_date_parsing("23-05-2019 @ 10:01", ~D[2019-05-23])
test_date_parsing("9-Feb-18", ~D[2018-02-09])
test_date_parsing("9-2-32", ~D[2032-02-09])
end
describe "parse_datetime/1 - YMD" do
test_datetime_parsing("2019-05-16+04:00", ~N[2019-05-16 04:00:00], assume_time: true)
test_datetime_parsing("34-1-13", ~N[2034-01-13 00:00:00], assume_time: true)
test_datetime_parsing("2034-1-9", ~N[2034-01-09 00:00:00], assume_time: true)
test_datetime_parsing("20340109", ~N[2034-01-09 00:00:00], assume_time: true)
test_datetime_parsing("2034-01-13", ~N[2034-01-13 00:00:00], assume_time: true)
test_datetime_parsing("2016-02-29 00:00:00 UTC", "2016-02-29T00:00:00Z")
test_datetime_parsing(
"2017-11-04 15:20:47 UTC",
DateTime.from_naive!(~N[2017-11-04 15:20:47Z], "Etc/UTC")
)
test_datetime_parsing(
"2017-11-04 15:20:47 EDT",
DateTime.from_naive!(~N[2017-11-04 19:20:47Z], "Etc/UTC"),
to_utc: true
)
test_datetime_parsing(
"2017-11-04 15:20:47 EST",
DateTime.from_naive!(~N[2017-11-04 20:20:47Z], "Etc/UTC"),
to_utc: true
)
test_datetime_parsing(
"2017-11-04 15:20:47-0500",
DateTime.from_naive!(~N[2017-11-04 20:20:47Z], "Etc/UTC"),
to_utc: true
)
test_datetime_parsing(
"2017-11-04 15:20:47+0500",
DateTime.from_naive!(~N[2017-11-04 10:20:47Z], "Etc/UTC"),
to_utc: true
)
test_datetime_parsing(
"2017-11-04 15:20:47+0000",
DateTime.from_naive!(~N[2017-11-04 15:20:47Z], "Etc/UTC")
)
test_datetime_parsing(
"2019-05-20 10:00:00PST",
DateTime.from_naive!(~N[2019-05-20 17:00:00Z], "Etc/UTC"),
to_utc: true
)
end
describe "parse_date/1 - YMD" do
test_date_parsing("2019-05-16+04:00", ~D[2019-05-16])
test_date_parsing("34-1-13", ~D[2034-01-13])
test_date_parsing("2034-1-9", ~D[2034-01-09])
test_date_parsing("2034-01-13", ~D[2034-01-13])
test_date_parsing("2017-11-04 15:20:47 UTC", ~D[2017-11-04])
test_date_parsing("2017-11-04 15:20:47 EDT", ~D[2017-11-04])
test_date_parsing("2017-11-04 15:20:47 EST", ~D[2017-11-04])
test_date_parsing("2017-11-04 15:20:47-0500", ~D[2017-11-04])
test_date_parsing("2017-11-04 15:20:47+0500", ~D[2017-11-04])
test_date_parsing("2017-11-04 15:20:47+0000", ~D[2017-11-04])
test_date_parsing("2019-05-20 10:00:00PST", ~D[2019-05-20])
test_date_parsing("2016-02-29", ~D[2016-02-29])
end
describe "parse_datetime/2 - options" do
test "to_utc: false returns NaiveDateTime when undetermined timezone" do
string = "2019-01-01T00:00:00"
{:ok, result} = DateTimeParser.parse_datetime(string, to_utc: false)
assert result == ~N[2019-01-01 00:00:00]
end
test "to_utc: false returns DateTime when determined timezone" do
string = "2019-01-01T00:00:00Z"
{:ok, result} = DateTimeParser.parse_datetime(string, to_utc: false)
assert result == DateTime.from_naive!(~N[2019-01-01 00:00:00], "Etc/UTC")
end
test "to_utc: true returns converted DateTime when timezone is determined" do
string = "2019-01-01T00:00:00 PST"
{:ok, result} = DateTimeParser.parse_datetime(string, to_utc: true)
assert result == DateTime.from_naive!(~N[2019-01-01 08:00:00], "Etc/UTC")
end
test "to_utc: true returns NaiveDateTime when timezone is undetermined" do
string = "2019-01-01T08:00:00"
{:ok, result} = DateTimeParser.parse_datetime(string, to_utc: true)
assert result == ~N[2019-01-01 08:00:00]
end
test "assume_utc: false returns NaiveDateTime when undetermined timezone" do
string = "2019-01-01T00:00:00"
{:ok, result} = DateTimeParser.parse_datetime(string, assume_utc: false)
assert result == ~N[2019-01-01 00:00:00]
end
test "assume_utc: false returns DateTime when determined timezone" do
string = "2019-01-01T00:00:00Z"
{:ok, result} = DateTimeParser.parse_datetime(string, assume_utc: false)
assert result == DateTime.from_naive!(~N[2019-01-01 00:00:00], "Etc/UTC")
end
test "assume_utc: true returns timezoned DateTime when timezone is determined" do
string = "2019-01-01T00:00:00 PST"
{:ok, result} = DateTimeParser.parse_datetime(string, assume_utc: true)
naive_datetime_result = DateTime.to_naive(result)
assert naive_datetime_result == ~N[2019-01-01 00:00:00]
assert %{zone_abbr: "PST", time_zone: "PST8PDT", utc_offset: -28_800, std_offset: 0} =
result
end
test "assume_utc: true returns NaiveDateTime when timezone is undetermined" do
string = "2019-01-01T08:00:00"
{:ok, result} = DateTimeParser.parse_datetime(string, assume_utc: true)
assert result == DateTime.from_naive!(~N[2019-01-01 08:00:00], "Etc/UTC")
end
end
describe "parse_datetime/1 - vocal" do
test_datetime_parsing("Sunday 01 January 2017 10:11:02 PM", ~N[2017-01-01 22:11:02])
test_datetime_parsing("Sunday, 01 January 2017 10:11:02 PM", ~N[2017-01-01 22:11:02])
test_datetime_parsing("Sun, 01 January 2017 10:11:02 PM", ~N[2017-01-01 22:11:02])
test_datetime_parsing("Sun 01 January 2017 10:11:02 PM", ~N[2017-01-01 22:11:02])
test_datetime_parsing("November 29, 2016", ~N[2016-11-29 00:00:00], assume_time: true)
test_datetime_parsing(
"May 30, 2019 4:31:09 AM PDT",
DateTime.from_naive!(~N[2019-05-30 11:31:09], "Etc/UTC"),
to_utc: true
)
test_datetime_parsing("Sep-19-16", ~N[2016-09-19 00:00:00], assume_time: true)
test_datetime_parsing(
"Oct 5, 2018 6:16:56 PM PDT",
DateTime.from_naive!(~N[2018-10-06 01:16:56Z], "Etc/UTC"),
to_utc: true
)
test_datetime_parsing("19 September 2018 08:15:22 AM", ~N[2018-09-19 08:15:22])
test_datetime_parsing("19 September 18 2:33:08 PM", ~N[2018-09-19 14:33:08])
test_datetime_parsing("11 July 2017 1:43:46 PM", ~N[2017-07-11 13:43:46])
end
describe "parse_date/1 - vocal" do
test_date_parsing("Sunday 01 January 2017 10:11:02 PM", ~D[2017-01-01])
test_date_parsing("Sunday, 01 January 2017 10:11:02 PM", ~D[2017-01-01])
test_date_parsing("Sun, 01 January 2017 10:11:02 PM", ~D[2017-01-01])
test_date_parsing("Sun 01 January 2017 10:11:02 PM", ~D[2017-01-01])
test_date_parsing("November 29, 2016", ~D[2016-11-29])
test_date_parsing("May 30, 2019 4:31:09 AM PDT", ~D[2019-05-30])
test_date_parsing("Sep-19-16", ~D[2016-09-19])
test_date_parsing("Oct 5, 2018 6:16:56 PM PDT", ~D[2018-10-05])
test_date_parsing("19 September 2018 08:15:22 AM", ~D[2018-09-19])
test_date_parsing("19 September 18 2:33:08 PM", ~D[2018-09-19])
test_date_parsing("11 July 2017 1:43:46 PM", ~D[2017-07-11])
end
describe "parse_date/1 - epoch" do
test_date_parsing("99999999999", ~D[5138-11-16])
test_date_parsing("9999999999", ~D[2286-11-20])
test_date_parsing("9999999999.009", ~D[2286-11-20])
test_date_parsing("9999999999.999", ~D[2286-11-20])
test_date_parsing("9999999999.999999", ~D[2286-11-20])
test_date_parsing("9999999999.9999999999", ~D[2286-11-20])
test_date_parsing("0000000000", ~D[1970-01-01])
test_date_parsing("-0000000001", ~D[1969-12-31])
test_date_parsing("-0000000001.001", ~D[1969-12-31])
test_date_parsing("-0000000001.111111", ~D[1969-12-31])
test_date_parsing("-9999999999.009", ~D[1653-02-10])
test_date_parsing("-9999999999.999", ~D[1653-02-10])
test_date_parsing("-9999999999.999999", ~D[1653-02-10])
test_date_parsing("-9999999999.9999999999", ~D[1653-02-10])
end
describe "parse_date/1 - serial" do
test_date_parsing("41261.6013888889", ~D[2012-12-18])
test_date_parsing("-45103.1454398148", ~D[1776-07-04])
test_date_parsing("-363", ~D[1899-01-01])
test_date_parsing("2", ~D[1900-01-01])
test_date_parsing("62", ~D[1900-03-02])
end
describe "parse_time/1" do
test_time_parsing("00:00.0", ~T[00:00:00])
test_time_parsing("07:09.3", ~T[07:09:00])
test_time_parsing("08:53.0", ~T[08:53:00])
test_time_parsing("10:13.7", ~T[10:13:00])
end
describe "parse_time/1 - epoch" do
test_time_parsing("99999999999", ~T[09:46:39])
test_time_parsing("9999999999", ~T[17:46:39])
test_time_parsing("9999999999.000001", ~T[17:46:39.000001])
test_time_parsing("9999999999.000010", ~T[17:46:39.000010])
test_time_parsing("9999999999.000100", ~T[17:46:39.000100])
test_time_parsing("9999999999.001000", ~T[17:46:39.001000])
test_time_parsing("9999999999.010000", ~T[17:46:39.010000])
test_time_parsing("9999999999.100000", ~T[17:46:39.100000])
test_time_parsing("9999999999.009", ~T[17:46:39.009])
test_time_parsing("9999999999.900", ~T[17:46:39.900])
test_time_parsing("9999999999.999", ~T[17:46:39.999])
test_time_parsing("9999999999.999999", ~T[17:46:39.999999])
test_time_parsing("9999999999.9999999999", ~T[17:46:39.999999])
test_time_parsing("0000000000", ~T[00:00:00])
test_time_parsing("-9999999999.9999999999", ~T[06:13:20.000001])
test_time_parsing("-9999999999.999999", ~T[06:13:20.000001])
test_time_parsing("-9999999999.99999", ~T[06:13:20.00001])
test_time_parsing("-9999999999.9999", ~T[06:13:20.0001])
test_time_parsing("-9999999999.999", ~T[06:13:20.001])
test_time_parsing("-9999999999.99", ~T[06:13:20.01])
test_time_parsing("-9999999999.9", ~T[06:13:20.1])
test_time_parsing("-0000000001.0000000001", ~T[23:59:58.000000])
test_time_parsing("-0000000001.000001", ~T[23:59:58.999999])
test_time_parsing("-0000000001.00001", ~T[23:59:58.99999])
test_time_parsing("-0000000001.0001", ~T[23:59:58.9999])
test_time_parsing("-0000000001.001", ~T[23:59:58.999])
test_time_parsing("-0000000001.01", ~T[23:59:58.99])
test_time_parsing("-0000000001.1", ~T[23:59:58.9])
end
describe "parse_time/1 - serial" do
test_time_parsing("41261.6013888889", ~T[14:26:00])
test_time_parsing("-45103.1454398148", ~T[20:30:34])
end
describe "bang variants" do
test "parse! successfully returns results" do
assert %NaiveDateTime{} = DateTimeParser.parse!("2019-01-01T01:01:01")
assert %DateTime{} = DateTimeParser.parse!("2019-01-01T01:01:01Z")
assert %Date{} = DateTimeParser.parse!("2019-01-01")
assert %Time{} = DateTimeParser.parse!("9:30pm")
end
test "parse! raises an error when fails to parse" do
assert_raise DateTimeParser.ParseError, ~s|Could not parse "foo"|, fn ->
DateTimeParser.parse!("foo")
end
end
test "parse_datetime! successfully returns results" do
assert %NaiveDateTime{} = DateTimeParser.parse_datetime!("2019-01-01T01:01:01")
assert %DateTime{} = DateTimeParser.parse_datetime!("2019-01-01T01:01:01Z")
end
test "parse_datetime! raises an error when fails to parse" do
assert_raise DateTimeParser.ParseError, ~s|Could not parse "foo"|, fn ->
DateTimeParser.parse_datetime!("foo")
end
end
test "parse_date! successfully returns results" do
assert %Date{} = DateTimeParser.parse_date!("2019-01-01")
end
test "parse_date! raises an error when fails to parse" do
assert_raise DateTimeParser.ParseError, ~s|Could not parse "foo"|, fn ->
DateTimeParser.parse_date!("foo")
end
end
test "parse_time! successfully returns results" do
assert %Time{} = DateTimeParser.parse_time!("10:30pm")
end
test "parse_time! raises an error when fails to parse" do
assert_raise DateTimeParser.ParseError, ~s|Could not parse "foo"|, fn ->
DateTimeParser.parse_time!("foo")
end
end
end
  describe "errors" do
    test "returns an error when not recognized" do
      # Out-of-range month/day values and non-binary inputs are all rejected
      # with a {:error, message} tuple rather than an exception.
      assert DateTimeParser.parse_datetime("2017-24-32 16:09:53 UTC") ==
               {:error, ~s|Could not parse "2017-24-32 16:09:53 UTC"|}

      assert DateTimeParser.parse_datetime(nil) == {:error, "Could not parse nil"}
      assert DateTimeParser.parse_date(nil) == {:error, "Could not parse nil"}
      assert DateTimeParser.parse_time(nil) == {:error, "Could not parse nil"}
      assert DateTimeParser.parse(nil) == {:error, "Could not parse nil"}
      assert DateTimeParser.parse({:ok, "foo"}) == {:error, ~s|Could not parse {:ok, "foo"}|}
      assert DateTimeParser.parse_date({:ok, "foo"}) == {:error, ~s|Could not parse {:ok, "foo"}|}
      assert DateTimeParser.parse_time({:ok, "foo"}) == {:error, ~s|Could not parse {:ok, "foo"}|}

      assert DateTimeParser.parse_datetime({:ok, "foo"}) ==
               {:error, ~s|Could not parse {:ok, "foo"}|}
    end

    # Macro-generated cases (test_error/test_datetime_error/test_date_error are
    # defined elsewhere in this file) asserting that ambiguous or impossible
    # dates fail to parse.
    test_error("01-Jul", ~s|Could not parse "01-Jul"|)
    test_datetime_error("01-Jul")
    test_datetime_error("2017-02-29 00:00:00 UTC")
    test_date_error("2017-02-29")

    # Unrolled at compile time: the 31st never exists in 30-day months.
    for month <- ~w[04 06 09 11] do
      @month month

      test_datetime_error(
        "2017-#{@month}-31 00:00:00 UTC",
        ~s|Could not parse "2017-#{@month}-31 00:00:00 UTC"|
      )

      test_date_error("2017-#{@month}-31", ~s|Could not parse "2017-#{@month}-31"|)
    end
  end
end
| 45.136035 | 99 | 0.649075 |
795ebd11372741e6eecca40f2d6493b2eff23607 | 1,590 | ex | Elixir | clients/private_ca/lib/google_api/private_ca/v1beta1/model/fetch_certificate_authority_csr_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/private_ca/lib/google_api/private_ca/v1beta1/model/fetch_certificate_authority_csr_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/private_ca/lib/google_api/private_ca/v1beta1/model/fetch_certificate_authority_csr_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.PrivateCA.V1beta1.Model.FetchCertificateAuthorityCsrResponse do
  @moduledoc """
  Response message for CertificateAuthorityService.FetchCertificateAuthorityCsr.

  ## Attributes

  *   `pemCsr` (*type:* `String.t`, *default:* `nil`) - Output only. The PEM-encoded signed certificate signing request (CSR).
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :pemCsr => String.t() | nil
        }

  field(:pemCsr)
end

# Decodes a JSON map into the model struct (delegates to the generated decoder).
defimpl Poison.Decoder,
  for: GoogleApi.PrivateCA.V1beta1.Model.FetchCertificateAuthorityCsrResponse do
  def decode(value, options) do
    GoogleApi.PrivateCA.V1beta1.Model.FetchCertificateAuthorityCsrResponse.decode(value, options)
  end
end

# Encodes the model struct back to JSON via the shared Gax base encoder.
defimpl Poison.Encoder,
  for: GoogleApi.PrivateCA.V1beta1.Model.FetchCertificateAuthorityCsrResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 32.44898 | 126 | 0.758491 |
795ec0f4bd044d8b5922fcbcdbfbbf39bf8dfa28 | 926 | ex | Elixir | test/types_test.ex | gmassman/everex | ce0f3037e69041127776181afafe6b2d14f0e343 | [
"Apache-2.0"
] | 11 | 2015-02-22T17:42:03.000Z | 2018-09-23T07:48:57.000Z | test/types_test.ex | gmassman/everex | ce0f3037e69041127776181afafe6b2d14f0e343 | [
"Apache-2.0"
] | 38 | 2015-03-11T16:13:27.000Z | 2021-08-02T07:14:50.000Z | test/types_test.ex | gmassman/everex | ce0f3037e69041127776181afafe6b2d14f0e343 | [
"Apache-2.0"
] | 4 | 2015-05-20T01:33:33.000Z | 2020-06-09T04:44:46.000Z | #
# Copyright 2015 Johan Wärlander
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule EverexTypesTest do
  use ExUnit.Case
  use Everex.Types

  # Round-trips between the Erlang record and Elixir struct representations of
  # a Note must be lossless in both directions.
  test "note record is the same as a note struct" do
    note_record = Types.Note.record(title: "Foo Bar")
    note_struct = %Types.Note{title: "Foo Bar"}
    assert Types.to_record(note_struct) == note_record
    assert Types.to_struct(note_record) == note_struct
  end
end
| 34.296296 | 74 | 0.7473 |
795eea5e4d91b8bba20972ee1a6f649629964765 | 3,852 | ex | Elixir | lib/militerm/systems/simple_response.ex | jgsmith/militerm | c4252d0a93f5620b90750ac2b61baf282e9ef7eb | [
"Apache-2.0"
] | 6 | 2017-06-16T10:26:35.000Z | 2021-04-07T15:01:00.000Z | lib/militerm/systems/simple_response.ex | jgsmith/militerm | c4252d0a93f5620b90750ac2b61baf282e9ef7eb | [
"Apache-2.0"
] | 2 | 2020-04-14T02:17:46.000Z | 2021-03-10T11:09:05.000Z | lib/militerm/systems/simple_response.ex | jgsmith/militerm | c4252d0a93f5620b90750ac2b61baf282e9ef7eb | [
"Apache-2.0"
] | null | null | null | defmodule Militerm.Systems.SimpleResponse do
@moduledoc ~S"""
The response system allows NPCs to map text string patterns to
events. This is a fairly generic system, so the scripting needs to
supply the string being matched as well as the set of matches. The
returned event is then triggered by the script as well.
response:
set-name:
- pattern: pattern
events:
- event1
- event2
The pattern is a regex with named captures available.
This should be sufficient to build a bot based on the old Eliza game.
"""
use Militerm.ECS.System
require Logger
defscript simple_response_trigger_event(set, text), for: objects do
do_sr_trigger_event(objects, set, text)
end
defscript simple_response_trigger_event(set, text, default_event), for: objects do
do_sr_trigger_event(objects, set, text, default_event)
end
defscript random_selection(list) do
if is_list(list) do
count = Enum.count(list)
Enum.at(list, :rand.uniform(count) - 1)
else
list
end
end
def do_sr_trigger_event(objects, set, text, default_event \\ nil)
def do_sr_trigger_event(objects, set, [text], default_event) do
do_sr_trigger_event(objects, set, text, default_event)
end
def do_sr_trigger_event(%{"this" => this} = objects, set, text, default_event) do
this
|> get_pattern_set(set)
|> log_pattern_set(this, set)
|> find_match(text)
|> log_match(this, set)
|> trigger_event(objects, default_event)
end
def do_sr_trigger_event(_, _, _, _), do: false
def get_pattern_set({:thing, thing_id}, set) do
Militerm.Components.SimpleResponses.get_set(thing_id, set)
end
def get_pattern_set({:thing, thing_id, _}, set) do
Militerm.Components.SimpleResponses.get_set(thing_id, set)
end
def log_pattern_set(patterns, {:thing, thing_id}, set) do
Logger.debug(fn ->
[thing_id, " SimpleResponseTriggerEvent ", set, " patterns: ", inspect(patterns)]
end)
patterns
end
def log_pattern_set(patterns, {:thing, thing_id, _}, set) do
Logger.debug(fn ->
[thing_id, " SimpleResponseTriggerEvent ", set, " patterns: ", inspect(patterns)]
end)
patterns
end
def find_match(patterns, text) do
patterns
|> Enum.find_value(fn %{"regex" => regex, "event" => event} ->
case regex_matches(regex, text) do
%{} = captures -> {event, captures}
_ -> false
end
end)
end
def log_match(match, {:thing, thing_id, _}, set) do
log_match(match, {:thing, thing_id}, set)
end
def log_match(match, {:thing, thing_id}, set) do
Logger.debug(fn ->
[thing_id, " SimpleResponseTriggerEvent ", set, " match: ", inspect(match)]
end)
match
end
def regex_matches([], _), do: false
def regex_matches([regex | rest], text) do
case Regex.named_captures(regex, text) do
%{} = captures -> captures
_ -> regex_matches(rest, text)
end
end
def regex_matches(regex, text), do: Regex.named_captures(regex, text)
def trigger_event(nil, _, nil), do: false
def trigger_event(nil, %{"this" => this} = objects, event) do
do_trigger_event(this, event, objects)
false
end
def trigger_event({event, captures}, %{"this" => this} = objects, _) do
do_trigger_event(this, event, Map.merge(captures, objects))
end
def trigger_event(event, %{"this" => this} = objects, _) do
do_trigger_event(this, event, objects)
true
end
def do_trigger_event({:thing, thing_id}, event, args) do
do_trigger_event(thing_id, event, args)
end
def do_trigger_event({:thing, thing_id, _}, event, args) do
do_trigger_event(thing_id, event, args)
end
def do_trigger_event(thing_id, event, args) do
Militerm.Systems.Events.async_trigger(thing_id, event, "responder", args)
true
end
end
| 26.937063 | 87 | 0.676532 |
795f002836afeb527bfb3bf0e938933af0030677 | 6,132 | ex | Elixir | deps/phoenix_html/lib/phoenix_html.ex | rpillar/Top5_Elixir | 9c450d2e9b291108ff1465dc066dfe442dbca822 | [
"MIT"
] | null | null | null | deps/phoenix_html/lib/phoenix_html.ex | rpillar/Top5_Elixir | 9c450d2e9b291108ff1465dc066dfe442dbca822 | [
"MIT"
] | null | null | null | deps/phoenix_html/lib/phoenix_html.ex | rpillar/Top5_Elixir | 9c450d2e9b291108ff1465dc066dfe442dbca822 | [
"MIT"
] | null | null | null | defmodule Phoenix.HTML do
@moduledoc """
Helpers for working with HTML strings and templates.
When used, it imports the given modules:
* `Phoenix.HTML` - functions to handle HTML safety;
* `Phoenix.HTML.Tag` - functions for generating HTML tags;
* `Phoenix.HTML.Form` - functions for working with forms;
* `Phoenix.HTML.Link` - functions for generating links and urls;
* `Phoenix.HTML.Format` - functions for formatting text;
## HTML Safe
One of the main responsibilities of this module is to
provide convenience functions for escaping and marking
HTML code as safe.
By default, data output in templates is not considered
safe:
<%= "<hello>" %>
will be shown as:
<hello>
User data or data coming from the database is almost never
considered safe. However, in some cases, you may want to tag
it as safe and show its "raw" contents:
<%= raw "<hello>" %>
Keep in mind most helpers will automatically escape your data
and return safe content:
<%= content_tag :p, "<hello>" %>
will properly output:
<p><hello></p>
"""
@doc false
defmacro __using__(_) do
quote do
import Phoenix.HTML
import Phoenix.HTML.Form
import Phoenix.HTML.Link
import Phoenix.HTML.Tag
import Phoenix.HTML.Format
end
end
@typedoc "Guaranteed to be safe"
@type safe :: {:safe, iodata}
@typedoc "May be safe or unsafe (i.e. it needs to be converted)"
@type unsafe :: Phoenix.HTML.Safe.t()
@doc """
Provides `~e` sigil with HTML safe EEx syntax inside source files.
Raises on attempts to interpolate with `\#{}`, so `~E` should be preferred.
iex> ~e"\""
...> Hello <%= "world" %>
...> "\""
{:safe, ["Hello ", "world", "\\n"]}
"""
defmacro sigil_e(expr, opts) do
handle_sigil(expr, opts, __CALLER__.line)
end
@doc """
Provides `~E` sigil with HTML safe EEx syntax inside source files.
Does not raise on attempts to interpolate with `\#{}`, but rather shows those
characters literally, so it should be preferred over `~e`.
iex> ~E"\""
...> Hello <%= "world" %>
...> "\""
{:safe, ["Hello ", "world", "\\n"]}
"""
defmacro sigil_E(expr, opts) do
handle_sigil(expr, opts, __CALLER__.line)
end
defp handle_sigil({:<<>>, _, [expr]}, [], line) do
EEx.compile_string(expr, engine: Phoenix.HTML.Engine, line: line + 1)
end
defp handle_sigil(_, _, _) do
raise ArgumentError,
"interpolation not allowed in ~e sigil. " <>
"Remove the interpolation, use <%= %> to insert values, " <>
"or use ~E to show the interpolation literally"
end
@doc """
Marks the given content as raw.
This means any HTML code inside the given
string won't be escaped.
iex> raw("<hello>")
{:safe, "<hello>"}
iex> raw({:safe, "<hello>"})
{:safe, "<hello>"}
iex> raw(nil)
{:safe, ""}
"""
@spec raw(iodata | safe | nil) :: safe
def raw({:safe, value}), do: {:safe, value}
def raw(nil), do: {:safe, ""}
def raw(value) when is_binary(value) or is_list(value), do: {:safe, value}
@doc """
Escapes the HTML entities in the given term, returning iodata.
iex> html_escape("<hello>")
{:safe, [[[] | "<"], "hello" | ">"]}
iex> html_escape('<hello>')
{:safe, ["<", 104, 101, 108, 108, 111, ">"]}
iex> html_escape(1)
{:safe, "1"}
iex> html_escape({:safe, "<hello>"})
{:safe, "<hello>"}
"""
@spec html_escape(unsafe) :: safe
def html_escape({:safe, _} = safe), do: safe
def html_escape(nil), do: {:safe, ""}
def html_escape(bin) when is_binary(bin), do: {:safe, Plug.HTML.html_escape_to_iodata(bin)}
def html_escape(list) when is_list(list), do: {:safe, Phoenix.HTML.Safe.List.to_iodata(list)}
def html_escape(other), do: {:safe, Phoenix.HTML.Safe.to_iodata(other)}
@doc """
Converts a safe result into a string.
Fails if the result is not safe. In such cases, you can
invoke `html_escape/1` or `raw/1` accordingly before.
"""
@spec safe_to_string(safe) :: String.t()
def safe_to_string({:safe, iodata}) do
IO.iodata_to_binary(iodata)
end
@doc false
@deprecated "Use javascript_escape/1 instead"
def escape_javascript(data), do: javascript_escape(data)
@doc """
Escapes quotes (double and single), double backslashes and other.
This function is useful in JavaScript responses when there is a need
to escape HTML rendered from other templates, like in the following:
$("#container").append("<%= javascript_escape(render("post.html", post: @post)) %>");
"""
@spec javascript_escape(binary) :: binary
@spec javascript_escape(safe) :: safe
def javascript_escape({:safe, data}),
do: {:safe, data |> IO.iodata_to_binary() |> javascript_escape("")}
def javascript_escape(data) when is_binary(data),
do: javascript_escape(data, "")
defp javascript_escape(<<0x2028::utf8, t::binary>>, acc),
do: javascript_escape(t, <<acc::binary, "\\u2028">>)
defp javascript_escape(<<0x2029::utf8, t::binary>>, acc),
do: javascript_escape(t, <<acc::binary, "\\u2029">>)
defp javascript_escape(<<0::utf8, t::binary>>, acc),
do: javascript_escape(t, <<acc::binary, "\\u0000">>)
defp javascript_escape(<<"</", t::binary>>, acc),
do: javascript_escape(t, <<acc::binary, ?<, ?\\, ?/>>)
defp javascript_escape(<<"\r\n", t::binary>>, acc),
do: javascript_escape(t, <<acc::binary, ?\\, ?n>>)
defp javascript_escape(<<h, t::binary>>, acc) when h in [?", ?', ?\\],
do: javascript_escape(t, <<acc::binary, ?\\, h>>)
defp javascript_escape(<<h, t::binary>>, acc) when h in [?\r, ?\n],
do: javascript_escape(t, <<acc::binary, ?\\, ?n>>)
defp javascript_escape(<<h, t::binary>>, acc),
do: javascript_escape(t, <<acc::binary, h>>)
defp javascript_escape(<<>>, acc), do: acc
end
| 29.623188 | 96 | 0.604207 |
795f91831e2a0f76aad83da47a7a83dc98e73b47 | 247 | exs | Elixir | config/config.exs | axelson/ueberauth_google | ee6f9ddff059a0b8eb2cddaf64d3eefcc2202450 | [
"MIT"
] | 134 | 2015-11-25T16:45:41.000Z | 2022-03-17T01:15:41.000Z | config/config.exs | axelson/ueberauth_google | ee6f9ddff059a0b8eb2cddaf64d3eefcc2202450 | [
"MIT"
] | 79 | 2015-11-19T01:47:53.000Z | 2022-02-06T01:52:00.000Z | config/config.exs | axelson/ueberauth_google | ee6f9ddff059a0b8eb2cddaf64d3eefcc2202450 | [
"MIT"
] | 89 | 2016-01-12T05:21:43.000Z | 2022-03-13T22:06:48.000Z | use Mix.Config
config :ueberauth, Ueberauth,
providers: [
google: {Ueberauth.Strategy.Google, []}
]
config :ueberauth, Ueberauth.Strategy.Google.OAuth,
client_id: "client_id",
client_secret: "client_secret",
token_url: "token_url"
| 20.583333 | 51 | 0.724696 |
795f95717b826ef3fa0942224ca2baee4bcebfc1 | 220 | ex | Elixir | lib/mix/tasks/server.ex | Hou-Rui/cadet | f9036d76005bf3b267b632dce176067ae1a19f71 | [
"Apache-2.0"
] | 27 | 2018-01-20T05:56:24.000Z | 2021-05-24T03:21:55.000Z | lib/mix/tasks/server.ex | Hou-Rui/cadet | f9036d76005bf3b267b632dce176067ae1a19f71 | [
"Apache-2.0"
] | 731 | 2018-04-16T13:25:49.000Z | 2021-06-22T07:16:12.000Z | lib/mix/tasks/server.ex | Hou-Rui/cadet | f9036d76005bf3b267b632dce176067ae1a19f71 | [
"Apache-2.0"
] | 43 | 2018-01-20T06:35:46.000Z | 2021-05-05T03:22:35.000Z | defmodule Mix.Tasks.Cadet.Server do
@moduledoc """
Run the Cadet server.
Currently it is equivalent with `phx.server`
"""
use Mix.Task
def run(args) do
:ok = Mix.Tasks.Phx.Server.run(args)
end
end
| 18.333333 | 48 | 0.663636 |
795fa221bbb633999220bf730aadfb4e5644ed22 | 1,553 | ex | Elixir | back/lib/api_web.ex | HugoLefebvre/Epitech_TM_MP03 | 0ed161c956f7a10aec245fe2e17eb5a9b55f6075 | [
"MIT"
] | null | null | null | back/lib/api_web.ex | HugoLefebvre/Epitech_TM_MP03 | 0ed161c956f7a10aec245fe2e17eb5a9b55f6075 | [
"MIT"
] | null | null | null | back/lib/api_web.ex | HugoLefebvre/Epitech_TM_MP03 | 0ed161c956f7a10aec245fe2e17eb5a9b55f6075 | [
"MIT"
] | null | null | null | defmodule ApiWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use ApiWeb, :controller
use ApiWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: ApiWeb
import Plug.Conn
import ApiWeb.Router.Helpers
import ApiWeb.Gettext
end
end
def view do
quote do
use Phoenix.View, root: "lib/api_web/templates",
namespace: ApiWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import ApiWeb.Router.Helpers
import ApiWeb.ErrorHelpers
import ApiWeb.Gettext
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
end
end
def channel do
quote do
use Phoenix.Channel
import ApiWeb.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 22.838235 | 69 | 0.676755 |
795fc69de3dcf85daea387639297b001c97cffb3 | 1,834 | ex | Elixir | clients/data_labeling/lib/google_api/data_labeling/v1beta1/model/google_cloud_datalabeling_v1beta1_video_thumbnail.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/data_labeling/lib/google_api/data_labeling/v1beta1/model/google_cloud_datalabeling_v1beta1_video_thumbnail.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/data_labeling/lib/google_api/data_labeling/v1beta1/model/google_cloud_datalabeling_v1beta1_video_thumbnail.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1beta1VideoThumbnail do
  @moduledoc """
  Container of information of a video thumbnail.

  ## Attributes

  *   `thumbnail` (*type:* `String.t`, *default:* `nil`) - A byte string of the video frame.
  *   `timeOffset` (*type:* `String.t`, *default:* `nil`) - Time offset relative to the beginning of the video, corresponding to the video frame where the thumbnail has been extracted from.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :thumbnail => String.t(),
          :timeOffset => String.t()
        }

  field(:thumbnail)
  field(:timeOffset)
end

# Decodes a JSON map into the model struct (delegates to the generated decoder).
defimpl Poison.Decoder,
  for: GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1beta1VideoThumbnail do
  def decode(value, options) do
    GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1beta1VideoThumbnail.decode(
      value,
      options
    )
  end
end

# Encodes the model struct back to JSON via the shared Gax base encoder.
defimpl Poison.Encoder,
  for: GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1beta1VideoThumbnail do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 33.345455 | 189 | 0.742094 |
795fd967c1238e5acbf5202c6641e1cf6f846b18 | 1,683 | ex | Elixir | lib/earmark_hashed_link.ex | niku/earmark_hashed_link | bb72e21b133038ea4b58e1f971f423b00128661d | [
"MIT"
] | 5 | 2019-11-25T06:46:28.000Z | 2019-12-23T00:55:53.000Z | lib/earmark_hashed_link.ex | niku/earmark_hashed_link | bb72e21b133038ea4b58e1f971f423b00128661d | [
"MIT"
] | null | null | null | lib/earmark_hashed_link.ex | niku/earmark_hashed_link | bb72e21b133038ea4b58e1f971f423b00128661d | [
"MIT"
] | null | null | null | defmodule EarmarkHashedLink do
@moduledoc """
Documentation for EarmarkHashedLink.
"""
@doc """
Splits hashed link from given text
## Examples
iex> EarmarkHashedLink.split_hashed_link("abc #def ghi")
["abc ", "#def", " ghi"]
"""
@spec split_hashed_link(binary) :: [binary]
def split_hashed_link(text) when is_binary(text) do
Regex.split(~r/#[[:word:]]+/u, text, trim: true, include_captures: true)
end
@doc """
Adds hashed link to given ast
"""
@spec add_hashed_link([binary | {any, any, [any]}]) :: [any]
def add_hashed_link(ast) when is_list(ast) do
do_add_hashed_link(ast, [], [])
end
@doc false
def do_add_hashed_link(_ast, _ancestor_tags, _result)
def do_add_hashed_link([], _ancestor_tags, result), do: Enum.reverse(result)
def do_add_hashed_link([{tag, atts, ast} | rest], ancestor_tags, result) do
do_add_hashed_link(rest, [tag | ancestor_tags], [
{tag, atts, do_add_hashed_link(ast, [tag | ancestor_tags], [])} | result
])
end
def do_add_hashed_link([string | rest], ancestor_tags, result) when is_binary(string) do
if Enum.any?(
ancestor_tags,
&Enum.member?([:comment, "a", "blockquote", "code", "img", "pre"], &1)
) do
do_add_hashed_link(rest, ancestor_tags, [string | result])
else
new_ast =
split_hashed_link(string)
|> Enum.map(fn
"#" <> hashed_link ->
# Link representation
{"a", [{"href", hashed_link}], ["#" <> hashed_link]}
text ->
text
end)
|> Enum.reverse()
do_add_hashed_link(rest, ancestor_tags, new_ast ++ result)
end
end
end
| 27.145161 | 90 | 0.616162 |
796000f4188cf29eae9c00488b93d9180215d4c0 | 1,036 | exs | Elixir | test/layers/builder_queries/aggregate/aggregate_group_min_test.exs | haskric/mongo_agile | 393e1e96f706e3580f6bac9ff7bcc081b0a2e4eb | [
"MIT"
] | 1 | 2020-12-30T18:30:32.000Z | 2020-12-30T18:30:32.000Z | test/layers/builder_queries/aggregate/aggregate_group_min_test.exs | haskric/mongo_agile | 393e1e96f706e3580f6bac9ff7bcc081b0a2e4eb | [
"MIT"
] | null | null | null | test/layers/builder_queries/aggregate/aggregate_group_min_test.exs | haskric/mongo_agile | 393e1e96f706e3580f6bac9ff7bcc081b0a2e4eb | [
"MIT"
] | null | null | null | defmodule MongoAgile.BuilderQueries.Aggregate.GroupMin.Test do
@moduledoc false
use ExUnit.Case
defmodule DataSetExample do
@moduledoc false
import MongoAgile.Queries.AgilQuery
use MongoAgile.BuilderQueries,
collection: "test_aggregate",
pid_mongo: :mongo
find "get_all", where: %{}
aggregate "min_likes",
pipeline: [
%{
"$group" =>
%{
"_id" => nil,
"min_likes" => %{
"$min" => "$likes"
}
}
}
]
end
test "min_likes" do
result = DataSetExample.run_query("min_likes")
|> Enum.to_list()
min_likes = calcular_min_likes()
assert result == [%{"_id" => nil, "min_likes" => min_likes}]
end
def calcular_min_likes do
{:ok, docs} = DataSetExample.run_query("get_all")
docs |> Enum.reduce(nil, fn(doc, acc) ->
likes = doc["likes"]
cond do
acc == nil -> likes
likes < acc -> likes
true -> acc
end
end)
end
end
| 20.72 | 64 | 0.548263 |
796097f685ec973b356b4926ea522d4c456ac531 | 1,722 | ex | Elixir | clients/content/lib/google_api/content/v2/model/orders_acknowledge_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_acknowledge_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_acknowledge_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Model.OrdersAcknowledgeResponse do
  @moduledoc """
  ## Attributes

  *   `executionStatus` (*type:* `String.t`, *default:* `nil`) - The status of the execution.
      Acceptable values are:
      - "`duplicate`"
      - "`executed`"
  *   `kind` (*type:* `String.t`, *default:* `content#ordersAcknowledgeResponse`) - Identifies what kind of resource this is. Value: the fixed string "content#ordersAcknowledgeResponse".
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :executionStatus => String.t(),
          :kind => String.t()
        }

  field(:executionStatus)
  field(:kind)
end

# Decodes a JSON map into the model struct (delegates to the generated decoder).
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.OrdersAcknowledgeResponse do
  def decode(value, options) do
    GoogleApi.Content.V2.Model.OrdersAcknowledgeResponse.decode(value, options)
  end
end

# Encodes the model struct back to JSON via the shared Gax base encoder.
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.OrdersAcknowledgeResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 31.888889 | 186 | 0.723577 |
79609f2655e4b043e710ccfeabcca18e1aaf292f | 2,279 | ex | Elixir | lib/worker/attemptor/registry.ex | UnofficialJuliaMirror/bors-ng-bors-ng | fcec51b5f6e5a691d3e310bd35ba7b74e55b667f | [
"Apache-2.0"
] | null | null | null | lib/worker/attemptor/registry.ex | UnofficialJuliaMirror/bors-ng-bors-ng | fcec51b5f6e5a691d3e310bd35ba7b74e55b667f | [
"Apache-2.0"
] | 1 | 2020-03-07T08:28:14.000Z | 2020-03-07T08:28:14.000Z | lib/worker/attemptor/registry.ex | UnofficialJuliaMirror/bors-ng-bors-ng | fcec51b5f6e5a691d3e310bd35ba7b74e55b667f | [
"Apache-2.0"
] | 3 | 2020-11-15T16:17:58.000Z | 2021-06-04T11:26:42.000Z | defmodule BorsNG.Worker.Attemptor.Registry do
@moduledoc """
The "Attemptor" manages the project's try branch.
This is the registry of each individual attemptor.
It starts the attemptor if it doesn't exist,
restarts it if it crashes,
and logs the crashes because that's needed sometimes.
Note that the attemptor and registry are always on the same node.
Sharding between them will be done by directing which registry to go to.
"""
use GenServer
alias BorsNG.Worker.Attemptor
alias BorsNG.Database.Crash
alias BorsNG.Database.Project
alias BorsNG.Database.Repo
@name BorsNG.Worker.Attemptor.Registry
# Public API
def start_link do
GenServer.start_link(__MODULE__, :ok, name: @name)
end
def get(project_id) when is_integer(project_id) do
GenServer.call(@name, {:get, project_id})
end
# Server callbacks
def init(:ok) do
names = Project.active
|> Repo.all()
|> Enum.map(&{&1.id, do_start(&1.id)})
|> Map.new()
refs = names
|> Enum.map(&{Process.monitor(elem(&1, 1)), elem(&1, 0)})
|> Map.new()
{:ok, {names, refs}}
end
def do_start(project_id) do
{:ok, pid} = Attemptor.Supervisor.start(project_id)
pid
end
def start_and_insert(project_id, {names, refs}) do
pid = do_start(project_id)
names = Map.put(names, project_id, pid)
ref = Process.monitor(pid)
refs = Map.put(refs, ref, project_id)
{pid, {names, refs}}
end
def handle_call({:get, project_id}, _from, {names, _refs} = state) do
{pid, state} = case names[project_id] do
nil ->
start_and_insert(project_id, state)
pid ->
{pid, state}
end
{:reply, pid, state}
end
def handle_info({:DOWN, ref, :process, _pid, :normal}, {names, refs}) do
{project_id, refs} = Map.pop(refs, ref)
names = Map.delete(names, project_id)
{:noreply, {names, refs}}
end
def handle_info({:DOWN, ref, :process, _, reason}, {_, refs} = state) do
project_id = refs[ref]
{_pid, state} = start_and_insert(project_id, state)
Repo.insert(%Crash{
project_id: project_id,
component: "try",
crash: inspect(reason, pretty: true, width: 60)})
{:noreply, state}
end
def handle_info(_msg, state) do
{:noreply, state}
end
end
| 25.897727 | 74 | 0.656867 |
7960c9c543d8afc1f09b1c9a8ba4c0bfc96d61db | 52,173 | ex | Elixir | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/api/projects.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/api/projects.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/api/projects.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CloudResourceManager.V1.Api.Projects do
@moduledoc """
API calls for all endpoints tagged `Projects`.
"""
alias GoogleApi.CloudResourceManager.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@doc """
Clears a `Policy` from a resource.
## Parameters
- connection (GoogleApi.CloudResourceManager.V1.Connection): Connection to server
- projects_id (String.t): Part of `resource`. Name of the resource for the `Policy` to clear.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (ClearOrgPolicyRequest):
## Returns
{:ok, %GoogleApi.CloudResourceManager.V1.Model.Empty{}} on success
{:error, info} on failure
"""
  @spec cloudresourcemanager_projects_clear_org_policy(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.CloudResourceManager.V1.Model.Empty.t()} | {:error, Tesla.Env.t()}
  def cloudresourcemanager_projects_clear_org_policy(
        connection,
        projects_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # Maps each accepted optional parameter to where it is placed in the
    # HTTP request (query string vs. request body).
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :upload_protocol => :query,
      :uploadType => :query,
      :body => :body
    }

    # Build the POST request; `projects_id` is percent-encoded into the path.
    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/v1/projects/{projectsId}:clearOrgPolicy", %{
        "projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)

    # Execute and decode the JSON response into the Empty model struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V1.Model.Empty{}])
  end
@doc """
Request that a new Project be created. The result is an Operation which can be used to track the creation process. It is automatically deleted after a few hours, so there is no need to call DeleteOperation. Our SLO permits Project creation to take up to 30 seconds at the 90th percentile. As of 2016-08-29, we are observing 6 seconds 50th percentile latency. 95th percentile latency is around 11 seconds. We recommend polling at the 5th second with an exponential backoff. Authorization requires the Google IAM permission `resourcemanager.projects.create` on the specified parent for the new project. The parent is identified by a specified ResourceId, which must include both an ID and a type, such as organization. This method does not associate the new project with a billing account. You can set or update the billing account associated with a project using the [`projects.updateBillingInfo`] (/billing/reference/rest/v1/projects/updateBillingInfo) method.
## Parameters
- connection (GoogleApi.CloudResourceManager.V1.Connection): Connection to server
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (Project):
## Returns
{:ok, %GoogleApi.CloudResourceManager.V1.Model.Operation{}} on success
{:error, info} on failure
"""
@spec cloudresourcemanager_projects_create(Tesla.Env.client(), keyword()) ::
{:ok, GoogleApi.CloudResourceManager.V1.Model.Operation.t()} | {:error, Tesla.Env.t()}
def cloudresourcemanager_projects_create(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:upload_protocol => :query,
:uploadType => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/projects")
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V1.Model.Operation{}])
end
@doc """
Marks the Project identified by the specified `project_id` (for example, `my-project-123`) for deletion. This method will only affect the Project if it has a lifecycle state of ACTIVE. This method changes the Project's lifecycle state from ACTIVE to DELETE_REQUESTED. The deletion starts at an unspecified time, at which point the Project is no longer accessible. Until the deletion completes, you can check the lifecycle state checked by retrieving the Project with GetProject, and the Project remains visible to ListProjects. However, you cannot update the project. After the deletion completes, the Project is not retrievable by the GetProject and ListProjects methods. The caller must have modify permissions for this Project.
## Parameters
- connection (GoogleApi.CloudResourceManager.V1.Connection): Connection to server
- project_id (String.t): The Project ID (for example, `foo-bar-123`). Required.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
## Returns
{:ok, %GoogleApi.CloudResourceManager.V1.Model.Empty{}} on success
{:error, info} on failure
"""
@spec cloudresourcemanager_projects_delete(Tesla.Env.client(), String.t(), keyword()) ::
{:ok, GoogleApi.CloudResourceManager.V1.Model.Empty.t()} | {:error, Tesla.Env.t()}
def cloudresourcemanager_projects_delete(
connection,
project_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:upload_protocol => :query,
:uploadType => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/v1/projects/{projectId}", %{
"projectId" => URI.encode(project_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V1.Model.Empty{}])
end
@doc """
Retrieves the Project identified by the specified `project_id` (for example, `my-project-123`). The caller must have read permissions for this Project.
## Parameters
- connection (GoogleApi.CloudResourceManager.V1.Connection): Connection to server
- project_id (String.t): The Project ID (for example, `my-project-123`). Required.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
## Returns
{:ok, %GoogleApi.CloudResourceManager.V1.Model.Project{}} on success
{:error, info} on failure
"""
@spec cloudresourcemanager_projects_get(Tesla.Env.client(), String.t(), keyword()) ::
{:ok, GoogleApi.CloudResourceManager.V1.Model.Project.t()} | {:error, Tesla.Env.t()}
def cloudresourcemanager_projects_get(connection, project_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:upload_protocol => :query,
:uploadType => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/projects/{projectId}", %{
"projectId" => URI.encode(project_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V1.Model.Project{}])
end
@doc """
Gets a list of ancestors in the resource hierarchy for the Project identified by the specified `project_id` (for example, `my-project-123`). The caller must have read permissions for this Project.
## Parameters
- connection (GoogleApi.CloudResourceManager.V1.Connection): Connection to server
- project_id (String.t): The Project ID (for example, `my-project-123`). Required.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (GetAncestryRequest):
## Returns
{:ok, %GoogleApi.CloudResourceManager.V1.Model.GetAncestryResponse{}} on success
{:error, info} on failure
"""
@spec cloudresourcemanager_projects_get_ancestry(Tesla.Env.client(), String.t(), keyword()) ::
{:ok, GoogleApi.CloudResourceManager.V1.Model.GetAncestryResponse.t()}
| {:error, Tesla.Env.t()}
def cloudresourcemanager_projects_get_ancestry(
connection,
project_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:upload_protocol => :query,
:uploadType => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/projects/{projectId}:getAncestry", %{
"projectId" => URI.encode(project_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.CloudResourceManager.V1.Model.GetAncestryResponse{}]
)
end
@doc """
Gets the effective `Policy` on a resource. This is the result of merging `Policies` in the resource hierarchy. The returned `Policy` will not have an `etag`set because it is a computed `Policy` across multiple resources. Subtrees of Resource Manager resource hierarchy with 'under:' prefix will not be expanded.
## Parameters
- connection (GoogleApi.CloudResourceManager.V1.Connection): Connection to server
- projects_id (String.t): Part of `resource`. The name of the resource to start computing the effective `Policy`.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (GetEffectiveOrgPolicyRequest):
## Returns
{:ok, %GoogleApi.CloudResourceManager.V1.Model.OrgPolicy{}} on success
{:error, info} on failure
"""
@spec cloudresourcemanager_projects_get_effective_org_policy(
Tesla.Env.client(),
String.t(),
keyword()
) ::
{:ok, GoogleApi.CloudResourceManager.V1.Model.OrgPolicy.t()} | {:error, Tesla.Env.t()}
def cloudresourcemanager_projects_get_effective_org_policy(
connection,
projects_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:upload_protocol => :query,
:uploadType => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/projects/{projectsId}:getEffectiveOrgPolicy", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V1.Model.OrgPolicy{}])
end
@doc """
Returns the IAM access control policy for the specified Project. Permission is denied if the policy or the resource does not exist. Authorization requires the Google IAM permission `resourcemanager.projects.getIamPolicy` on the project. For additional information about resource structure and identification, see [Resource Names](/apis/design/resource_names).
## Parameters
- connection (GoogleApi.CloudResourceManager.V1.Connection): Connection to server
- resource (String.t): REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (GetIamPolicyRequest):
## Returns
{:ok, %GoogleApi.CloudResourceManager.V1.Model.Policy{}} on success
{:error, info} on failure
"""
@spec cloudresourcemanager_projects_get_iam_policy(Tesla.Env.client(), String.t(), keyword()) ::
{:ok, GoogleApi.CloudResourceManager.V1.Model.Policy.t()} | {:error, Tesla.Env.t()}
def cloudresourcemanager_projects_get_iam_policy(
connection,
resource,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:upload_protocol => :query,
:uploadType => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/projects/{resource}:getIamPolicy", %{
"resource" => URI.encode(resource, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V1.Model.Policy{}])
end
@doc """
Gets a `Policy` on a resource. If no `Policy` is set on the resource, a `Policy` is returned with default values including `POLICY_TYPE_NOT_SET` for the `policy_type oneof`. The `etag` value can be used with `SetOrgPolicy()` to create or update a `Policy` during read-modify-write.
## Parameters
- connection (GoogleApi.CloudResourceManager.V1.Connection): Connection to server
- projects_id (String.t): Part of `resource`. Name of the resource the `Policy` is set on.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (GetOrgPolicyRequest):
## Returns
{:ok, %GoogleApi.CloudResourceManager.V1.Model.OrgPolicy{}} on success
{:error, info} on failure
"""
@spec cloudresourcemanager_projects_get_org_policy(Tesla.Env.client(), String.t(), keyword()) ::
{:ok, GoogleApi.CloudResourceManager.V1.Model.OrgPolicy.t()} | {:error, Tesla.Env.t()}
def cloudresourcemanager_projects_get_org_policy(
connection,
projects_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:upload_protocol => :query,
:uploadType => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/projects/{projectsId}:getOrgPolicy", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V1.Model.OrgPolicy{}])
end
@doc """
Lists Projects that the caller has the `resourcemanager.projects.get` permission on and satisfy the specified filter. This method returns Projects in an unspecified order. This method is eventually consistent with project mutations; this means that a newly created project may not appear in the results or recent updates to an existing project may not be reflected in the results. To retrieve the latest state of a project, use the GetProject method. NOTE: If the request filter contains a `parent.type` and `parent.id` and the caller has the `resourcemanager.projects.list` permission on the parent, the results will be drawn from an alternate index which provides more consistent results. In future versions of this API, this List method will be split into List and Search to properly capture the behavorial difference.
## Parameters
- connection (GoogleApi.CloudResourceManager.V1.Connection): Connection to server
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :filter (String.t): An expression for filtering the results of the request. Filter rules are case insensitive. The fields eligible for filtering are: + `name` + `id` + `labels.<key>` (where *key* is the name of a label) + `parent.type` + `parent.id` Some examples of using labels as filters: | Filter | Description | |------------------|-----------------------------------------------------| | name:how* | The project's name starts with \"how\". | | name:Howl | The project's name is `Howl` or `howl`. | | name:HOWL | Equivalent to above. | | NAME:howl | Equivalent to above. | | labels.color:* | The project has the label `color`. | | labels.color:red | The project's label `color` has the value `red`. | | labels.color:red&nbsp;labels.size:big |The project's label `color` has the value `red` and its label `size` has the value `big`. | If no filter is specified, the call will return projects for which the user has the `resourcemanager.projects.get` permission. NOTE: To perform a by-parent query (eg., what projects are directly in a Folder), the caller must have the `resourcemanager.projects.list` permission on the parent and the filter must contain both a `parent.type` and a `parent.id` restriction (example: \"parent.type:folder parent.id:123\"). In this case an alternate search index is used which provides more consistent results. Optional.
- :pageSize (integer()): The maximum number of Projects to return in the response. The server can return fewer Projects than requested. If unspecified, server picks an appropriate default. Optional.
- :pageToken (String.t): A pagination token returned from a previous call to ListProjects that indicates from where listing should continue. Optional.
## Returns
{:ok, %GoogleApi.CloudResourceManager.V1.Model.ListProjectsResponse{}} on success
{:error, info} on failure
"""
@spec cloudresourcemanager_projects_list(Tesla.Env.client(), keyword()) ::
{:ok, GoogleApi.CloudResourceManager.V1.Model.ListProjectsResponse.t()}
| {:error, Tesla.Env.t()}
def cloudresourcemanager_projects_list(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:upload_protocol => :query,
:uploadType => :query,
:filter => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/projects")
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.CloudResourceManager.V1.Model.ListProjectsResponse{}]
)
end
@doc """
Lists `Constraints` that could be applied on the specified resource.
## Parameters
- connection (GoogleApi.CloudResourceManager.V1.Connection): Connection to server
- projects_id (String.t): Part of `resource`. Name of the resource to list `Constraints` for.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (ListAvailableOrgPolicyConstraintsRequest):
## Returns
{:ok, %GoogleApi.CloudResourceManager.V1.Model.ListAvailableOrgPolicyConstraintsResponse{}} on success
{:error, info} on failure
"""
@spec cloudresourcemanager_projects_list_available_org_policy_constraints(
Tesla.Env.client(),
String.t(),
keyword()
) ::
{:ok,
GoogleApi.CloudResourceManager.V1.Model.ListAvailableOrgPolicyConstraintsResponse.t()}
| {:error, Tesla.Env.t()}
def cloudresourcemanager_projects_list_available_org_policy_constraints(
connection,
projects_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:upload_protocol => :query,
:uploadType => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/projects/{projectsId}:listAvailableOrgPolicyConstraints", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[
struct:
%GoogleApi.CloudResourceManager.V1.Model.ListAvailableOrgPolicyConstraintsResponse{}
]
)
end
@doc """
Lists all the `Policies` set for a particular resource.
## Parameters
- connection (GoogleApi.CloudResourceManager.V1.Connection): Connection to server
- projects_id (String.t): Part of `resource`. Name of the resource to list Policies for.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (ListOrgPoliciesRequest):
## Returns
{:ok, %GoogleApi.CloudResourceManager.V1.Model.ListOrgPoliciesResponse{}} on success
{:error, info} on failure
"""
@spec cloudresourcemanager_projects_list_org_policies(Tesla.Env.client(), String.t(), keyword()) ::
{:ok, GoogleApi.CloudResourceManager.V1.Model.ListOrgPoliciesResponse.t()}
| {:error, Tesla.Env.t()}
def cloudresourcemanager_projects_list_org_policies(
connection,
projects_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:upload_protocol => :query,
:uploadType => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/projects/{projectsId}:listOrgPolicies", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.CloudResourceManager.V1.Model.ListOrgPoliciesResponse{}]
)
end
@doc """
Sets the IAM access control policy for the specified Project. Overwrites any existing policy. The following constraints apply when using `setIamPolicy()`: + Project does not support `allUsers` and `allAuthenticatedUsers` as `members` in a `Binding` of a `Policy`. + The owner role can be granted only to `user` and `serviceAccount`. + Service accounts can be made owners of a project directly without any restrictions. However, to be added as an owner, a user must be invited via Cloud Platform console and must accept the invitation. + A user cannot be granted the owner role using `setIamPolicy()`. The user must be granted the owner role using the Cloud Platform Console and must explicitly accept the invitation. + You can only grant ownership of a project to a member by using the GCP Console. Inviting a member will deliver an invitation email that they must accept. An invitation email is not generated if you are granting a role other than owner, or if both the member you are inviting and the project are part of your organization. + Membership changes that leave the project without any owners that have accepted the Terms of Service (ToS) will be rejected. + If the project is not part of an organization, there must be at least one owner who has accepted the Terms of Service (ToS) agreement in the policy. Calling `setIamPolicy()` to remove the last ToS-accepted owner from the policy will fail. This restriction also applies to legacy projects that no longer have owners who have accepted the ToS. Edits to IAM policies will be rejected until the lack of a ToS-accepting owner is rectified. + This method will replace the existing policy, and cannot be used to append additional IAM settings. Note: Removing service accounts from policies or changing their roles can render services completely inoperable. It is important to understand how the service account is being used before removing or updating its roles. 
Authorization requires the Google IAM permission `resourcemanager.projects.setIamPolicy` on the project
## Parameters
- connection (GoogleApi.CloudResourceManager.V1.Connection): Connection to server
- resource (String.t): REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (SetIamPolicyRequest):
## Returns
{:ok, %GoogleApi.CloudResourceManager.V1.Model.Policy{}} on success
{:error, info} on failure
"""
@spec cloudresourcemanager_projects_set_iam_policy(Tesla.Env.client(), String.t(), keyword()) ::
        {:ok, GoogleApi.CloudResourceManager.V1.Model.Policy.t()} | {:error, Tesla.Env.t()}
def cloudresourcemanager_projects_set_iam_policy(
      connection,
      resource,
      optional_params \\ [],
      opts \\ []
    ) do
  # Every optional parameter travels in the query string except the request body.
  query_keys = [
    :"$.xgafv",
    :access_token,
    :alt,
    :callback,
    :fields,
    :key,
    :oauth_token,
    :prettyPrint,
    :quotaUser,
    :upload_protocol,
    :uploadType
  ]

  optional_params_config =
    query_keys
    |> Map.new(fn key -> {key, :query} end)
    |> Map.put(:body, :body)

  # Build the signed POST against the IAM endpoint for this resource.
  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/projects/{resource}:setIamPolicy", %{
      "resource" => URI.encode(resource, &URI.char_unreserved?/1)
    })
    |> Request.add_optional_params(optional_params_config, optional_params)

  connection
  |> Connection.execute(request)
  |> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V1.Model.Policy{}])
end
@doc """
Updates the specified `Policy` on the resource. Creates a new `Policy` for that `Constraint` on the resource if one does not exist. Not supplying an `etag` on the request `Policy` results in an unconditional write of the `Policy`.
## Parameters
- connection (GoogleApi.CloudResourceManager.V1.Connection): Connection to server
- projects_id (String.t): Part of `resource`. Resource name of the resource to attach the `Policy`.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (SetOrgPolicyRequest):
## Returns
{:ok, %GoogleApi.CloudResourceManager.V1.Model.OrgPolicy{}} on success
{:error, info} on failure
"""
@spec cloudresourcemanager_projects_set_org_policy(Tesla.Env.client(), String.t(), keyword()) ::
        {:ok, GoogleApi.CloudResourceManager.V1.Model.OrgPolicy.t()} | {:error, Tesla.Env.t()}
def cloudresourcemanager_projects_set_org_policy(
      connection,
      projects_id,
      optional_params \\ [],
      opts \\ []
    ) do
  # All optional parameters map to the query string, apart from :body.
  optional_params_config =
    for key <- [
          :"$.xgafv",
          :access_token,
          :alt,
          :callback,
          :fields,
          :key,
          :oauth_token,
          :prettyPrint,
          :quotaUser,
          :upload_protocol,
          :uploadType
        ],
        into: %{body: :body} do
      {key, :query}
    end

  req =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/projects/{projectsId}:setOrgPolicy", %{
      "projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
    })
    |> Request.add_optional_params(optional_params_config, optional_params)

  connection
  |> Connection.execute(req)
  |> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V1.Model.OrgPolicy{}])
end
@doc """
Returns permissions that a caller has on the specified Project. There are no permissions required for making this API call.
## Parameters
- connection (GoogleApi.CloudResourceManager.V1.Connection): Connection to server
- resource (String.t): REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (TestIamPermissionsRequest):
## Returns
{:ok, %GoogleApi.CloudResourceManager.V1.Model.TestIamPermissionsResponse{}} on success
{:error, info} on failure
"""
@spec cloudresourcemanager_projects_test_iam_permissions(
        Tesla.Env.client(),
        String.t(),
        keyword()
      ) ::
        {:ok, GoogleApi.CloudResourceManager.V1.Model.TestIamPermissionsResponse.t()}
        | {:error, Tesla.Env.t()}
def cloudresourcemanager_projects_test_iam_permissions(
      connection,
      resource,
      optional_params \\ [],
      opts \\ []
    ) do
  # Query-string parameters; only :body is sent as the request payload.
  query_keys = [
    :"$.xgafv",
    :access_token,
    :alt,
    :callback,
    :fields,
    :key,
    :oauth_token,
    :prettyPrint,
    :quotaUser,
    :upload_protocol,
    :uploadType
  ]

  optional_params_config =
    query_keys
    |> Map.new(fn key -> {key, :query} end)
    |> Map.put(:body, :body)

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/projects/{resource}:testIamPermissions", %{
      "resource" => URI.encode(resource, &URI.char_unreserved?/1)
    })
    |> Request.add_optional_params(optional_params_config, optional_params)

  connection
  |> Connection.execute(request)
  |> Response.decode(
    opts ++ [struct: %GoogleApi.CloudResourceManager.V1.Model.TestIamPermissionsResponse{}]
  )
end
@doc """
Restores the Project identified by the specified `project_id` (for example, `my-project-123`). You can only use this method for a Project that has a lifecycle state of DELETE_REQUESTED. After deletion starts, the Project cannot be restored. The caller must have modify permissions for this Project.
## Parameters
- connection (GoogleApi.CloudResourceManager.V1.Connection): Connection to server
- project_id (String.t): The project ID (for example, `foo-bar-123`). Required.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (UndeleteProjectRequest):
## Returns
{:ok, %GoogleApi.CloudResourceManager.V1.Model.Empty{}} on success
{:error, info} on failure
"""
@spec cloudresourcemanager_projects_undelete(Tesla.Env.client(), String.t(), keyword()) ::
        {:ok, GoogleApi.CloudResourceManager.V1.Model.Empty.t()} | {:error, Tesla.Env.t()}
def cloudresourcemanager_projects_undelete(
      connection,
      project_id,
      optional_params \\ [],
      opts \\ []
    ) do
  # Optional parameters all go in the query string, except the request body.
  optional_params_config =
    for key <- [
          :"$.xgafv",
          :access_token,
          :alt,
          :callback,
          :fields,
          :key,
          :oauth_token,
          :prettyPrint,
          :quotaUser,
          :upload_protocol,
          :uploadType
        ],
        into: %{body: :body} do
      {key, :query}
    end

  req =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/projects/{projectId}:undelete", %{
      "projectId" => URI.encode(project_id, &URI.char_unreserved?/1)
    })
    |> Request.add_optional_params(optional_params_config, optional_params)

  connection
  |> Connection.execute(req)
  |> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V1.Model.Empty{}])
end
@doc """
Updates the attributes of the Project identified by the specified `project_id` (for example, `my-project-123`). The caller must have modify permissions for this Project.
## Parameters
- connection (GoogleApi.CloudResourceManager.V1.Connection): Connection to server
- project_id (String.t): The project ID (for example, `my-project-123`). Required.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (Project):
## Returns
{:ok, %GoogleApi.CloudResourceManager.V1.Model.Project{}} on success
{:error, info} on failure
"""
@spec cloudresourcemanager_projects_update(Tesla.Env.client(), String.t(), keyword()) ::
        {:ok, GoogleApi.CloudResourceManager.V1.Model.Project.t()} | {:error, Tesla.Env.t()}
def cloudresourcemanager_projects_update(
      connection,
      project_id,
      optional_params \\ [],
      opts \\ []
    ) do
  # Query-string parameters; :body is the PUT payload (the updated Project).
  query_keys = [
    :"$.xgafv",
    :access_token,
    :alt,
    :callback,
    :fields,
    :key,
    :oauth_token,
    :prettyPrint,
    :quotaUser,
    :upload_protocol,
    :uploadType
  ]

  optional_params_config =
    query_keys
    |> Map.new(fn key -> {key, :query} end)
    |> Map.put(:body, :body)

  request =
    Request.new()
    |> Request.method(:put)
    |> Request.url("/v1/projects/{projectId}", %{
      "projectId" => URI.encode(project_id, &URI.char_unreserved?/1)
    })
    |> Request.add_optional_params(optional_params_config, optional_params)

  connection
  |> Connection.execute(request)
  |> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V1.Model.Project{}])
end
end
| 50.653398 | 2,153 | 0.677094 |
7960f0ae6e3579b4dcd332c229baba2ac5942366 | 1,770 | ex | Elixir | clients/slides/lib/google_api/slides/v1/model/color_stop.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/color_stop.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/color_stop.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Slides.V1.Model.ColorStop do
  @moduledoc """
  A single color/position pair within a gradient band.

  ## Attributes

  - alpha (Float): Alpha value of this color in the gradient band. Defaults to 1.0, fully opaque. Defaults to: `null`.
  - color (OpaqueColor): The color of the gradient stop. Defaults to: `null`.
  - position (Float): Relative position of the color stop in the gradient band, measured as a percentage in the interval [0.0, 1.0]. Defaults to: `null`.
  """

  defstruct [:alpha, :color, :position]
end
defimpl Poison.Decoder, for: GoogleApi.Slides.V1.Model.ColorStop do
  import GoogleApi.Slides.V1.Deserializer

  # Only the nested `color` field needs post-processing into its struct type.
  def decode(struct, options) do
    deserialize(struct, :color, :struct, GoogleApi.Slides.V1.Model.OpaqueColor, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Slides.V1.Model.ColorStop do
  # Delegate encoding to the shared GoogleApi serializer helper.
  def encode(struct, options) do
    GoogleApi.Slides.V1.Deserializer.serialize_non_nil(struct, options)
  end
end
| 34.038462 | 175 | 0.737853 |
7961353924fbfd4996b05d33a81fec32160b3d73 | 1,983 | exs | Elixir | test/sources/correios/result_formatter_test.exs | douglascamata/cep | 042aa6d2bcb64b459ac6ac281add2f2f753a6877 | [
"MIT"
] | 11 | 2016-03-23T20:34:19.000Z | 2019-07-16T13:21:48.000Z | test/sources/correios/result_formatter_test.exs | douglascamata/cep | 042aa6d2bcb64b459ac6ac281add2f2f753a6877 | [
"MIT"
] | 5 | 2016-03-22T05:35:46.000Z | 2019-04-02T16:44:00.000Z | test/sources/correios/result_formatter_test.exs | douglascamata/cep | 042aa6d2bcb64b459ac6ac281add2f2f753a6877 | [
"MIT"
] | 3 | 2019-03-26T18:54:59.000Z | 2020-07-06T22:41:51.000Z | defmodule CepSourcesCorreiosResultFormatterTest do
use ExUnit.Case, async: true
alias Cep.Sources.Correios.ResultFormatter
setup_all do
  # Format the canned Correios SOAP response once for the whole suite.
  formatted = ResultFormatter.format(correios_response())
  {:ok, formatted_result: formatted}
end
describe "format/1" do
  test "should translate 'bairro' into 'neighborhood'", %{formatted_result: result} do
    assert result.neighborhood == correios_response_field("bairro")
  end

  test "should translate 'cep' into 'cep'", %{formatted_result: result} do
    assert result.cep == correios_response_field("cep")
  end

  test "should translate 'cidade' into 'city'", %{formatted_result: result} do
    assert result.city == correios_response_field("cidade")
  end

  test "should translate 'complemento' into 'complement'", %{formatted_result: result} do
    assert result.complement == correios_response_field("complemento")
  end

  test "should translate 'end' into 'street'", %{formatted_result: result} do
    assert result.street == correios_response_field("end")
  end

  test "should translate 'uf' into 'state'", %{formatted_result: result} do
    assert result.state == correios_response_field("uf")
  end
end
# Pull the text of a single <return> child element out of the canned response.
defp correios_response_field(name) do
  import SweetXml

  correios_response()
  |> xpath(~x"//return/#{name}/text()")
  |> to_string()
end
# Canned Correios SOAP envelope used as the fixture for all assertions above.
# The heredoc content must stay byte-identical: tests compare against values
# extracted from this exact XML.
defp correios_response do
  """
  <soap:Envelope
  xmlns:soap=\"http://schemas.xmlsoap.org/soap/envelope/\">
  <soap:Body>
  <ns2:consultaCEPResponse
  xmlns:ns2=\"http://cliente.bean.master.sigep.bsb.correios.com.br/\">
  <return>
  <bairro></bairro>
  <cep>29375000</cep>
  <cidade>Venda Nova do Imigrante</cidade>
  <complemento></complemento>
  <complemento2></complemento2>
  <end></end>
  <id>0</id>
  <uf>ES</uf>
  </return>
  </ns2:consultaCEPResponse>
  </soap:Body>
  </soap:Envelope>
  """
end
end
| 31.47619 | 90 | 0.641452 |
79613c2175a0e93e8dd02ce58fc0bebc51426464 | 1,952 | ex | Elixir | lib/subscription_web.ex | manojsamanta/stripe-subscription | d544a9dbde4c1c05998561ba4d77c966f00b18a5 | [
"MIT"
] | null | null | null | lib/subscription_web.ex | manojsamanta/stripe-subscription | d544a9dbde4c1c05998561ba4d77c966f00b18a5 | [
"MIT"
] | null | null | null | lib/subscription_web.ex | manojsamanta/stripe-subscription | d544a9dbde4c1c05998561ba4d77c966f00b18a5 | [
"MIT"
] | null | null | null | defmodule SubscriptionWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use SubscriptionWeb, :controller
use SubscriptionWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
# Quoted block injected by `use SubscriptionWeb, :controller`: sets the
# controller namespace and brings in Plug, Gettext and the route helpers.
def controller do
  quote do
    use Phoenix.Controller, namespace: SubscriptionWeb

    import Plug.Conn
    import SubscriptionWeb.Gettext
    alias SubscriptionWeb.Router.Helpers, as: Routes
  end
end
# Quoted block injected by `use SubscriptionWeb, :view`: configures the
# template root and splices in the shared view helpers defined below.
def view do
  quote do
    use Phoenix.View,
      root: "lib/subscription_web/templates",
      namespace: SubscriptionWeb

    # Import convenience functions from controllers
    import Phoenix.Controller,
      only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]

    # Include shared imports and aliases for views
    unquote(view_helpers())
  end
end
# Quoted block injected by `use SubscriptionWeb, :router`.
def router do
  quote do
    use Phoenix.Router

    import Plug.Conn
    import Phoenix.Controller
  end
end
# Quoted block injected by `use SubscriptionWeb, :channel`.
def channel do
  quote do
    use Phoenix.Channel

    import SubscriptionWeb.Gettext
  end
end
# Shared imports/aliases for anything that renders templates; spliced into
# `view/0` above via `unquote/1`.
defp view_helpers do
  quote do
    # Use all HTML functionality (forms, tags, etc)
    use Phoenix.HTML

    # Import basic rendering functionality (render, render_layout, etc)
    import Phoenix.View

    import SubscriptionWeb.ErrorHelpers
    import SubscriptionWeb.Gettext
    alias SubscriptionWeb.Router.Helpers, as: Routes
  end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
  # `which` names one of the zero-arity functions above (:controller, :view,
  # :router, :channel); its quoted result is injected into the caller.
  apply(__MODULE__, which, [])
end
end
| 23.804878 | 76 | 0.695184 |
79613fc9c84d5d7700a37dc5a89aebc88f4f1e8a | 3,129 | ex | Elixir | lib/postgrex/query.ex | activeprospect/postgrex | d267e419de5db61ac8705210dec8527e4bf94a84 | [
"Apache-2.0"
] | null | null | null | lib/postgrex/query.ex | activeprospect/postgrex | d267e419de5db61ac8705210dec8527e4bf94a84 | [
"Apache-2.0"
] | 1 | 2020-07-17T10:07:44.000Z | 2020-07-17T10:07:44.000Z | lib/postgrex/query.ex | activeprospect/postgrex | d267e419de5db61ac8705210dec8527e4bf94a84 | [
"Apache-2.0"
] | null | null | null | defmodule Postgrex.Query do
@moduledoc """
Query struct returned from a successfully prepared query.
Its public fields are:
* `name` - The name of the prepared statement;
* `statement` - The prepared statement;
* `columns` - The column names;
* `ref` - A reference used to identify prepared queries;
## Prepared queries
Once a query is prepared with `Postgrex.prepare/4`, the
returned query will have its `ref` field set to a reference.
When `Postgrex.execute/4` is called with the prepared query,
it always returns a query. If the `ref` field in the query
given to `execute` and the one returned are the same, it
means the cached prepared query was used. If the `ref` field
is not the same, it means the query had to be re-prepared.
"""
@type t :: %__MODULE__{
cache: :reference | :statement,
ref: reference | nil,
name: iodata,
statement: iodata,
param_oids: [Postgrex.Types.oid] | nil,
param_formats: [:binary | :text] | nil,
param_types: [Postgrex.Types.type] | nil,
columns: [String.t] | nil,
result_oids: [Postgrex.Types.oid] | nil,
result_formats: [:binary | :text] | nil,
result_types: [Postgrex.Types.type] | nil,
types: Postgrex.Types.state | nil}
defstruct [:ref, :name, :statement, :param_oids, :param_formats, :param_types,
:columns, :result_oids, :result_formats, :result_types, :types, cache: :reference]
end
defimpl DBConnection.Query, for: Postgrex.Query do
  require Postgrex.Messages

  # A query may only be parsed before preparation (`types` still nil); the
  # name is flattened to a binary so prepared-query table lookups can match.
  def parse(%{types: nil, name: name} = query, _) do
    # for query table to match names must be equal
    %{query | name: IO.iodata_to_binary(name)}
  end

  # Clause order matters: any query with `types` set was already prepared.
  def parse(query, _) do
    raise ArgumentError, "query #{inspect query} has already been prepared"
  end

  def describe(query, _), do: query

  # Encoding needs the type info gathered during prepare; `types: nil` means
  # the query was never prepared, which is a caller bug.
  def encode(%{types: nil} = query, _params, _) do
    raise ArgumentError, "query #{inspect query} has not been prepared"
  end

  def encode(query, params, _) do
    %{param_types: param_types, types: types} = query

    case Postgrex.Types.encode_params(params, param_types, types) do
      encoded when is_list(encoded) ->
        encoded

      :error ->
        # encode_params/3 returned :error: the params could not be encoded
        # against the prepared types (the message reports a length mismatch).
        raise ArgumentError,
          "parameters must be of length #{length param_types} for query #{inspect query}"
    end
  end

  # Results without rows (e.g. non-SELECT statements) pass through untouched.
  def decode(_, %Postgrex.Result{rows: nil} = res, _opts) do
    res
  end

  def decode(_, %Postgrex.Result{rows: rows} = res, opts) do
    %Postgrex.Result{res | rows: decode_map(rows, opts)}
  end

  # COPY operations carry no decodable row data.
  def decode(_, %Postgrex.Copy{} = copy, _opts) do
    copy
  end

  ## Helpers

  # Both branches reverse the row list (rows are presumably accumulated in
  # reverse order upstream — confirm in the protocol code). With a
  # :decode_mapper option the mapping and the reversal happen in one pass.
  defp decode_map(data, opts) do
    case opts[:decode_mapper] do
      nil -> Enum.reverse(data)
      mapper -> decode_map(data, mapper, [])
    end
  end

  defp decode_map([row | data], mapper, decoded) do
    decode_map(data, mapper, [mapper.(row) | decoded])
  end

  defp decode_map([], _, decoded) do
    decoded
  end
end
defimpl String.Chars, for: Postgrex.Query do
  # Render a query as its SQL statement text.
  def to_string(%Postgrex.Query{statement: iodata}) do
    IO.iodata_to_binary(iodata)
  end
end
| 30.378641 | 89 | 0.660914 |
79616c237883b924b58ab4e12271f02f0add91c0 | 3,298 | ex | Elixir | clients/app_engine/lib/google_api/app_engine/v1/model/static_files_handler.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/app_engine/lib/google_api/app_engine/v1/model/static_files_handler.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/app_engine/lib/google_api/app_engine/v1/model/static_files_handler.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AppEngine.V1.Model.StaticFilesHandler do
  @moduledoc """
  Files served directly to the user for a given URL, such as images, CSS stylesheets, or JavaScript source files. Static file handlers describe which files in the application directory are static files, and which URLs serve them.

  ## Attributes

  - applicationReadable (boolean()): Whether files should also be uploaded as code data. By default, files declared in static file handlers are uploaded as static data and are only served to end users; they cannot be read by the application. If enabled, uploads are charged against both your code and static data storage resource quotas. Defaults to: `null`.
  - expiration (String.t): Time a static file served by this handler should be cached by web proxies and browsers. Defaults to: `null`.
  - httpHeaders (%{optional(String.t) => String.t}): HTTP headers to use for all responses from these URLs. Defaults to: `null`.
  - mimeType (String.t): MIME type used to serve all files served by this handler.Defaults to file-specific MIME types, which are derived from each file's filename extension. Defaults to: `null`.
  - path (String.t): Path to the static files matched by the URL pattern, from the application root directory. The path can refer to text matched in groupings in the URL pattern. Defaults to: `null`.
  - requireMatchingFile (boolean()): Whether this handler should match the request if the file referenced by the handler does not exist. Defaults to: `null`.
  - uploadPathRegex (String.t): Regular expression that matches the file paths for all files that should be referenced by this handler. Defaults to: `null`.
  """

  # Supplies the `field/1,2` macro used below — presumably it defines the
  # struct keys and the Poison (de)serialization rules; confirm against
  # GoogleApi.Gax.ModelBase.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :applicationReadable => any(),
          :expiration => any(),
          :httpHeaders => map(),
          :mimeType => any(),
          :path => any(),
          :requireMatchingFile => any(),
          :uploadPathRegex => any()
        }

  field(:applicationReadable)
  field(:expiration)
  field(:httpHeaders, type: :map)
  field(:mimeType)
  field(:path)
  field(:requireMatchingFile)
  field(:uploadPathRegex)
end
defimpl Poison.Decoder, for: GoogleApi.AppEngine.V1.Model.StaticFilesHandler do
  # Decoding is handled by the generated model module itself.
  def decode(data, opts), do: GoogleApi.AppEngine.V1.Model.StaticFilesHandler.decode(data, opts)
end
defimpl Poison.Encoder, for: GoogleApi.AppEngine.V1.Model.StaticFilesHandler do
  # Encoding is shared across all generated models via Gax.ModelBase.
  def encode(data, opts), do: GoogleApi.Gax.ModelBase.encode(data, opts)
end
| 49.969697 | 358 | 0.739842 |
796182ecf564113662dd71c5be556e79be1fac8e | 85 | ex | Elixir | apps/institute_web/lib/institute_web/views/auth_view.ex | hui-ad/institute | 28242d9d324d710a0e70678ec2d79099f1d3a98d | [
"MIT"
] | 4 | 2019-06-12T19:05:34.000Z | 2019-08-18T15:02:56.000Z | apps/institute_web/lib/institute_web/views/auth_view.ex | hui-ad/institute | 28242d9d324d710a0e70678ec2d79099f1d3a98d | [
"MIT"
] | 33 | 2019-06-12T18:59:21.000Z | 2021-03-31T15:45:22.000Z | apps/institute_web/lib/institute_web/views/auth_view.ex | hui-ad/institute | 28242d9d324d710a0e70678ec2d79099f1d3a98d | [
"MIT"
] | 1 | 2019-06-16T09:38:08.000Z | 2019-06-16T09:38:08.000Z | defmodule InstituteWeb.AuthView do
@moduledoc false
use InstituteWeb, :view
end
| 14.166667 | 34 | 0.788235 |
79619b12d750501a818ee50b6028e842eb04f1b1 | 2,115 | ex | Elixir | clients/dataflow/lib/google_api/dataflow/v1b3/model/streaming_computation_config.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/streaming_computation_config.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/streaming_computation_config.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Dataflow.V1b3.Model.StreamingComputationConfig do
  @moduledoc """
  Configuration information for a single streaming computation.

  ## Attributes

  - computationId (String.t): Unique identifier for this computation. Defaults to: `null`.
  - instructions ([ParallelInstruction]): Instructions that comprise the computation. Defaults to: `null`.
  - stageName (String.t): Stage name of this computation. Defaults to: `null`.
  - systemName (String.t): System defined name for this computation. Defaults to: `null`.
  """

  # Supplies the `field/1,2` macro below — presumably defines the struct keys
  # and Poison (de)serialization rules; confirm against GoogleApi.Gax.ModelBase.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :computationId => any(),
          :instructions => list(GoogleApi.Dataflow.V1b3.Model.ParallelInstruction.t()),
          :stageName => any(),
          :systemName => any()
        }

  field(:computationId)
  # Each instruction is itself a generated model, decoded element-wise.
  field(:instructions, as: GoogleApi.Dataflow.V1b3.Model.ParallelInstruction, type: :list)
  field(:stageName)
  field(:systemName)
end
defimpl Poison.Decoder, for: GoogleApi.Dataflow.V1b3.Model.StreamingComputationConfig do
  # Decoding is handled by the generated model module itself.
  def decode(data, opts),
    do: GoogleApi.Dataflow.V1b3.Model.StreamingComputationConfig.decode(data, opts)
end
defimpl Poison.Encoder, for: GoogleApi.Dataflow.V1b3.Model.StreamingComputationConfig do
  # Encoding is shared across all generated models via Gax.ModelBase.
  def encode(data, opts), do: GoogleApi.Gax.ModelBase.encode(data, opts)
end
| 37.105263 | 106 | 0.744208 |
7961a97e162ccbd333ac1592ff4c85b7156e5b9e | 915 | ex | Elixir | lib/teslamate_web/channels/liveview_socket.ex | siomiz/teslamate | 3a17ad6ed8a1c62cd34dcbc828b7e4cd53b3d2bb | [
"MIT"
] | null | null | null | lib/teslamate_web/channels/liveview_socket.ex | siomiz/teslamate | 3a17ad6ed8a1c62cd34dcbc828b7e4cd53b3d2bb | [
"MIT"
] | null | null | null | lib/teslamate_web/channels/liveview_socket.ex | siomiz/teslamate | 3a17ad6ed8a1c62cd34dcbc828b7e4cd53b3d2bb | [
"MIT"
] | null | null | null | defmodule TeslaMateWeb.LiveViewSocket do
@moduledoc """
The LiveView socket for Phoenix Endpoints.
Switch to Phoenix.LiveView.Socket if log opts are accepted.
https://github.com/phoenixframework/phoenix_live_view/blob/master/lib/phoenix_live_view/socket.ex
"""
use Phoenix.Socket, log: :debug
defstruct id: nil,
endpoint: nil,
parent_pid: nil,
assigns: %{},
changed: %{},
fingerprints: {nil, %{}},
private: %{},
stopped: nil,
connected?: false
channel "lv:*", Phoenix.LiveView.Channel
@doc """
Connects the Phoenix.Socket for a LiveView client.
"""
@impl Phoenix.Socket
# Accept every LiveView client connection unconditionally.
def connect(_params, %Phoenix.Socket{} = socket, _connect_info), do: {:ok, socket}
@doc """
Identifies the Phoenix.Socket for a LiveView client.
"""
@impl Phoenix.Socket
# No per-user socket id: individual sockets cannot be broadcast-disconnected.
def id(_socket) do
  nil
end
end
| 24.72973 | 99 | 0.630601 |
7961baed3ecc73e078a28a2acb720bf4cf1232ae | 1,697 | ex | Elixir | lib/ex_binance/spot/private/query_order.ex | rupurt/ex_binance | fa3fe75878758eef73508c72533e881fbf6ea0d7 | [
"MIT"
] | 7 | 2019-04-12T12:46:53.000Z | 2021-06-29T19:43:41.000Z | lib/ex_binance/spot/private/query_order.ex | rupurt/ex_binance | fa3fe75878758eef73508c72533e881fbf6ea0d7 | [
"MIT"
] | 18 | 2020-05-25T09:03:58.000Z | 2021-06-24T19:44:29.000Z | lib/ex_binance/spot/private/query_order.ex | rupurt/ex_binance | fa3fe75878758eef73508c72533e881fbf6ea0d7 | [
"MIT"
] | 12 | 2019-08-08T09:20:59.000Z | 2021-04-12T19:19:43.000Z | defmodule ExBinance.Spot.Private.QueryOrder do
import ExBinance.Rest.SpotClient, only: [get: 3]
alias ExBinance.Rest.SpotClient
alias ExBinance.Spot.Private.Responses
alias ExBinance.{Timestamp, Credentials}
@type symbol :: String.t()
@type order_id :: String.t()
@type client_order_id :: String.t()
@type credentials :: Credentials.t()
@type response :: Responses.QueryOrderResponse.t()
@type error_msg :: String.t()
@type error_reason :: {:not_found, error_msg} | SpotClient.shared_errors()
@path "/api/v3/order"
@receiving_window 5000
@doc """
Fetches a single spot order's status from Binance by exchange-assigned id.

Issues a signed GET to `/api/v3/order` with the symbol, order id, current
timestamp and the 5000ms receive window, then normalizes the result through
`parse_response/1`: `{:ok, %Responses.QueryOrderResponse{}}` on success,
`{:error, {:not_found, msg}}` when Binance reports code -2013, or any shared
client error otherwise.
"""
@spec query_order_by_order_id(symbol, order_id, credentials) ::
        {:ok, response} | {:error, error_reason}
def query_order_by_order_id(symbol, order_id, credentials) do
  # NOTE(review): Binance documents the window parameter as `recvWindow`
  # (camelCase); confirm ExBinance.Rest.SpotClient renames `recv_window`
  # before signing/sending the request.
  params = %{
    symbol: symbol,
    orderId: order_id,
    timestamp: Timestamp.now(),
    recv_window: @receiving_window
  }

  @path
  |> get(params, credentials)
  |> parse_response()
end
@doc """
Fetches a single spot order's status from Binance by the caller-supplied
client order id, normalizing the result through `parse_response/1`.
"""
@spec query_order_by_client_order_id(symbol, client_order_id, credentials) ::
        {:ok, response} | {:error, error_reason}
def query_order_by_client_order_id(symbol, client_order_id, credentials) do
  request_params = %{
    symbol: symbol,
    origClientOrderId: client_order_id,
    timestamp: Timestamp.now(),
    recv_window: @receiving_window
  }

  parse_response(get(@path, request_params, credentials))
end
  # Success: wrap the raw payload in a typed response struct.
  defp parse_response({:ok, response}) do
    {:ok, Responses.QueryOrderResponse.new(response)}
  end
  # Binance error code -2013 ("Order does not exist") is surfaced as :not_found.
  # Clause order matters: this must precede the generic error clause below.
  defp parse_response({:error, {:binance_error, %{"code" => -2013, "msg" => msg}}}) do
    {:error, {:not_found, msg}}
  end
  # Any other error is passed through untouched.
  defp parse_response({:error, _} = error) do
    error
  end
end
| 28.283333 | 86 | 0.680024 |
7961c4c7911225e3cb0359ca4cced033d2e95dde | 1,215 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/projects_disable_xpn_resource_request.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/projects_disable_xpn_resource_request.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/projects_disable_xpn_resource_request.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.ProjectsDisableXpnResourceRequest do
  @moduledoc """
  Auto-generated request model: names the shared-VPC (XPN) service resource to
  disassociate from a host project.
  """
  @derive [Poison.Encoder]
  defstruct [
    # Serialized as "xpnResource" in the JSON payload.
    :"xpnResource"
  ]
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.ProjectsDisableXpnResourceRequest do
  import GoogleApi.Compute.V1.Deserializer

  # Decodes the nested "xpnResource" attribute into its XpnResourceId struct.
  def decode(value, options) do
    deserialize(value, :"xpnResource", :struct, GoogleApi.Compute.V1.Model.XpnResourceId, options)
  end
end
| 31.153846 | 94 | 0.757202 |
7961ddb5f3c9ef7c877ee50d82843f8c934f8a7c | 30,501 | ex | Elixir | lib/adb/stage.ex | javobalazs/adb | 2965704fea9025976842ebd25f93b9cec642cb19 | [
"Beerware"
] | null | null | null | lib/adb/stage.ex | javobalazs/adb | 2965704fea9025976842ebd25f93b9cec642cb19 | [
"Beerware"
] | 2 | 2019-06-15T08:03:31.000Z | 2019-06-15T08:09:11.000Z | lib/adb/stage.ex | javobalazs/adb | 2965704fea9025976842ebd25f93b9cec642cb19 | [
"Beerware"
] | null | null | null | alias ADB.Mlmap
alias ADB.Stage
alias ADB.Rule
defmodule Stage do
  @vsn "0.1.0"
  require Mlmap
  require Logger
  require Util
  # Injects the `>>>` "arrow assignment" operator used throughout this module
  # (binds the left-hand expression's result to the right-hand pattern).
  Util.arrow_assignment()
  @moduledoc """
  Staged, multi-indexed view of the database used while a rule runs.

  ```elixir
  merge(orig, diff) = start
  merge(start, stage) = internal
  ```

  `@vsn "#{@vsn}"`
  """
  # ---- struct -------------------------------------------------------------
  # Field families come in three flavours (X in 1, 2, 12):
  #   origX     - snapshot taken at construction time
  #   diffX     - diff between origX and currentX
  #   currentX  - state the rule started from (set to internalX on construction)
  #   stageX    - diff accumulated by this run (against currentX)
  #   internalX - currentX with the stage applied; the view reads/writes go to
  # The "1" maps are keyed [map, key | path], "2" is the inverted [key, map | path]
  # index, and "12" is the pairwise [{map, key} | path] index.
  defstruct stage1: %{},
            stage2: %{},
            stage12: %{},
            diff1: nil,
            diff2: nil,
            diff12: nil,
            orig1: nil,
            orig2: nil,
            orig12: nil,
            current1: nil,
            current2: nil,
            current12: nil,
            name: nil,
            rule_ver: 0,
            last: 0,
            internal1: nil,
            internal2: nil,
            internal12: nil,
            last_mod1: nil,
            last_mod2: nil,
            last_mod12: nil,
            real: true,
            keep: true,
            pid: nil,
            burst: :cpu
  @typedoc """
  ```elixir
  merge(orig, diff) = start
  merge(start, stage) = internal
  ```
  """
  @type t :: %__MODULE__{
          stage1: Mlmap.t_diff(),
          stage2: Mlmap.t_diff(),
          stage12: Mlmap.t_diff(),
          diff1: Mlmap.t_diff(),
          diff2: Mlmap.t_diff(),
          diff12: Mlmap.t_diff(),
          orig1: Mlmap.t(),
          orig2: Mlmap.t(),
          orig12: Mlmap.t(),
          current1: Mlmap.t(),
          current2: Mlmap.t(),
          current12: Mlmap.t(),
          name: String.t(),
          rule_ver: Integer.t(),
          last: Integer.t(),
          internal1: Mlmap.t(),
          internal2: Mlmap.t(),
          internal12: Mlmap.t(),
          last_mod1: %{String.t() => Integer.t()},
          last_mod2: %{String.t() => Integer.t()},
          last_mod12: %{{String.t(), String.t()} => Integer.t()},
          real: Boolean.t(),
          keep: Boolean.t(),
          pid: String.t(),
          burst: Rule.burst()
        }
  # Non-nil (:iden) requests mirroring a write into the inverted index
  # (internal2/stage2); nil skips it — see put/4.
  @type iden :: :iden | nil
  # Names of the map-valued struct fields accepted by the *m traversal helpers.
  @type mapname ::
          :stage1
          | :stage2
          | :stage12
          | :diff1
          | :diff2
          | :diff12
          | :orig1
          | :orig2
          | :orig12
          | :current1
          | :current2
          | :current12
          | :internal1
          | :internal2
          | :internal12
  # ---- constructor --------------------------------------------------------
  # Macro form of the constructor: builds a %Stage{} from the given snapshots.
  # Note that currentX is initialized to internalX (stages start empty) and
  # `keep` mirrors `real`.
  defmacro mconstructor(orig1, orig2, orig12, diff1, diff2, diff12, name, rule_ver, last, internal1, internal2, internal12, last_mod1, last_mod2, last_mod12, real, pid, burst) do
    mod = __MODULE__
    quote do
      %unquote(mod){
        orig1: unquote(orig1),
        orig2: unquote(orig2),
        orig12: unquote(orig12),
        diff1: unquote(diff1),
        diff2: unquote(diff2),
        diff12: unquote(diff12),
        name: unquote(name),
        rule_ver: unquote(rule_ver),
        last: unquote(last),
        internal1: unquote(internal1),
        internal2: unquote(internal2),
        internal12: unquote(internal12),
        last_mod1: unquote(last_mod1),
        last_mod2: unquote(last_mod2),
        last_mod12: unquote(last_mod12),
        current1: unquote(internal1),
        current2: unquote(internal2),
        current12: unquote(internal12),
        real: unquote(real),
        keep: unquote(real),
        pid: unquote(pid),
        burst: unquote(burst)
      }
    end
  end
@spec constructor(
orig1 :: Mulmap.t(),
orig2 :: Mulmap.t(),
orig12 :: Mulmap.t(),
diff1 :: Mulmap.t_diff(),
diff2 :: Mulmap.t_diff(),
diff12 :: Mulmap.t_diff(),
name :: Mulmap.iden(),
rule_ver :: Integer.t(),
last :: Integer.t(),
internal1 :: Mulmap.t(),
internal2 :: Mulmap.t(),
internal12 :: Mulmap.t(),
last_mod1 :: %{String.t() => Integer.t()},
last_mod2 :: %{String.t() => Integer.t()},
last_mod12 :: %{{String.t(), String.t()} => Integer.t()},
real :: Boolean.t(),
pid :: String.t(),
burst :: Rule.burst()
) :: t
def constructor(orig1, orig2, orig12, diff1, diff2, diff12, name, rule_ver, last, internal1, internal2, internal12, last_mod1, last_mod2, last_mod12, real, pid, burst) do
mconstructor(orig1, orig2, orig12, diff1, diff2, diff12, name, rule_ver, last, internal1, internal2, internal12, last_mod1, last_mod2, last_mod12, real, pid, burst)
end
@spec get(t, [any], any) :: any
def get(s, lst, defa \\ :undefined) do
Mlmap.get(s.internal1, lst, defa)
end
@spec getm(t, mapname, [any], any) :: any
def getm(s, map, lst, defa \\ :undefined) do
mp = Map.get(s, map)
Mlmap.get(mp, lst, defa)
end
@doc """
Ha valodi szabaly fut, akkor felpattintsa-e a szabaly verzioszamat (`true`, default), vagy nem.
"""
@spec set_keep(t, Boolean.t()) :: t
def set_keep(s, keep), do: %{s | keep: keep}
###### ######## ## ## ######## ######
## ## ## ## ## ## ##
## ## ## ## ## ## ##
## ######## ## ## ## ##
## ## ## ## ## ##
## ## ## ## ## ##
###### ## ####### ## ######
@spec put(t, [any], any, iden) :: t
def put(s, lst, val, iden \\ nil) do
# orig -diff-> start -stage-> internal
# Logger.warn("put #{inspect({lst, val, iden})}")
case Mlmap.supdate(s.internal1, lst, val) do
:bump ->
s
:undefined ->
%{s | internal1: %{}, stage1: :undefined, internal2: %{}, stage2: :undefined, internal12: %{}, stage12: :undefined}
{internal1, lst} ->
stage1 = Mlmap.dupdate(s.current1, s.stage1, lst, val)
case lst do
[map, key | rest] ->
# XXX HIBA sajnos ha nem dupla ez, attol meg lehet, hogy hozza kell nyulni a dupla vagy forditott indexhez.
# XXX Pelda: ["x", "y"] => "z", ahol "y" fut. Ha ezek utan ["x"] => :undefined, akkor az osszes "y"-t torolni kell.
lst12 = [{map, key} | rest]
# Itt ennek jonak kell lennie, nem lehet :undefined vagy :bump...
{internal12, lst12} = Mlmap.supdate(s.internal12, lst12, val)
stage12 = Mlmap.dupdate(s.current12, s.stage12, lst12, val)
if iden != nil do
lst2 = [key, map | rest]
case Mlmap.supdate(s.internal2, lst2, val) do
:bump ->
{s.internal2, s.stage2, internal12, stage12}
:undefined ->
{%{}, :undefined, internal12, stage12}
{internal2, lst2} ->
stage2 = Mlmap.dupdate(s.current12, s.stage2, lst2, val)
{internal2, stage2, internal12, stage12}
end
else
{s.internal2, s.stage2, internal12, stage12}
end
_ ->
{s.internal2, s.stage2, s.internal12, s.stage12}
end >>> {internal2, stage2, internal12, stage12}
%{s | internal1: internal1, stage1: stage1, internal2: internal2, stage2: stage2, internal12: internal12, stage12: stage12}
end
end
  @spec pipeput([{[any], any, iden}], t) :: t
  # Pipe-friendly argument order for put/2.
  def pipeput(lstlst, s), do: put(s, lstlst)
  @spec put(t, [{[any], any, iden}]) :: t
  # Batch form of put/4: applies a list of {path, value, iden} writes, threading
  # all six index/stage accumulators through a single reduce.
  def put(s, lstlst) do
    # orig -diff-> current -stage-> internal
    current1 = s.current1
    current2 = s.current2
    current12 = s.current12
    lstlst
    |> Enum.reduce(
      {s.internal1, s.stage1, s.internal2, s.stage2, s.internal12, s.stage12},
      fn {lst, val, iden}, {internal1, stage1, internal2, stage2, internal12, stage12} ->
        # Logger.warn("putl #{inspect({lst, val, iden})}")
        case Mlmap.supdate(internal1, lst, val) do
          :bump ->
            {internal1, stage1, internal2, stage2, internal12, stage12}
          :undefined ->
            {%{}, :undefined, %{}, :undefined, %{}, :undefined}
          {internal1, ulst} ->
            stage1 = Mlmap.dupdate(current1, stage1, ulst, val)
            case lst do
              [map, key | rest] ->
                lst12 = [{map, key} | rest]
                # This update must succeed here; it cannot return :undefined or :bump.
                {internal12, lst12} = Mlmap.supdate(internal12, lst12, val)
                stage12 = Mlmap.dupdate(current12, stage12, lst12, val)
                if iden != nil do
                  lst2 = [key, map | rest]
                  case Mlmap.supdate(internal2, lst2, val) do
                    :bump ->
                      {internal1, stage1, internal2, stage2, internal12, stage12}
                    :undefined ->
                      {internal1, stage1, %{}, :undefined, internal12, stage12}
                    {internal2, lst2} ->
                      stage2 = Mlmap.dupdate(current2, stage2, lst2, val)
                      {internal1, stage1, internal2, stage2, internal12, stage12}
                  end
                else
                  {internal1, stage1, internal2, stage2, internal12, stage12}
                end
              _ ->
                {internal1, stage1, internal2, stage2, internal12, stage12}
            end
        end
      end
    ) >>> {internal1, stage1, internal2, stage2, internal12, stage12}
    %{s | internal1: internal1, stage1: stage1, internal2: internal2, stage2: stage2, internal12: internal12, stage12: stage12}
  end
###### ## ## ######## ######## ###### ######## ######
## ### ### ## ## ## ## ## ## ##
## #### #### ## ## ## ## ## ##
## ## ### ## ###### ######## ## #### ###### ##
## ## ## ## ## ## ## ## ## ##
## ## ## ## ## ## ## ## ## ##
###### ## ## ######## ## ## ###### ######## ######
  @doc """
  Merges a diff into the staged state — the value must be a diff, not a plain
  map. Scalar leaves are routed to `put/2`, two-level map entries to
  `merge_2level/2`.
  """
  @spec merge(t, [any], Map.t(), iden) :: t
  def merge(s, lst, val, iden) do
    # Logger.warn("mer #{inspect({lst, val, iden})}")
    # Split the diff into l1 (plain puts) and l2 (two-level merge ops),
    # depending on how deep the anchor path `lst` already is.
    {l1, l2} =
      case lst do
        [map, key | rest] ->
          {[], [{map, key, rest, val, iden}]}
        [map] ->
          Enum.reduce(val, {[], []}, fn {key, v}, {acc1, acc2} ->
            Mlmap.casemap v do
              {acc1, [{map, key, [], v, iden} | acc2]}
            else
              {[{[map, key], v, iden} | acc1], acc2}
            end
          end)
        _ ->
          Enum.reduce(val, {[], []}, fn {map, v2}, {acc1, acc2} ->
            Mlmap.casemap v2 do
              Enum.reduce(v2, {acc1, acc2}, fn {key, v}, {acc1x, acc2x} ->
                Mlmap.casemap(v, do: {acc1x, [{map, key, [], v, iden} | acc2x]}, else: {[{[map, key], v, iden} | acc1x], acc2x})
              end)
            else
              {[{[map], v2, iden} | acc1], acc2}
            end
          end)
      end
    s = put(s, l1)
    s = merge_2level(s, l2)
    s
  end
  @spec merge_2level(t, [{any, any, [any], Map.t(), iden}]) :: t
  # Applies {map, key, path, diff, iden} merge operations whose anchor is at
  # least two levels deep, keeping all three index families and their stage
  # diffs in sync (same threading scheme as put/2).
  def merge_2level(s, ops) do
    current1 = s.current1
    current2 = s.current2
    current12 = s.current12
    Enum.reduce(ops, {s.internal1, s.stage1, s.internal2, s.stage2, s.internal12, s.stage12}, fn {map, key, lst, val, iden}, {internal1, stage1, internal2, stage2, internal12, stage12} ->
      ulst = [map, key | lst]
      case Mlmap.smerdate(internal1, ulst, val) do
        :bump ->
          # No-op merge.
          {internal1, stage1, internal2, stage2, internal12, stage12}
        {:undefined, _, _} ->
          # Whole tree erased: reset and invalidate everything.
          {%{}, :undefined, %{}, :undefined, %{}, :undefined}
        {internal1, ulst, nval} ->
          # :bump from smerdate means "value unchanged"; fall back to the input diff.
          nval = Util.wife(nval, nval == :bump, do: val)
          case nval do
            :undefined -> Mlmap.dupdate(current1, stage1, ulst, :undefined)
            _ -> Mlmap.dmerdate(current1, stage1, ulst, nval)
          end >>> stage1
          lst12 = [{map, key} | lst]
          case nval do
            :undefined ->
              {internal12, lst12} = Mlmap.supdate(internal12, lst12, :undefined)
              stage12 = Mlmap.dupdate(current12, stage12, lst12, :undefined)
              if iden != nil do
                lst2 = [key, map | lst]
                case Mlmap.supdate(internal2, lst2, :undefined) do
                  :bump ->
                    {internal1, stage1, internal2, stage2, internal12, stage12}
                  :undefined ->
                    {internal1, stage1, %{}, :undefined, internal12, stage12}
                  {internal2, lst2} ->
                    stage2 = Mlmap.dupdate(current2, stage2, lst2, :undefined)
                    {internal1, stage1, internal2, stage2, internal12, stage12}
                end
              else
                {internal1, stage1, internal2, stage2, internal12, stage12}
              end
            _ ->
              {internal12, lst12} = Mlmap.smerdate_n(internal12, lst12, nval)
              stage12 = Mlmap.dmerdate(current12, stage12, lst12, nval)
              if iden != nil do
                lst2 = [key, map | lst]
                case Mlmap.smerdate(internal2, lst2, nval) do
                  :bump ->
                    {internal1, stage1, internal2, stage2, internal12, stage12}
                  {:undefined, _, _} ->
                    {internal1, stage1, %{}, :undefined, internal12, stage12}
                  {internal2, lst2, nnval} ->
                    nnval = Util.wife(nnval, nnval == :bump, do: nval)
                    case nnval do
                      :undefined -> Mlmap.dupdate(current2, stage2, lst2, :undefined)
                      _ -> Mlmap.dmerdate(current2, stage2, lst2, nnval)
                    end >>> stage2
                    {internal1, stage1, internal2, stage2, internal12, stage12}
                end
              else
                {internal1, stage1, internal2, stage2, internal12, stage12}
              end
          end
      end
    end) >>> {internal1, stage1, internal2, stage2, internal12, stage12}
    %{s | internal1: internal1, stage1: stage1, internal2: internal2, stage2: stage2, internal12: internal12, stage12: stage12}
  end
###### ###### ####### ## ## ######## #### ## ## ######## ######## ######
## ## ## ## ## ### ### ## ## ## ### ## ## ## ## ##
## ## ## ## #### #### ## ## ## #### ## ## ## ## ##
## ## ## ## ## ### ## ######## ## ## ## ## ###### ## ## ##
## ## ## ## ## ## ## ## ## ## #### ## ## ## ##
## ## ## ## ## ## ## ## ## ## ## ### ## ## ## ##
###### ###### ####### ## ## ######## #### ## ## ######## ######## ######
  @spec bulk(t, [{:merge, [any], Map.t(), iden} | {[any], any, iden} | {any, any, [any], Map.t(), iden}]) :: t
  # Mixed batch entry point: 3-tuples are plain puts, 5-tuples are preprocessed
  # two-level merge ops, and {:merge, path, diff, iden} 4-tuples are split the
  # same way merge/4 does.
  def bulk(s, lstlst) do
    # Logger.warn("mer #{inspect({lst, val, iden})}")
    lstlst
    |> Enum.reduce({[], []}, fn x, {l1, l2} ->
      case x do
        {_lst, _val, _iden} ->
          {[x | l1], l2}
        {_map, _key, _lst, _val, _iden} ->
          {l1, [x | l2]}
        {:merge, lst, val, iden} ->
          case lst do
            [map, key | rest] ->
              {l1, [{map, key, rest, val, iden} | l2]}
            [map] ->
              Enum.reduce(val, {l1, l2}, fn {key, v}, {acc1, acc2} ->
                Mlmap.casemap v do
                  {acc1, [{map, key, [], v, iden} | acc2]}
                else
                  {[{[map, key], v, iden} | acc1], acc2}
                end
              end)
            _ ->
              Enum.reduce(val, {l1, l2}, fn {map, v2}, {acc1, acc2} ->
                Mlmap.casemap v2 do
                  Enum.reduce(v2, {acc1, acc2}, fn {key, v}, {acc1x, acc2x} ->
                    Mlmap.casemap(v, do: {acc1x, [{map, key, [], v, iden} | acc2x]}, else: {[{[map, key], v, iden} | acc1x], acc2x})
                  end)
                else
                  {[{[map], v2, iden} | acc1], acc2}
                end
              end)
          end
      end
    end) >>> {l1, l2}
    s = put(s, l1)
    s = merge_2level(s, l2)
    s
  end
###### ## ## ### ######## ## ######## ######## ######## ## ## ###### ######## ######
## ### ### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
## #### #### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
## ## ### ## ## ## ######## ## ######## ###### ## ## ## ## ## ###### ##
## ## ## ######### ## ## ## ## ## ## ## ## ## ## ## ##
## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
###### ## ## ## ## ## ## ## ## ######## ######## ####### ###### ######## ######
  # Traversal delegates (1-level callbacks). The *m variants take an explicit
  # index name; the short forms default to :internal1. track*/full* walk
  # (orig1, diff1, current1); the n-prefixed forms walk (current1, stage1,
  # internal1), i.e. only the changes of the current run.
  @spec mapm(t, mapname, [any], (any, any -> any)) :: [any]
  def mapm(s, mapname, lst, fnc), do: Map.get(s, mapname) |> Mlmap.map(lst, fnc)
  @spec map(t, [any], (any, any -> any)) :: [any]
  def map(s, lst, fnc), do: mapm(s, :internal1, lst, fnc)
  @spec reducem(t, mapname, [any], a, (any, any, a -> a)) :: a when a: var
  def reducem(s, mapname, lst, acc, fnc), do: Map.get(s, mapname) |> Mlmap.reduce(lst, acc, fnc)
  @spec reduce(t, [any], a, (any, any, a -> a)) :: a when a: var
  def reduce(s, lst, acc, fnc), do: reducem(s, :internal1, lst, acc, fnc)
  @spec reducem_while(t, mapname, [any], a, (any, any, a -> {:cont, a} | {:halt, a})) :: a when a: var
  def reducem_while(s, mapname, lst, acc, fnc), do: Map.get(s, mapname) |> Mlmap.reduce_while(lst, acc, fnc)
  @spec reduce_while(t, [any], a, (any, any, a -> {:cont, a} | {:halt, a})) :: a when a: var
  def reduce_while(s, lst, acc, fnc), do: reducem_while(s, :internal1, lst, acc, fnc)
  @spec full(t, [any], Mlmap.fulfun()) :: [any]
  def full(s, lst, fnc), do: Mlmap.full(s.orig1, s.diff1, s.current1, lst, fnc)
  @spec track(t, [any], Mlmap.mapfun()) :: [any]
  def track(s, lst, fnc), do: Mlmap.track(s.orig1, s.diff1, s.current1, lst, fnc)
  @spec track_reduce(t, [any], a, Mlmap.redfun(a)) :: a when a: var
  def track_reduce(s, lst, acc, fnc), do: Mlmap.track_reduce(s.orig1, s.diff1, s.current1, lst, acc, fnc)
  @spec track_reduce_while(t, [any], a, Mlmap.red_while_fun(a)) :: a when a: var
  def track_reduce_while(s, lst, acc, fnc), do: Mlmap.track_reduce_while(s.orig1, s.diff1, s.current1, lst, acc, fnc)
  @spec nfull(t, [any], Mlmap.fulfun()) :: [any]
  def nfull(s, lst, fnc), do: Mlmap.full(s.current1, s.stage1, s.internal1, lst, fnc)
  @spec ntrack(t, [any], Mlmap.mapfun()) :: [any]
  def ntrack(s, lst, fnc), do: Mlmap.track(s.current1, s.stage1, s.internal1, lst, fnc)
  @spec ntrack_reduce(t, [any], a, Mlmap.redfun(a)) :: a when a: var
  def ntrack_reduce(s, lst, acc, fnc), do: Mlmap.track_reduce(s.current1, s.stage1, s.internal1, lst, acc, fnc)
  @spec ntrack_reduce_while(t, [any], a, Mlmap.red_while_fun(a)) :: a when a: var
  def ntrack_reduce_while(s, lst, acc, fnc), do: Mlmap.track_reduce_while(s.current1, s.stage1, s.internal1, lst, acc, fnc)
###### ## ## ### ######## ####### ## ######## ######## ######## ## ## ###### ######## ####### ######
## ### ### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
## #### #### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
## ## ### ## ## ## ######## ####### ## ######## ###### ## ## ## ## ## ###### ####### ##
## ## ## ######### ## ## ## ## ## ## ## ## ## ## ## ## ## ##
## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
###### ## ## ## ## ## ######### ## ## ## ######## ######## ####### ###### ######## ######### ######
  # Same delegate family as above, for 2-level callbacks (two path components
  # are passed to the callback).
  @spec mapm2(t, mapname, [any], (any, any, any -> any)) :: [any]
  def mapm2(s, mapname, lst, fnc), do: Map.get(s, mapname) |> Mlmap.map2(lst, fnc)
  @spec map2(t, [any], (any, any, any -> any)) :: [any]
  def map2(s, lst, fnc), do: mapm2(s, :internal1, lst, fnc)
  @spec reducem2(t, mapname, [any], a, (any, any, any, a -> a)) :: a when a: var
  def reducem2(s, mapname, lst, acc, fnc), do: Map.get(s, mapname) |> Mlmap.reduce2(lst, acc, fnc)
  @spec reduce2(t, [any], a, (any, any, any, a -> a)) :: a when a: var
  def reduce2(s, lst, acc, fnc), do: reducem2(s, :internal1, lst, acc, fnc)
  @spec reducem_while2(t, mapname, [any], a, (any, any, any, a -> {:cont, a} | {:halt, a})) :: a when a: var
  def reducem_while2(s, mapname, lst, acc, fnc), do: Map.get(s, mapname) |> Mlmap.reduce_while2(lst, acc, fnc)
  @spec reduce_while2(t, [any], a, (any, any, any, a -> {:cont, a} | {:halt, a})) :: a when a: var
  def reduce_while2(s, lst, acc, fnc), do: reducem_while2(s, :internal1, lst, acc, fnc)
  @spec track2(t, [any], Mlmap.mapfun2()) :: [any]
  def track2(s, lst, fnc), do: Mlmap.track2(s.orig1, s.diff1, s.current1, lst, fnc)
  @spec track_reduce2(t, [any], a, Mlmap.redfun2(a)) :: a when a: var
  def track_reduce2(s, lst, acc, fnc), do: Mlmap.track_reduce2(s.orig1, s.diff1, s.current1, lst, acc, fnc)
  @spec track_reduce_while2(t, [any], a, Mlmap.red_while_fun2(a)) :: a when a: var
  def track_reduce_while2(s, lst, acc, fnc), do: Mlmap.track_reduce_while2(s.orig1, s.diff1, s.current1, lst, acc, fnc)
  @spec ntrack2(t, [any], Mlmap.mapfun2()) :: [any]
  def ntrack2(s, lst, fnc), do: Mlmap.track2(s.current1, s.stage1, s.internal1, lst, fnc)
  @spec ntrack_reduce2(t, [any], a, Mlmap.redfun2(a)) :: a when a: var
  def ntrack_reduce2(s, lst, acc, fnc), do: Mlmap.track_reduce2(s.current1, s.stage1, s.internal1, lst, acc, fnc)
  @spec ntrack_reduce_while2(t, [any], a, Mlmap.red_while_fun2(a)) :: a when a: var
  def ntrack_reduce_while2(s, lst, acc, fnc), do: Mlmap.track_reduce_while2(s.current1, s.stage1, s.internal1, lst, acc, fnc)
###### ## ## ### ######## ####### ## ######## ######## ######## ## ## ###### ######## ####### ######
## ### ### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
## #### #### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
## ## ### ## ## ## ######## ####### ## ######## ###### ## ## ## ## ## ###### ####### ##
## ## ## ######### ## ## ## ## ## ## ## ## ## ## ## ## ## ##
## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
###### ## ## ## ## ## ####### ## ## ## ######## ######## ####### ###### ######## ####### ######
  # Same delegate family, for 3-level callbacks.
  @spec mapm3(t, mapname, [any], (any, any, any, any -> any)) :: [any]
  def mapm3(s, mapname, lst, fnc), do: Map.get(s, mapname) |> Mlmap.map3(lst, fnc)
  @spec map3(t, [any], (any, any, any, any -> any)) :: [any]
  def map3(s, lst, fnc), do: mapm3(s, :internal1, lst, fnc)
  @spec reducem3(t, mapname, [any], a, (any, any, any, any, a -> a)) :: a when a: var
  def reducem3(s, mapname, lst, acc, fnc), do: Map.get(s, mapname) |> Mlmap.reduce3(lst, acc, fnc)
  @spec reduce3(t, [any], a, (any, any, any, any, a -> a)) :: a when a: var
  def reduce3(s, lst, acc, fnc), do: reducem3(s, :internal1, lst, acc, fnc)
  @spec reducem_while3(t, mapname, [any], a, (any, any, any, any, a -> {:cont, a} | {:halt, a})) :: a when a: var
  def reducem_while3(s, mapname, lst, acc, fnc), do: Map.get(s, mapname) |> Mlmap.reduce_while3(lst, acc, fnc)
  @spec reduce_while3(t, [any], a, (any, any, any, any, a -> {:cont, a} | {:halt, a})) :: a when a: var
  def reduce_while3(s, lst, acc, fnc), do: reducem_while3(s, :internal1, lst, acc, fnc)
  @spec track3(t, [any], Mlmap.mapfun3()) :: [any]
  def track3(s, lst, fnc), do: Mlmap.track3(s.orig1, s.diff1, s.current1, lst, fnc)
  @spec track_reduce3(t, [any], a, Mlmap.redfun3(a)) :: a when a: var
  def track_reduce3(s, lst, acc, fnc), do: Mlmap.track_reduce3(s.orig1, s.diff1, s.current1, lst, acc, fnc)
  @spec track_reduce_while3(t, [any], a, Mlmap.red_while_fun3(a)) :: a when a: var
  def track_reduce_while3(s, lst, acc, fnc), do: Mlmap.track_reduce_while3(s.orig1, s.diff1, s.current1, lst, acc, fnc)
  @spec ntrack3(t, [any], Mlmap.mapfun3()) :: [any]
  def ntrack3(s, lst, fnc), do: Mlmap.track3(s.current1, s.stage1, s.internal1, lst, fnc)
  @spec ntrack_reduce3(t, [any], a, Mlmap.redfun3(a)) :: a when a: var
  def ntrack_reduce3(s, lst, acc, fnc), do: Mlmap.track_reduce3(s.current1, s.stage1, s.internal1, lst, acc, fnc)
  @spec ntrack_reduce_while3(t, [any], a, Mlmap.red_while_fun3(a)) :: a when a: var
  def ntrack_reduce_while3(s, lst, acc, fnc), do: Mlmap.track_reduce_while3(s.current1, s.stage1, s.internal1, lst, acc, fnc)
###### ## ## ### ######## ## ## ######## ######## ######## ## ## ###### ######## ## ######
## ### ### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
## #### #### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
## ## ### ## ## ## ######## ## ## ## ######## ###### ## ## ## ## ## ###### ## ## ##
## ## ## ######### ## ######### ## ## ## ## ## ## ## ## ## ## ######### ##
## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
###### ## ## ## ## ## ## ## ## ## ######## ######## ####### ###### ######## ## ######
  # Same delegate family, for 4-level callbacks.
  @spec mapm4(t, mapname, [any], (any, any, any, any, any -> any)) :: [any]
  def mapm4(s, mapname, lst, fnc), do: Map.get(s, mapname) |> Mlmap.map4(lst, fnc)
  @spec map4(t, [any], (any, any, any, any, any -> any)) :: [any]
  def map4(s, lst, fnc), do: mapm4(s, :internal1, lst, fnc)
  @spec reducem4(t, mapname, [any], a, (any, any, any, any, any, a -> a)) :: a when a: var
  def reducem4(s, mapname, lst, acc, fnc), do: Map.get(s, mapname) |> Mlmap.reduce4(lst, acc, fnc)
  @spec reduce4(t, [any], a, (any, any, any, any, any, a -> a)) :: a when a: var
  def reduce4(s, lst, acc, fnc), do: reducem4(s, :internal1, lst, acc, fnc)
  @spec reducem_while4(t, mapname, [any], a, (any, any, any, any, any, a -> {:cont, a} | {:halt, a})) :: a when a: var
  def reducem_while4(s, mapname, lst, acc, fnc), do: Map.get(s, mapname) |> Mlmap.reduce_while4(lst, acc, fnc)
  @spec reduce_while4(t, [any], a, (any, any, any, any, any, a -> {:cont, a} | {:halt, a})) :: a when a: var
  def reduce_while4(s, lst, acc, fnc), do: reducem_while4(s, :internal1, lst, acc, fnc)
  @spec track4(t, [any], Mlmap.mapfun4()) :: [any]
  def track4(s, lst, fnc), do: Mlmap.track4(s.orig1, s.diff1, s.current1, lst, fnc)
  @spec track_reduce4(t, [any], a, Mlmap.redfun4(a)) :: a when a: var
  def track_reduce4(s, lst, acc, fnc), do: Mlmap.track_reduce4(s.orig1, s.diff1, s.current1, lst, acc, fnc)
  @spec track_reduce_while4(t, [any], a, Mlmap.red_while_fun4(a)) :: a when a: var
  def track_reduce_while4(s, lst, acc, fnc), do: Mlmap.track_reduce_while4(s.orig1, s.diff1, s.current1, lst, acc, fnc)
  @spec ntrack4(t, [any], Mlmap.mapfun4()) :: [any]
  def ntrack4(s, lst, fnc), do: Mlmap.track4(s.current1, s.stage1, s.internal1, lst, fnc)
  @spec ntrack_reduce4(t, [any], a, Mlmap.redfun4(a)) :: a when a: var
  def ntrack_reduce4(s, lst, acc, fnc), do: Mlmap.track_reduce4(s.current1, s.stage1, s.internal1, lst, acc, fnc)
  @spec ntrack_reduce_while4(t, [any], a, Mlmap.red_while_fun4(a)) :: a when a: var
  def ntrack_reduce_while4(s, lst, acc, fnc), do: Mlmap.track_reduce_while4(s.current1, s.stage1, s.internal1, lst, acc, fnc)
# defmodule
end
| 44.986726 | 187 | 0.418839 |
7961e358270dc0bf435ecd851ba411249eab06a7 | 1,202 | ex | Elixir | apps/kitsune_aws_core/lib/kitsune/aws/config_provider/application_config.ex | shirayukikitsune/ex_aws | c56063fa986b173f160155dfb5185d1881989d0a | [
"BSD-2-Clause"
] | 1 | 2021-07-07T12:45:55.000Z | 2021-07-07T12:45:55.000Z | apps/kitsune_aws_core/lib/kitsune/aws/config_provider/application_config.ex | shirayukikitsune/ex_aws | c56063fa986b173f160155dfb5185d1881989d0a | [
"BSD-2-Clause"
] | null | null | null | apps/kitsune_aws_core/lib/kitsune/aws/config_provider/application_config.ex | shirayukikitsune/ex_aws | c56063fa986b173f160155dfb5185d1881989d0a | [
"BSD-2-Clause"
] | null | null | null | defmodule Kitsune.Aws.ConfigProvider.ApplicationConfig do
  @moduledoc """
  Configuration provider that loads AWS settings from the application
  environment (i.e. `Config`). All configuration lives under the
  `:kitsune_aws` key.

  ## Example

      import Config

      config :kitsune_aws,
        secret_key: "MY AWS SECRET KEY",
        access_key: "MY AWS ACCESS KEY",
        default_region: "us-east-1"
  """
  @behaviour Kitsune.Aws.ConfigProvider
@doc """
Populates the `Kitsune.Aws.Config` struct with values from application configuration
"""
@spec load() :: Kitsune.Aws.Config.t()
def load() do
access_key = get_access_key()
secret_key = get_secret_key()
region = get_default_region()
case {access_key,secret_key,region} do
{nil,nil,nil} -> nil
_ -> %Kitsune.Aws.Config{
access_key: access_key,
secret_key: secret_key,
default_region: region
}
end
end
defp get_secret_key(), do: Application.get_env(:kitsune_aws, :secret_key)
defp get_access_key(), do: Application.get_env(:kitsune_aws, :access_key)
defp get_default_region(), do: Application.get_env(:kitsune_aws, :default_region)
end
| 27.953488 | 89 | 0.678037 |
7961e3bf10b30ae740f2b0912225954e2be6aec0 | 511 | ex | Elixir | lib/ex_led_web/channels/controller_socket.ex | zastrixarundell/ex_led | e826c71abed8b0f28d523590fb72796968898b36 | [
"WTFPL"
] | null | null | null | lib/ex_led_web/channels/controller_socket.ex | zastrixarundell/ex_led | e826c71abed8b0f28d523590fb72796968898b36 | [
"WTFPL"
] | null | null | null | lib/ex_led_web/channels/controller_socket.ex | zastrixarundell/ex_led | e826c71abed8b0f28d523590fb72796968898b36 | [
"WTFPL"
] | null | null | null | defmodule ExLedWeb.ControllerSocket do
use Phoenix.Socket
@secret_auth_key Application.get_env(:ex_led, __MODULE__, %{})[:secret_auth_key]
channel "controller:*", ExLedWeb.ControllerChannel
def connect(params, socket, _connect_info) do
require Logger
if params["secret_key"] == @secret_auth_key do
{:ok, socket}
else
Logger.error "Someone tried to connect with this secret key: '#{params["secret_key"]}' but failed"
:error
end
end
  # nil socket id: connections are anonymous and cannot be disconnected by id.
  def id(_socket), do: nil
end
| 24.333333 | 104 | 0.702544 |
7961f7827979e1d6bb3ca8455146ac5daa77e20c | 3,075 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/subscription.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/subscription.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/subscription.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.YouTube.V3.Model.Subscription do
  @moduledoc """
  A subscription resource contains information about a YouTube user subscription. A subscription notifies a user when new videos are added to a channel or when another user takes one of several actions on YouTube, such as uploading a video, rating a video, or commenting on a video.

  ## Attributes

  *   `contentDetails` (*type:* `GoogleApi.YouTube.V3.Model.SubscriptionContentDetails.t`, *default:* `nil`) - The contentDetails object contains basic statistics about the subscription.
  *   `etag` (*type:* `String.t`, *default:* `nil`) - Etag of this resource.
  *   `id` (*type:* `String.t`, *default:* `nil`) - The ID that YouTube uses to uniquely identify the subscription.
  *   `kind` (*type:* `String.t`, *default:* `youtube#subscription`) - Identifies what kind of resource this is. Value: the fixed string "youtube#subscription".
  *   `snippet` (*type:* `GoogleApi.YouTube.V3.Model.SubscriptionSnippet.t`, *default:* `nil`) - The snippet object contains basic details about the subscription, including its title and the channel that the user subscribed to.
  *   `subscriberSnippet` (*type:* `GoogleApi.YouTube.V3.Model.SubscriptionSubscriberSnippet.t`, *default:* `nil`) - The subscriberSnippet object contains basic details about the sbuscriber.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :contentDetails => GoogleApi.YouTube.V3.Model.SubscriptionContentDetails.t(),
          :etag => String.t(),
          :id => String.t(),
          :kind => String.t(),
          :snippet => GoogleApi.YouTube.V3.Model.SubscriptionSnippet.t(),
          :subscriberSnippet => GoogleApi.YouTube.V3.Model.SubscriptionSubscriberSnippet.t()
        }

  # field/1,2 registers each JSON attribute with the Gax model machinery;
  # the :as option names the nested model used during (de)serialization.
  field(:contentDetails, as: GoogleApi.YouTube.V3.Model.SubscriptionContentDetails)
  field(:etag)
  field(:id)
  field(:kind)
  field(:snippet, as: GoogleApi.YouTube.V3.Model.SubscriptionSnippet)
  field(:subscriberSnippet, as: GoogleApi.YouTube.V3.Model.SubscriptionSubscriberSnippet)
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.Subscription do
  # Delegates to the model's generated decode/2 (from GoogleApi.Gax.ModelBase).
  def decode(value, options), do: GoogleApi.YouTube.V3.Model.Subscription.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.Subscription do
  # All Gax models share ModelBase's generic encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 49.596774 | 282 | 0.736911 |
796204069d8bccf640d741e31007ccf8211bb56b | 382 | ex | Elixir | lib/domain/schema/song.ex | ideaMarcos/portishead | 0ad09af305e1e663c0d080a9637b8420d0f40fd1 | [
"MIT"
] | 3 | 2020-03-09T04:29:49.000Z | 2020-07-14T16:09:50.000Z | lib/domain/schema/song.ex | ideaMarcos/portishead | 0ad09af305e1e663c0d080a9637b8420d0f40fd1 | [
"MIT"
] | null | null | null | lib/domain/schema/song.ex | ideaMarcos/portishead | 0ad09af305e1e663c0d080a9637b8420d0f40fd1 | [
"MIT"
defmodule Portishead.Schema.Song do
  @moduledoc """
  Ecto schema for a song belonging to a band.
  """
  use Portishead.Schema.Common

  schema "song" do
    field :title, :string
    field :band_uuid, Ecto.UUID
    field :external_id, Ecto.UUID, autogenerate: true
  end

  @doc """
  Builds a changeset for creating or updating a song.

  Casts the user-supplied fields and requires a title.
  """
  def changeset(%__MODULE__{} = song, params \\ %{}) do
    song
    # :metadata is not declared in this schema — presumably contributed by
    # Portishead.Schema.Common; verify before relying on it.
    |> cast(params, [:title, :band_uuid, :external_id, :metadata])
    # BUG FIX: previously required :name, which is not a field on this
    # schema (Ecto raises on unknown fields); :title is the intended field.
    |> validate_required([:title])
  end
end
| 23.875 | 66 | 0.672775 |
79620ee50354bf02898207e143a3b77b4515d1e8 | 519 | ex | Elixir | lib/absinthe_subscriptions_web/views/error_view.ex | egjimenezg/absinthe_subscriptions | 49d05d9ce141c9de5d1cfabe21452d2bde8c49d9 | [
"Apache-2.0"
] | null | null | null | lib/absinthe_subscriptions_web/views/error_view.ex | egjimenezg/absinthe_subscriptions | 49d05d9ce141c9de5d1cfabe21452d2bde8c49d9 | [
"Apache-2.0"
] | null | null | null | lib/absinthe_subscriptions_web/views/error_view.ex | egjimenezg/absinthe_subscriptions | 49d05d9ce141c9de5d1cfabe21452d2bde8c49d9 | [
"Apache-2.0"
defmodule AbsintheSubscriptionsWeb.ErrorView do
  use AbsintheSubscriptionsWeb, :view

  # Individual status codes/formats (e.g. "500.html") can be customized by
  # adding explicit render/2 clauses here; anything without one falls
  # through to template_not_found/2 below.

  # Derives the response body from the template name, so "404.html"
  # renders as "Not Found".
  def template_not_found(template_name, _assigns) do
    Phoenix.Controller.status_message_from_template(template_name)
  end
end
| 30.529412 | 61 | 0.749518 |
7962359a79ce994e7378864ff47e1b6a37108269 | 2,786 | exs | Elixir | test/ex_pesa/Mpesa/stk_test.exs | MidigoF/ex_pesa | d2a62e72c67084385609c895e52a6ac52e8a9a8a | [
"AML",
"MIT"
] | 20 | 2020-08-07T18:45:03.000Z | 2021-12-02T12:47:07.000Z | test/ex_pesa/Mpesa/stk_test.exs | MidigoF/ex_pesa | d2a62e72c67084385609c895e52a6ac52e8a9a8a | [
"AML",
"MIT"
] | 65 | 2020-08-17T05:52:33.000Z | 2021-05-20T16:06:34.000Z | test/ex_pesa/Mpesa/stk_test.exs | MidigoF/ex_pesa | d2a62e72c67084385609c895e52a6ac52e8a9a8a | [
"AML",
"MIT"
] | 11 | 2020-08-17T07:53:02.000Z | 2021-04-02T20:57:16.000Z | defmodule ExPesa.Mpesa.StkTest do
  @moduledoc false
  use ExUnit.Case, async: true

  import Tesla.Mock

  doctest ExPesa.Mpesa.Stk

  alias ExPesa.Mpesa.Stk

  # Stub every Safaricom sandbox endpoint the STK flow touches:
  # OAuth token generation, STK push initiation, and STK push query.
  setup do
    mock(fn
      %{
        url: "https://sandbox.safaricom.co.ke/oauth/v1/generate?grant_type=client_credentials",
        method: :get
      } ->
        %Tesla.Env{
          status: 200,
          body: %{
            "access_token" => "SGWcJPtNtYNPGm6uSYR9yPYrAI3Bm",
            "expires_in" => "3599"
          }
        }

      %{url: "https://sandbox.safaricom.co.ke/mpesa/stkpush/v1/processrequest", method: :post} ->
        %Tesla.Env{
          status: 200,
          body:
            Jason.encode!(%{
              "CheckoutRequestID" => "ws_CO_010320202011179845",
              "CustomerMessage" => "Success. Request accepted for processing",
              "MerchantRequestID" => "25558-10595705-4",
              "ResponseCode" => "0",
              "ResponseDescription" => "Success. Request accepted for processing"
            })
        }

      %{url: "https://sandbox.safaricom.co.ke/mpesa/stkpushquery/v1/query", method: :post} ->
        %Tesla.Env{
          status: 200,
          body:
            Jason.encode!(%{
              "CheckoutRequestID" => "ws_CO_260820202102496165",
              "MerchantRequestID" => "11130-78831728-4",
              "ResponseCode" => "0",
              "ResponseDescription" => "The service request has been accepted successsfully",
              "ResultCode" => "1032",
              "ResultDesc" => "Request cancelled by user"
            })
        }
    end)

    :ok
  end

  describe "Mpesa STK Push/ Validate Transaction" do
    test "request/1 should Initiate STK with required parameters" do
      request_details = %{
        amount: 10,
        phone: "254724540000",
        reference: "reference",
        description: "description"
      }

      {:ok, result} = Stk.request(request_details)
      assert result["CheckoutRequestID"] == "ws_CO_010320202011179845"
      assert result["ResponseCode"] == "0"
    end
  end

  # NOTE(review): the tests below sit outside the describe block above;
  # grouping them would be cosmetic only.
  test "request/1 should error out without required parameter" do
    {:error, result} = Stk.request(%{})
    "Required Parameters missing, 'phone, 'amount', 'reference', 'description'" = result
  end

  test "validate/1 should validate transaction successfully" do
    {:ok, result} = Stk.validate(%{checkout_request_id: "ws_CO_260820202102496165"})
    assert result["CheckoutRequestID"] == "ws_CO_260820202102496165"
    assert result["ResponseCode"] == "0"
    assert result["ResultDesc"] == "Request cancelled by user"
  end

  test "validate/1 should error out without required parameter" do
    {:error, result} = Stk.validate(%{})
    "Required Parameter missing, 'CheckoutRequestID'" = result
  end
end
| 30.955556 | 97 | 0.599067 |
7963023f81200ac20040bddc75fe725e06d9ddda | 910 | ex | Elixir | create_fun_umbrella/apps/create_fun_endpoint/lib/create_fun_endpoint/application.ex | Vorzious/CreateFun | 5744c913ef706bc29062fa90a8ec5de12d267dab | [
"MIT"
] | null | null | null | create_fun_umbrella/apps/create_fun_endpoint/lib/create_fun_endpoint/application.ex | Vorzious/CreateFun | 5744c913ef706bc29062fa90a8ec5de12d267dab | [
"MIT"
] | 9 | 2018-06-17T09:54:03.000Z | 2018-06-17T09:55:20.000Z | create_fun_umbrella/apps/create_fun_endpoint/lib/create_fun_endpoint/application.ex | Vorzious/CreateFun | 5744c913ef706bc29062fa90a8ec5de12d267dab | [
"MIT"
] | 1 | 2018-06-05T18:38:01.000Z | 2018-06-05T18:38:01.000Z | defmodule CreateFunEndpoint.Application do
  use Application

  # OTP application callback: builds and starts the supervision tree.
  #
  # NOTE(review): Supervisor.Spec (supervisor/2, worker/3) is deprecated in
  # modern Elixir; child specs like {CreateFunEndpoint.Endpoint, []} are the
  # current idiom. Left unchanged here to preserve behavior.
  def start(_type, _args) do
    import Supervisor.Spec
    # Define workers and child supervisors to be supervised
    children = [
      # Start the endpoint when the application starts
      supervisor(CreateFunEndpoint.Endpoint, [])
      # Start your own worker by calling: CreateFunEndpoint.Worker.start_link(arg1, arg2, arg3)
      # worker(CreateFunEndpoint.Worker, [arg1, arg2, arg3]),
    ]
    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: CreateFunEndpoint.Supervisor]
    Supervisor.start_link(children, opts)
  end

  # Tell Phoenix to update the endpoint configuration
  # whenever the application is updated.
  def config_change(changed, _new, removed) do
    CreateFunEndpoint.Endpoint.config_change(changed, removed)
    :ok
  end
end
| 32.5 | 95 | 0.734066 |
796329285f6bdef0d2396ed93e8ab963b5e697f5 | 259 | ex | Elixir | lib/kids_chain/pipeline_instrumenter.ex | arpnetwork/kids_chain | f320a6468c78ae7e1727d72ad3ede1befbdf931c | [
"Apache-2.0"
] | null | null | null | lib/kids_chain/pipeline_instrumenter.ex | arpnetwork/kids_chain | f320a6468c78ae7e1727d72ad3ede1befbdf931c | [
"Apache-2.0"
] | null | null | null | lib/kids_chain/pipeline_instrumenter.ex | arpnetwork/kids_chain | f320a6468c78ae7e1727d72ad3ede1befbdf931c | [
"Apache-2.0"
defmodule KidsChain.PipelineInstrumenter do
  @moduledoc """
  Prometheus instrumentation for the Plug pipeline.

  Collapses per-user paths into one label value so metrics are not
  exploded across every individual user id.
  """
  use Prometheus.PlugPipelineInstrumenter

  # Any "/users/<id>" path is reported under the fixed label "/users/UID";
  # all other paths are reported verbatim.
  def label_value(:request_path, %{request_path: "/users/" <> _user_id}), do: "/users/UID"
  def label_value(:request_path, conn), do: conn.request_path
end
| 21.583333 | 59 | 0.725869 |
79632fa05cc5cabc0bf86c756c7d1def1f966e2d | 802 | ex | Elixir | test/support/mint/http1/test_server.ex | ryochin/mint | 1ea7731d921840ad75a47fa9415525c3d94b9980 | [
"Apache-2.0"
] | 477 | 2019-10-28T14:53:23.000Z | 2022-03-30T08:13:21.000Z | test/support/mint/http1/test_server.ex | ryochin/mint | 1ea7731d921840ad75a47fa9415525c3d94b9980 | [
"Apache-2.0"
] | 108 | 2019-11-05T04:21:04.000Z | 2022-02-24T18:36:15.000Z | test/support/mint/http1/test_server.ex | ryochin/mint | 1ea7731d921840ad75a47fa9415525c3d94b9980 | [
"Apache-2.0"
] | 56 | 2019-11-10T01:19:56.000Z | 2022-03-25T18:08:06.000Z | defmodule Mint.HTTP1.TestServer do
def start do
{:ok, listen_socket} = :gen_tcp.listen(0, mode: :binary, packet: :raw)
server_ref = make_ref()
parent = self()
spawn_link(fn -> loop(listen_socket, parent, server_ref) end)
with {:ok, port} <- :inet.port(listen_socket) do
{:ok, port, server_ref}
end
end
defp loop(listen_socket, parent, server_ref) do
case :gen_tcp.accept(listen_socket) do
{:ok, socket} ->
send(parent, {server_ref, socket})
# :einval started showing up with Erlang 23 and Ubuntu 18.
case :gen_tcp.controlling_process(socket, parent) do
:ok -> :ok
{:error, :einval} -> :ok
end
loop(listen_socket, parent, server_ref)
{:error, :closed} ->
:ok
end
end
end
| 25.0625 | 74 | 0.608479 |
79637700f71c3468c8f200090033314e5bb9e548 | 2,301 | ex | Elixir | clients/cloud_trace/lib/google_api/cloud_trace/v2/model/attributes.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_trace/lib/google_api/cloud_trace/v2/model/attributes.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_trace/lib/google_api/cloud_trace/v2/model/attributes.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudTrace.V2.Model.Attributes do
  @moduledoc """
  A set of attributes, each in the format `[KEY]:[VALUE]`.

  ## Attributes

  *   `attributeMap` (*type:* `%{optional(String.t) => GoogleApi.CloudTrace.V2.Model.AttributeValue.t}`, *default:* `nil`) - The set of attributes. Each attribute's key can be up to 128 bytes long. The value can be a string up to 256 bytes, a signed 64-bit integer, or the Boolean values `true` and `false`. For example: "/instance_id": { "string_value": { "value": "my-instance" } } "/http/request_bytes": { "int_value": 300 } "abc.com/myattribute": { "bool_value": false }
  *   `droppedAttributesCount` (*type:* `integer()`, *default:* `nil`) - The number of attributes that were discarded. Attributes can be discarded because their keys are too long or because there are too many attributes. If this value is 0 then all attributes are valid.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :attributeMap =>
            %{optional(String.t()) => GoogleApi.CloudTrace.V2.Model.AttributeValue.t()} | nil,
          :droppedAttributesCount => integer() | nil
        }

  # Field declarations drive JSON (de)serialization via ModelBase.
  field(:attributeMap, as: GoogleApi.CloudTrace.V2.Model.AttributeValue, type: :map)
  field(:droppedAttributesCount)
end
defimpl Poison.Decoder, for: GoogleApi.CloudTrace.V2.Model.Attributes do
  # Delegate to the model module, which knows its own field types.
  def decode(attributes, opts),
    do: GoogleApi.CloudTrace.V2.Model.Attributes.decode(attributes, opts)
end
defimpl Poison.Encoder, for: GoogleApi.CloudTrace.V2.Model.Attributes do
  # Generic struct encoding is provided by the Gax model base.
  def encode(attributes, opts),
    do: GoogleApi.Gax.ModelBase.encode(attributes, opts)
end
| 45.117647 | 474 | 0.725337 |
79638ae50ebcbd38c8fa291ca6fbd3dd4eb376fe | 1,796 | exs | Elixir | apps/firestorm_data/test/firestorm_data/commands/create_reaction_test.exs | CircleCI-Public/firestorm | 9ca2c46a2b2377370347ad94d6003eeb77be38d6 | [
"MIT"
] | 10 | 2017-06-28T08:06:52.000Z | 2022-03-19T17:49:21.000Z | apps/firestorm_data/test/firestorm_data/commands/create_reaction_test.exs | CircleCI-Public/firestorm | 9ca2c46a2b2377370347ad94d6003eeb77be38d6 | [
"MIT"
] | null | null | null | apps/firestorm_data/test/firestorm_data/commands/create_reaction_test.exs | CircleCI-Public/firestorm | 9ca2c46a2b2377370347ad94d6003eeb77be38d6 | [
"MIT"
] | 2 | 2017-10-21T12:01:02.000Z | 2021-01-29T10:26:22.000Z | defmodule FirestormData.Commands.CreateReactionTest do
  @moduledoc false
  use FirestormData.UnitCase
  alias FirestormData.Commands.{CreateCategory, CreateThread, CreateReaction}
  alias FirestormData.{Thread, User, Repo, Post, Reaction}

  # Each test runs in a sandboxed DB connection.
  setup do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Repo)
  end

  describe "reacting to a post" do
    # NOTE(review): :create_user is not defined in this module — presumably
    # provided by FirestormData.UnitCase; verify.
    setup [:create_user, :create_category, :create_thread, :react_to_post]

    test "returns expected result", %{result: result} do
      assert {:ok, _some_id} = result
    end

    test "creates a reaction in the database", %{post_id: post_id} do
      post =
        Post
        |> Repo.get(post_id)
        |> Repo.preload(:reactions)

      assert 1 == length(post.reactions)
    end
  end

  # Setup helper: creates a category and puts its id into the test context.
  def create_category(_) do
    changeset =
      %CreateCategory{}
      |> CreateCategory.changeset(%{title: "some title"})

    {:ok, category_id} = CreateCategory.run(changeset)
    {:ok, category_id: category_id}
  end

  # Setup helper: creates a thread for the context's user and category.
  def create_thread(%{user_id: user_id, category_id: category_id}) do
    changeset =
      %CreateThread{}
      |> CreateThread.changeset(%{
        user_id: user_id,
        title: "Some thread",
        body: "Some body",
        category_id: category_id
      })

    {:ok, thread_id} = CreateThread.run(changeset)
    {:ok, thread_id: thread_id}
  end

  # Setup helper: reacts to the thread's first post and exposes the post id
  # and the command result to the tests.
  def react_to_post(%{user_id: user_id, thread_id: thread_id}) do
    require Ecto.Query

    thread =
      Thread
      |> Ecto.Query.preload(:posts)
      |> Ecto.Query.preload(:category)
      |> Repo.get(thread_id)

    [fp|_] = thread.posts

    changeset =
      %CreateReaction{}
      |> CreateReaction.changeset(%{user_id: user_id, post_id: fp.id, emoji: "thumbsup"})

    {:ok, post_id: fp.id, result: CreateReaction.run(changeset)}
  end
end
| 26.028986 | 89 | 0.648664 |
7963a9daf8ce1d5c0a785cd25affed2b58224e2e | 1,043 | ex | Elixir | lib/hl7/2.4/segments/gol.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.4/segments/gol.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.4/segments/gol.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | defmodule HL7.V2_4.Segments.GOL do
  @moduledoc false
  require Logger

  alias HL7.V2_4.{DataTypes}

  # GOL (goal detail) segment for HL7 v2.4. Each entry maps a field name to
  # its composite data type module; nil entries are plain string fields.
  use HL7.Segment,
    fields: [
      segment: nil,
      action_code: nil,
      action_date_time: DataTypes.Ts,
      goal_id: DataTypes.Ce,
      goal_instance_id: DataTypes.Ei,
      episode_of_care_id: DataTypes.Ei,
      goal_list_priority: nil,
      goal_established_date_time: DataTypes.Ts,
      expected_goal_achieve_date_time: DataTypes.Ts,
      goal_classification: DataTypes.Ce,
      goal_management_discipline: DataTypes.Ce,
      current_goal_review_status: DataTypes.Ce,
      current_goal_review_date_time: DataTypes.Ts,
      next_goal_review_date_time: DataTypes.Ts,
      previous_goal_review_date_time: DataTypes.Ts,
      goal_review_interval: DataTypes.Tq,
      goal_evaluation: DataTypes.Ce,
      goal_evaluation_comment: nil,
      goal_life_cycle_status: DataTypes.Ce,
      goal_life_cycle_status_date_time: DataTypes.Ts,
      goal_target_type: DataTypes.Ce,
      goal_target_name: DataTypes.Xpn
    ]
end
| 31.606061 | 53 | 0.728667 |
7963ac2b4057d968fe92b3599ffa45f7076f8d0d | 2,377 | exs | Elixir | mix.exs | bgentry/ecto_as_state_machine | 068ad13ec60147564f02ab4d0493763dcd758bbd | [
"MIT"
] | null | null | null | mix.exs | bgentry/ecto_as_state_machine | 068ad13ec60147564f02ab4d0493763dcd758bbd | [
"MIT"
] | null | null | null | mix.exs | bgentry/ecto_as_state_machine | 068ad13ec60147564f02ab4d0493763dcd758bbd | [
"MIT"
] | null | null | null | defmodule EctoAsStateMachine.Mixfile do
  use Mix.Project

  @project_url "https://github.com/cnsa/ecto_as_state_machine"
  @version "1.0.6"

  def project do
    [
      app: :ecto_as_state_machine,
      version: @version,
      elixir: "~> 1.3",
      elixirc_paths: elixirc_paths(Mix.env),
      build_embedded: Mix.env == :prod,
      start_permanent: Mix.env == :prod,
      aliases: aliases(),
      deps: deps(),
      source_url: @project_url,
      homepage_url: @project_url,
      description: "State machine pattern for Ecto. I tried to make it similar as possible to ruby's gem 'aasm'",
      package: package(),
      test_coverage: [tool: ExCoveralls],
      # Run all coverage-related tasks in the :test environment.
      preferred_cli_env: cli_env_for(:test, [
        "coveralls", "coveralls.detail", "coveralls.html", "coveralls.post",
      ]),
    ]
  end

  # Test builds also compile test/support helpers.
  defp elixirc_paths(:test), do: elixirc_paths() ++ ["test/support"]
  defp elixirc_paths(_), do: elixirc_paths()
  defp elixirc_paths(), do: ["lib"]

  def application do
    [
      applications: app_list(Mix.env),
    ]
  end

  # Extra runtime applications needed only under test.
  def app_list(:test), do: app_list() ++ [:ecto, :postgrex, :ex_machina]
  def app_list(_), do: app_list()
  def app_list, do: [:logger]

  defp deps do
    [
      {:ecto, "~> 2.0"},
      {:postgrex, ">= 0.0.0", only: :test},
      {:ex_machina, "~> 2.0", only: :test},
      {:ex_doc, "~> 0.11", only: :dev, runtime: false},
      {:earmark, ">= 0.0.0", only: :dev},
      {:ex_spec, "~> 2.0", only: :test},
      {:credo, "~> 0.8", only: [:dev, :test], runtime: false},
      {:excoveralls, "~> 0.5", only: :test},
    ]
  end

  # Builds [{:"task", env}, ...]. Task names arrive as strings, hence the
  # :"#{key}" atom conversion.
  defp cli_env_for(env, tasks) do
    Enum.reduce(tasks, [], fn(key, acc) -> Keyword.put(acc, :"#{key}", env) end)
  end

  defp package do
    [
      name: :ecto_as_state_machine,
      files: ["lib/ecto_as_state_machine.ex", "lib/ecto_as_state_machine/state.ex", "mix.exs", "README.md", "LICENSE.txt"],
      maintainers: ["Alexander Merkulov"],
      licenses: ["MIT"],
      links: %{
        "GitHub" => @project_url
      }
    ]
  end

  # Tags the current version and pushes tags upstream (used by the publish
  # and tag aliases below).
  defp git_tag(_args) do
    System.cmd "git", ["tag", "v" <> Mix.Project.config[:version]]
    System.cmd "git", ["push", "--tags"]
  end

  defp aliases do
    [test: ["ecto.drop --quiet", "ecto.create --quiet", "ecto.migrate", "test"],
     publish: ["hex.publish", "hex.publish docs", &git_tag/1],
     tag: [&git_tag/1]]
  end
end
| 28.638554 | 123 | 0.583088 |
7963ba9bb744dcfa62f29c086377e3d1179c0f29 | 2,279 | ex | Elixir | lib/guardian/db/token.ex | BenMorganIO/guardian_db | e16a4d76e6286c77e7b0097c6234be085eb23160 | [
"MIT"
] | null | null | null | lib/guardian/db/token.ex | BenMorganIO/guardian_db | e16a4d76e6286c77e7b0097c6234be085eb23160 | [
"MIT"
] | null | null | null | lib/guardian/db/token.ex | BenMorganIO/guardian_db | e16a4d76e6286c77e7b0097c6234be085eb23160 | [
"MIT"
] | null | null | null | defmodule Guardian.DB.Token do
@moduledoc """
A very simple model for storing tokens generated by `Guardian`.
"""
use Ecto.Schema
import Ecto.Changeset
import Ecto.Query, only: [where: 3]
alias Guardian.DB.Token
@primary_key {:jti, :string, autogenerate: false}
@allowed_fields ~w(jti typ aud iss sub exp jwt claims)a
schema "virtual: token" do
field(:typ, :string)
field(:aud, :string)
field(:iss, :string)
field(:sub, :string)
field(:exp, :integer)
field(:jwt, :string)
field(:claims, :map)
timestamps()
end
@doc """
Find one token by matching jti and aud.
"""
def find_by_claims(claims) do
jti = Map.get(claims, "jti")
aud = Map.get(claims, "aud")
query =
query_schema()
|> where([token], token.jti == ^jti and token.aud == ^aud)
|> Map.put(:prefix, prefix())
Guardian.DB.repo().one(query)
end
@doc """
Create a new token based on the JWT and decoded claims.
"""
def create(claims, jwt) do
prepared_claims =
claims
|> Map.put("jwt", jwt)
|> Map.put("claims", claims)
%Token{}
|> Ecto.put_meta(source: schema_name())
|> Ecto.put_meta(prefix: prefix())
|> cast(prepared_claims, @allowed_fields)
|> Guardian.DB.repo().insert()
end
@doc """
Purge any tokens that are expired. This should be done periodically to keep
your DB table clean of clutter.
"""
def purge_expired_tokens do
timestamp = Guardian.timestamp()
query_schema()
|> where([token], token.exp < ^timestamp)
|> Guardian.DB.repo().delete_all(prefix: prefix())
end
  @doc false
  # {source, schema} tuple so queries hit the configured table name.
  def query_schema do
    {schema_name(), Token}
  end

  @doc false
  # Table name from the :guardian app env (Guardian.DB key), defaulting to
  # "guardian_tokens".
  def schema_name do
    :guardian
    |> Application.fetch_env!(Guardian.DB)
    |> Keyword.get(:schema_name, "guardian_tokens")
  end

  @doc false
  # Optional query prefix (e.g. a Postgres schema); nil when unset.
  def prefix do
    :guardian
    |> Application.fetch_env!(Guardian.DB)
    |> Keyword.get(:prefix, nil)
  end
  @doc false
  # No stored token to revoke: treat as success.
  def destroy_token(nil, claims, jwt), do: {:ok, {claims, jwt}}

  def destroy_token(model, claims, jwt) do
    case Guardian.DB.repo().delete(model) do
      {:error, _} -> {:error, :could_not_revoke_token}
      # NOTE(review): Ecto's Repo.delete/2 returns {:ok, _} | {:error, _},
      # so this nil clause looks unreachable — confirm before removing.
      nil -> {:error, :could_not_revoke_token}
      _ -> {:ok, {claims, jwt}}
    end
  end
end
| 22.79 | 77 | 0.630101 |
7963be3eb74971aff301bc304cf897983a9b98e9 | 1,504 | ex | Elixir | lib/liveview_bindings_web/views/error_helpers.ex | rafalgolarz/liveview_bindings | c64fbf9661d5ff78c839daa7561549bbf6cbc731 | [
"MIT"
] | null | null | null | lib/liveview_bindings_web/views/error_helpers.ex | rafalgolarz/liveview_bindings | c64fbf9661d5ff78c839daa7561549bbf6cbc731 | [
"MIT"
] | 1 | 2021-03-10T09:32:06.000Z | 2021-03-10T09:32:06.000Z | lib/liveview_bindings_web/views/error_helpers.ex | rafalgolarz/liveview_bindings | c64fbf9661d5ff78c839daa7561549bbf6cbc731 | [
"MIT"
] | null | null | null | defmodule LiveviewBindingsWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
Enum.map(Keyword.get_values(form.errors, field), fn error ->
content_tag(:span, translate_error(error), class: "help-block")
end)
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# When using gettext, we typically pass the strings we want
# to translate as a static argument:
#
# # Translate "is invalid" in the "errors" domain
# dgettext("errors", "is invalid")
#
# # Translate the number of files with plural rules
# dngettext("errors", "1 file", "%{count} files", count)
#
# Because the error messages we show in our forms and APIs
# are defined inside Ecto, we need to translate them dynamically.
# This requires us to call the Gettext module passing our gettext
# backend as first argument.
#
# Note we use the "errors" domain, which means translations
# should be written to the errors.po file. The :count option is
# set by Ecto and indicates we should also apply plural rules.
if count = opts[:count] do
Gettext.dngettext(LiveviewBindingsWeb.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(LiveviewBindingsWeb.Gettext, "errors", msg, opts)
end
end
end
| 33.422222 | 85 | 0.676197 |
7963d406789348139c76166fb3366818612c183e | 16,105 | exs | Elixir | test/filter/filter_test.exs | ChristianTovar/ash | 66435322786c5d0b90a34051da969b68dcc8a045 | [
"MIT"
] | null | null | null | test/filter/filter_test.exs | ChristianTovar/ash | 66435322786c5d0b90a34051da969b68dcc8a045 | [
"MIT"
] | null | null | null | test/filter/filter_test.exs | ChristianTovar/ash | 66435322786c5d0b90a34051da969b68dcc8a045 | [
"MIT"
] | null | null | null | defmodule Ash.Test.Filter.FilterTest do
@moduledoc false
use ExUnit.Case, async: true
import Ash.Changeset
alias Ash.Filter
require Ash.Query
  # In-memory (ETS) test resource: a user profile with one private attribute.
  defmodule Profile do
    @moduledoc false
    use Ash.Resource, data_layer: Ash.DataLayer.Ets

    ets do
      private?(true)
    end

    actions do
      read :read
      create :create
      update :update
    end

    attributes do
      uuid_primary_key :id
      attribute :bio, :string
      # Private attribute: must be rejected by parse_input (exercised in
      # the "parse_input" tests in this file).
      attribute :private, :string, private?: true
    end

    relationships do
      belongs_to :user, Ash.Test.Filter.FilterTest.User
    end
  end
  # Test resource representing an author, with posts and a profile.
  defmodule User do
    @moduledoc false
    use Ash.Resource, data_layer: Ash.DataLayer.Ets

    ets do
      private?(true)
    end

    actions do
      read :read
      create :create
      update :update
    end

    attributes do
      uuid_primary_key :id
      attribute :name, :string
      attribute :allow_second_author, :boolean
      attribute :special, :boolean
    end

    relationships do
      has_many :posts, Ash.Test.Filter.FilterTest.Post, destination_field: :author1_id
      # NOTE(review): :second_posts uses the same destination_field as
      # :posts — looks intentional for these tests, but verify.
      has_many :second_posts, Ash.Test.Filter.FilterTest.Post, destination_field: :author1_id
      has_one :profile, Profile, destination_field: :user_id
    end
  end
  # Join resource for the Post <-> Post many-to-many relationship; the two
  # foreign keys form a composite primary key.
  defmodule PostLink do
    @moduledoc false
    use Ash.Resource, data_layer: Ash.DataLayer.Ets

    ets do
      private?(true)
    end

    actions do
      read :read
      create :create
      update :update
    end

    relationships do
      belongs_to :source_post, Ash.Test.Filter.FilterTest.Post,
        primary_key?: true,
        required?: true

      belongs_to :destination_post, Ash.Test.Filter.FilterTest.Post,
        primary_key?: true,
        required?: true
    end
  end
  # Test resource: a post with two author relationships, a filtered
  # relationship, and a many-to-many self reference through PostLink.
  defmodule Post do
    @moduledoc false
    use Ash.Resource, data_layer: Ash.DataLayer.Ets

    ets do
      private?(true)
    end

    actions do
      read :read
      create :create
      update :update
    end

    attributes do
      uuid_primary_key :id
      attribute :title, :string
      attribute :contents, :string
      attribute :points, :integer
      attribute :approved_at, :utc_datetime
      attribute :category, :ci_string
    end

    relationships do
      belongs_to :author1, User,
        destination_field: :id,
        source_field: :author1_id

      # Same storage field as :author1, but only resolves when the user is
      # special (exercised by the relationship-filter tests).
      belongs_to :special_author1, User,
        destination_field: :id,
        source_field: :author1_id,
        define_field?: false,
        filter: expr(special == true)

      belongs_to :author2, User,
        destination_field: :id,
        source_field: :author2_id

      many_to_many :related_posts, __MODULE__,
        through: PostLink,
        source_field_on_join_table: :source_post_id,
        destination_field_on_join_table: :destination_post_id
    end
  end
  # Resource with a base filter: soft-deleted rows (deleted_at set) are
  # excluded from every read (see the "base_filter" tests).
  defmodule SoftDeletePost do
    @moduledoc false
    use Ash.Resource, data_layer: Ash.DataLayer.Ets

    ets do
      private? true
    end

    resource do
      base_filter is_nil: :deleted_at
    end

    actions do
      read :read
      create :create

      # Soft destroy: stamps deleted_at instead of removing the row.
      destroy :destroy do
        soft? true
        change set_attribute(:deleted_at, &DateTime.utc_now/0)
      end
    end

    attributes do
      uuid_primary_key :id
      attribute :deleted_at, :utc_datetime
    end
  end
  # Minimal Ash API exposing every resource under test.
  defmodule Api do
    @moduledoc false
    use Ash.Api

    resources do
      resource(Post)
      resource(SoftDeletePost)
      resource(User)
      resource(Profile)
      resource(PostLink)
    end
  end
describe "predicate optimization" do
# Testing against the stringified query may be a bad idea, but its a quick win and we
# can switch to actually checking the structure if this bites us
test "equality simplifies to `in`" do
stringified_query =
Post
|> Ash.Query.filter(title == "foo" or title == "bar")
|> inspect()
assert stringified_query =~ ~S(title in ["bar", "foo"])
end
test "in with equality simplifies to `in`" do
stringified_query =
Post
|> Ash.Query.filter(title in ["foo", "bar", "baz"] or title == "bar")
|> inspect()
assert stringified_query =~ ~S(title in ["bar", "baz", "foo"])
end
test "in with non-equality simplifies to `in`" do
stringified_query =
Post
|> Ash.Query.filter(title in ["foo", "bar", "baz"] and title != "bar")
|> inspect()
assert stringified_query =~ ~S(title in ["baz", "foo"])
end
test "in with or-in simplifies to `in`" do
stringified_query =
Post
|> Ash.Query.filter(title in ["foo", "bar"] or title in ["bar", "baz"])
|> inspect()
assert stringified_query =~ ~S(title in ["bar", "baz", "foo"])
end
test "in with and-in simplifies to `in` when multiple values overlap" do
stringified_query =
Post
|> Ash.Query.filter(title in ["foo", "bar", "baz"] and title in ["bar", "baz", "bif"])
|> inspect()
assert stringified_query =~ ~S(title in ["bar", "baz"])
end
test "in with and-in simplifies to `eq` when one value overlaps" do
stringified_query =
Post
|> Ash.Query.filter(title in ["foo", "bar"] and title in ["bar", "baz", "bif"])
|> inspect()
assert stringified_query =~ ~S(title == "bar")
end
end
describe "simple attribute filters" do
setup do
post1 =
Post
|> new(%{title: "title1", contents: "contents1", points: 1})
|> Api.create!()
post2 =
Post
|> new(%{title: "title2", contents: "contents2", points: 2})
|> Api.create!()
%{post1: post1, post2: post2}
end
test "single filter field", %{post1: post1} do
assert [^post1] =
Post
|> Ash.Query.filter(title == ^post1.title)
|> Api.read!()
end
test "multiple filter field matches", %{post1: post1} do
assert [^post1] =
Post
|> Ash.Query.filter(title == ^post1.title and contents == ^post1.contents)
|> Api.read!()
end
test "no field matches" do
assert [] =
Post
|> Ash.Query.filter(title == "no match")
|> Api.read!()
end
test "no field matches single record, but each matches one record", %{
post1: post1,
post2: post2
} do
assert [] =
Post
|> Ash.Query.filter(title == ^post1.title and contents == ^post2.contents)
|> Api.read!()
end
test "less than works", %{
post1: post1,
post2: post2
} do
assert [^post1] =
Post
|> Ash.Query.filter(points < 2)
|> Api.read!()
assert [^post1, ^post2] =
Post
|> Ash.Query.filter(points < 3)
|> Ash.Query.sort(points: :asc)
|> Api.read!()
end
test "greater than works", %{
post1: post1,
post2: post2
} do
assert [^post2] =
Post
|> Ash.Query.filter(points > 1)
|> Api.read!()
assert [^post1, ^post2] =
Post
|> Ash.Query.filter(points > 0)
|> Ash.Query.sort(points: :asc)
|> Api.read!()
end
end
describe "relationship filters" do
setup do
post1 =
Post
|> new(%{title: "title1", contents: "contents1", points: 1})
|> Api.create!()
post2 =
Post
|> new(%{title: "title2", contents: "contents2", points: 2})
|> Api.create!()
post3 =
Post
|> new(%{title: "title3", contents: "contents3", points: 3})
|> replace_relationship(:related_posts, [post1, post2])
|> Api.create!()
post4 =
Post
|> new(%{title: "title4", contents: "contents4", points: 4})
|> replace_relationship(:related_posts, [post3])
|> Api.create!()
profile1 =
Profile
|> new(%{bio: "dope"})
|> Api.create!()
user1 =
User
|> new(%{name: "broseph"})
|> replace_relationship(:posts, [post1, post2])
|> replace_relationship(:profile, profile1)
|> Api.create!()
user2 =
User
|> new(%{name: "broseph", special: false})
|> replace_relationship(:posts, [post2])
|> Api.create!()
profile2 =
Profile
|> new(%{bio: "dope2"})
|> replace_relationship(:user, user2)
|> Api.create!()
%{
post1: Api.reload!(post1),
post2: Api.reload!(post2),
post3: Api.reload!(post3),
post4: Api.reload!(post4),
profile1: Api.reload!(profile1),
user1: Api.reload!(user1),
user2: Api.reload!(user2),
profile2: Api.reload!(profile2)
}
end
test "filtering on a has_one relationship", %{profile2: profile2, user2: %{id: user2_id}} do
assert [%{id: ^user2_id}] =
User
|> Ash.Query.filter(profile == ^profile2.id)
|> Api.read!()
end
test "filtering on a belongs_to relationship", %{profile1: %{id: id}, user1: user1} do
assert [%{id: ^id}] =
Profile
|> Ash.Query.filter(user == ^user1.id)
|> Api.read!()
end
test "filtering on a has_many relationship", %{user2: %{id: user2_id}, post2: post2} do
assert [%{id: ^user2_id}] =
User
|> Ash.Query.filter(posts == ^post2.id)
|> Api.read!()
end
test "filtering on a many_to_many relationship", %{post4: %{id: post4_id}, post3: post3} do
assert [%{id: ^post4_id}] =
Post
|> Ash.Query.filter(related_posts == ^post3.id)
|> Api.read!()
end
test "relationship filters are honored when filtering on relationships", %{post2: post} do
post = Api.load!(post, [:special_author1, :author1])
assert post.author1
refute post.special_author1
end
end
describe "filter subset logic" do
test "can detect a filter is a subset of itself" do
filter = Filter.parse!(Post, %{points: 1})
assert Filter.strict_subset_of?(filter, filter)
end
test "can detect a filter is a subset of itself *and* something else" do
filter = Filter.parse!(Post, points: 1)
candidate = Filter.add_to_filter!(filter, title: "Title")
assert Filter.strict_subset_of?(filter, candidate)
end
test "can detect a filter is not a subset of itself *or* something else" do
filter = Filter.parse!(Post, points: 1)
candidate = Filter.add_to_filter!(filter, [title: "Title"], :or)
refute Filter.strict_subset_of?(filter, candidate)
end
test "can detect a filter is a subset based on a simplification" do
filter = Filter.parse!(Post, points: [in: [1, 2]])
candidate = Filter.parse!(Post, points: 1)
assert Filter.strict_subset_of?(filter, candidate)
end
test "can detect a filter is not a subset based on a simplification" do
filter = Filter.parse!(Post, points: [in: [1, 2]])
candidate = Filter.parse!(Post, points: 3)
refute Filter.strict_subset_of?(filter, candidate)
end
test "can detect a more complicated scenario" do
filter = Filter.parse!(Post, or: [[points: [in: [1, 2, 3]]], [points: 4], [points: 5]])
candidate = Filter.parse!(Post, or: [[points: 1], [points: 3], [points: 5]])
assert Filter.strict_subset_of?(filter, candidate)
end
test "can detect less than and greater than closing in on a single value" do
filter = Filter.parse!(Post, points: [greater_than: 1, less_than: 3])
candidate = Filter.parse!(Post, points: 2)
assert Filter.strict_subset_of?(filter, candidate)
end
test "doesnt have false positives on less than and greater than closing in on a single value" do
filter = Filter.parse!(Post, points: [greater_than: 1, less_than: 3])
candidate = Filter.parse!(Post, points: 4)
refute Filter.strict_subset_of?(filter, candidate)
end
test "understands unrelated negations" do
filter = Filter.parse!(Post, or: [[points: [in: [1, 2, 3]]], [points: 4], [points: 5]])
candidate =
Filter.parse!(Post, or: [[points: 1], [points: 3], [points: 5]], not: [points: 7])
assert Filter.strict_subset_of?(filter, candidate)
end
test "understands relationship filter subsets" do
id1 = Ash.UUID.generate()
id2 = Ash.UUID.generate()
filter = Filter.parse!(Post, author1: [id: [in: [id1, id2]]])
candidate = Filter.parse!(Post, author1: id1)
assert Filter.strict_subset_of?(filter, candidate)
end
test "understands relationship filter subsets when a value coincides with the join field" do
id1 = Ash.UUID.generate()
id2 = Ash.UUID.generate()
filter = Filter.parse!(Post, author1: [id: [in: [id1, id2]]])
candidate = Filter.parse!(Post, author1_id: id1)
assert Filter.strict_subset_of?(filter, candidate)
end
end
describe "parse_input" do
test "parse_input works when no private attributes are used" do
Ash.Filter.parse_input!(Profile, bio: "foo")
end
test "parse_input fails when a private attribute is used" do
Ash.Filter.parse!(Profile, private: "private")
assert_raise(Ash.Error.Query.NoSuchAttributeOrRelationship, fn ->
Ash.Filter.parse_input!(Profile, private: "private")
end)
end
end
describe "base_filter" do
test "resources that apply to the base filter are returned" do
%{id: id} =
SoftDeletePost
|> new(%{})
|> Api.create!()
assert [%{id: ^id}] = Api.read!(SoftDeletePost)
end
test "resources that don't apply to the base filter are not returned" do
SoftDeletePost
|> new(%{})
|> Api.create!()
|> Api.destroy()
assert [] = Api.read!(SoftDeletePost)
end
end
describe "contains/2" do
test "works for simple strings" do
Post
|> new(%{title: "foobar"})
|> Api.create!()
Post
|> new(%{title: "bazbuz"})
|> Api.create!()
assert [%{title: "foobar"}] =
Post
|> Ash.Query.filter(contains(title, "oba"))
|> Api.read!()
end
test "works for simple strings with a case insensitive search term" do
Post
|> new(%{title: "foobar"})
|> Api.create!()
Post
|> new(%{title: "bazbuz"})
|> Api.create!()
assert [%{title: "foobar"}] =
Post
|> Ash.Query.filter(contains(title, ^%Ash.CiString{string: "OBA"}))
|> Api.read!()
end
test "works for case insensitive strings" do
Post
|> new(%{category: "foobar"})
|> Api.create!()
Post
|> new(%{category: "bazbuz"})
|> Api.create!()
assert [%{category: %Ash.CiString{string: "foobar"}}] =
Post
|> Ash.Query.filter(contains(category, "OBA"))
|> Api.read!()
end
end
describe "calls in filters" do
test "calls are evaluated and can be used in predicates" do
post1 =
Post
|> new(%{title: "title1", contents: "contents1", points: 2})
|> Api.create!()
post_id = post1.id
assert [%Post{id: ^post_id}] =
Post
|> Ash.Query.filter(points + 1 == 3)
|> Api.read!()
end
test "function calls are evaluated properly" do
post1 =
Post
|> new(%{title: "title1", approved_at: Timex.shift(Timex.now(), weeks: -1)})
|> Api.create!()
Post
|> new(%{title: "title1", approved_at: Timex.shift(Timex.now(), weeks: -4)})
|> Api.create!()
post_id = post1.id
assert [%Post{id: ^post_id}] =
Post
|> Ash.Query.filter(approved_at > ago(2, :week))
|> Api.read!()
end
end
end
| 25.892283 | 100 | 0.571748 |
7963eb2e34121fb76ce24723913c841ad29ff227 | 3,345 | exs | Elixir | lib/elixir/test/elixir/gen_server_test.exs | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/gen_server_test.exs | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/gen_server_test.exs | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
# Tests for GenServer itself, driven through the minimal Stack server below.
# Written against an older Elixir (bare `self`/`node` without parens) — kept as-is.
defmodule GenServerTest do
  use ExUnit.Case, async: true

  # Fixture server: state is a plain list used as a stack.
  # :pop replies with the head; {:push, item} prepends.
  defmodule Stack do
    use GenServer

    def handle_call(:pop, _from, [h|t]) do
      {:reply, h, t}
    end

    # Unknown calls fall through to the default GenServer implementation.
    def handle_call(request, from, state) do
      super(request, from, state)
    end

    def handle_cast({:push, item}, state) do
      {:noreply, [item|state]}
    end

    # Unknown casts fall through to the default implementation as well.
    def handle_cast(request, state) do
      super(request, state)
    end

    def terminate(_reason, _state) do
      # There is a race condition if the agent is
      # restarted too fast and it is registered.
      try do
        self |> Process.info(:registered_name) |> elem(1) |> Process.unregister
      rescue
        _ -> :ok
      end
      :ok
    end
  end

  test "start_link/2, call/2 and cast/2" do
    {:ok, pid} = GenServer.start_link(Stack, [:hello])
    # start_link/2 links the server to the calling process.
    {:links, links} = Process.info(self, :links)
    assert pid in links
    assert GenServer.call(pid, :pop) == :hello
    assert GenServer.cast(pid, {:push, :world}) == :ok
    assert GenServer.call(pid, :pop) == :world
    assert GenServer.stop(pid) == :ok
    # Casting to a :global or :via name returns :ok even though nothing was
    # registered under those names in this test...
    assert GenServer.cast({:global, :foo}, {:push, :world}) == :ok
    assert GenServer.cast({:via, :foo, :bar}, {:push, :world}) == :ok
    # ...but casting to an unregistered *local* atom name raises.
    assert_raise ArgumentError, fn ->
      GenServer.cast(:foo, {:push, :world})
    end
  end

  # name: nil must start the server without registering it.
  test "nil name" do
    {:ok, pid} = GenServer.start_link(Stack, [:hello], name: nil)
    assert Process.info(pid, :registered_name) == {:registered_name, []}
  end

  # start/2, unlike start_link/2, must not link to the caller.
  test "start/2" do
    {:ok, pid} = GenServer.start(Stack, [:hello])
    {:links, links} = Process.info(self, :links)
    refute pid in links
    GenServer.stop(pid)
  end

  # abcast casts to the locally registered name on the given nodes; the
  # unknown node :foo@bar does not affect the result or the local state.
  test "abcast/3" do
    {:ok, _} = GenServer.start_link(Stack, [], name: :stack)
    assert GenServer.abcast(:stack, {:push, :hello}) == :abcast
    assert GenServer.call({:stack, node()}, :pop) == :hello
    assert GenServer.abcast([node, :foo@bar], :stack, {:push, :world}) == :abcast
    assert GenServer.call(:stack, :pop) == :world
    GenServer.stop(:stack)
  end

  # multi_call returns {replies_per_node, bad_nodes}; the unreachable
  # :foo@bar ends up in the bad-nodes list.
  test "multi_call/4" do
    {:ok, _} = GenServer.start_link(Stack, [:hello, :world], name: :stack)
    assert GenServer.multi_call(:stack, :pop) ==
           {[{node(), :hello}], []}
    assert GenServer.multi_call([node, :foo@bar], :stack, :pop) ==
           {[{node, :world}], [:foo@bar]}
    GenServer.stop(:stack)
  end

  # whereis/1 resolves pids, local names, {name, node} tuples, :global and
  # :via names; unknown names resolve to nil (or pass through for remote nodes).
  test "whereis/1" do
    name = :whereis_server
    {:ok, pid} = GenServer.start_link(Stack, [], name: name)
    assert GenServer.whereis(name) == pid
    assert GenServer.whereis({name, node()}) == pid
    assert GenServer.whereis({name, :another_node}) == {name, :another_node}
    assert GenServer.whereis(pid) == pid
    assert GenServer.whereis(:whereis_bad_server) == nil
    {:ok, pid} = GenServer.start_link(Stack, [], name: {:global, name})
    assert GenServer.whereis({:global, name}) == pid
    assert GenServer.whereis({:global, :whereis_bad_server}) == nil
    assert GenServer.whereis({:via, :global, name}) == pid
    assert GenServer.whereis({:via, :global, :whereis_bad_server}) == nil
  end

  # stop/3 works with both a pid and a registered name.
  test "stop/3" do
    {:ok, pid} = GenServer.start(Stack, [])
    assert GenServer.stop(pid, :normal) == :ok
    {:ok, _} = GenServer.start(Stack, [], name: :stack)
    assert GenServer.stop(:stack, :normal) == :ok
  end
end
| 29.086957 | 81 | 0.618535 |
796430ebff2139b81114a9e8d15eaab1ce98b31b | 1,875 | ex | Elixir | lib/ash/notifier/pub_sub/publication.ex | MrFlorius/ash | 247abbb8333d252da5440a58ddf4f1b7f184342f | [
"MIT"
] | null | null | null | lib/ash/notifier/pub_sub/publication.ex | MrFlorius/ash | 247abbb8333d252da5440a58ddf4f1b7f184342f | [
"MIT"
] | null | null | null | lib/ash/notifier/pub_sub/publication.ex | MrFlorius/ash | 247abbb8333d252da5440a58ddf4f1b7f184342f | [
"MIT"
defmodule Ash.Notifier.PubSub.Publication do
  @moduledoc "Represents an individual publication setup"

  defstruct [
    :action,
    :topic,
    :event,
    :type
  ]

  @schema [
    action: [
      type: :atom,
      doc: "The name of the action that should be published",
      required: true
    ],
    topic: [
      type: {:custom, __MODULE__, :topic, []},
      doc: "The topic to publish",
      required: true
    ],
    event: [
      type: :string,
      doc: "The name of the event to publish. Defaults to the action name"
    ]
  ]

  # publish_all drops the :action requirement and adds an optional :type to
  # disambiguate actions sharing a name.
  @publish_all_schema @schema
                      |> Keyword.update!(:action, &Keyword.delete(&1, :required))
                      |> Keyword.put(:type,
                        type: {:in, [:create, :update, :destroy]},
                        doc:
                          "In the case of multiple actions with the same name, you may need to provide the action type as well."
                      )

  @doc "Option schema for a single `publish` entry."
  def schema, do: @schema

  @doc "Option schema for a `publish_all` entry."
  def publish_all_schema, do: @publish_all_schema

  @doc false
  # Custom validator for the :topic option.
  #
  # A single binary is normalized to a one-element list. A (possibly nested)
  # list is accepted when every leaf is a binary or an atom (attribute name).
  # Anything else is rejected with a descriptive error message.
  def topic(topic) when is_binary(topic) do
    {:ok, [topic]}
  end

  def topic(topic) when is_list(topic) do
    if nested_list_of_binaries_or_atoms?(topic) do
      {:ok, topic}
    else
      {:error, topic_error(topic)}
    end
  end

  def topic(other) do
    {:error, topic_error(other)}
  end

  # Single source of truth for the validation error message (previously
  # duplicated across two clauses).
  defp topic_error(value) do
    "Expected topic to be a string or a list of strings or attribute names (as atoms), got: #{inspect(value)}"
  end

  # Recursively checks that every leaf of a nested list is a binary or atom.
  defp nested_list_of_binaries_or_atoms?(list) when is_list(list) do
    Enum.all?(list, &nested_list_of_binaries_or_atoms?/1)
  end

  defp nested_list_of_binaries_or_atoms?(value) when is_binary(value) or is_atom(value) do
    true
  end

  defp nested_list_of_binaries_or_atoms?(_) do
    false
  end
end
| 25.337838 | 128 | 0.5984 |
79643470b02ed821c3a1b6879db551996ead425c | 2,021 | ex | Elixir | lib/codes/codes_l66.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_l66.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_l66.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
defmodule IcdCode.ICDCode.Codes_L66 do
  @moduledoc false
  alias IcdCode.ICDCode

  # ICD category L66 (cicatricial alopecia): one entry per short code.
  # In every entry the full, short and category names are identical, so a
  # single descriptive name per code is enough to generate the functions.
  @entries [
    {"0", "Pseudopelade"},
    {"1", "Lichen planopilaris"},
    {"2", "Folliculitis decalvans"},
    {"3", "Perifolliculitis capitis abscedens"},
    {"4", "Folliculitis ulerythematosa reticulata"},
    {"8", "Other cicatricial alopecia"},
    {"9", "Cicatricial alopecia, unspecified"}
  ]

  # Generates _L660/0 .. _L669/0, each returning the fully-populated
  # %ICDCode{} for its code — identical to writing the functions by hand.
  for {short_code, name} <- @entries do
    def unquote(:"_L66#{short_code}")() do
      %ICDCode{
        full_code: unquote("L66" <> short_code),
        category_code: "L66",
        short_code: unquote(short_code),
        full_name: unquote(name),
        short_name: unquote(name),
        category_name: unquote(name)
      }
    end
  end
end
| 28.871429 | 65 | 0.603167 |
7964352737cceaadb50edbb2bc2c2178618c10d9 | 185 | exs | Elixir | priv/repo/migrations/20170603012354_muted_default_true.exs | mgwidmann/slack_coder | 3ff4375777c1bdbc94ba1491fdf4e1c0fa465edf | [
"MIT"
] | 15 | 2015-09-23T16:03:28.000Z | 2018-12-04T21:48:04.000Z | priv/repo/migrations/20170603012354_muted_default_true.exs | mgwidmann/slack_coder | 3ff4375777c1bdbc94ba1491fdf4e1c0fa465edf | [
"MIT"
] | 27 | 2016-01-12T16:44:31.000Z | 2017-10-13T16:09:36.000Z | priv/repo/migrations/20170603012354_muted_default_true.exs | mgwidmann/slack_coder | 3ff4375777c1bdbc94ba1491fdf4e1c0fa465edf | [
"MIT"
] | 4 | 2016-09-01T12:08:24.000Z | 2017-09-21T15:07:57.000Z | defmodule SlackCoder.Repo.Migrations.MutedDefaultTrue do
  use Ecto.Migration

  # Changes the default of users.muted to true so newly inserted rows are
  # muted unless explicitly set. `modify` alters the column definition only;
  # existing rows are not rewritten by this migration.
  def change do
    alter table(:users) do
      modify :muted, :boolean, default: true
    end
  end
end
| 18.5 | 56 | 0.718919 |
796436824a26d386c6670c87b499f630e2aab683 | 60 | exs | Elixir | mix.exs | Nathaniel-N/erlbus | 0033b44a7be15088ea7b09b3eadb7b2355bf7795 | [
"MIT"
] | null | null | null | mix.exs | Nathaniel-N/erlbus | 0033b44a7be15088ea7b09b3eadb7b2355bf7795 | [
"MIT"
] | null | null | null | mix.exs | Nathaniel-N/erlbus | 0033b44a7be15088ea7b09b3eadb7b2355bf7795 | [
"MIT"
] | null | null | null | def deps do
  # Mix dependency list (fragment of a mix.exs project file): the erlbus
  # library, fetched from Hex under the package name :erlbus but used as
  # the :ebus application.
  [
    {:ebus, "~> 0.2", hex: :erlbus}
  ]
end
| 10 | 35 | 0.45 |
796459fbe2f693c949e9c1205657ce3b5654e525 | 687 | exs | Elixir | lib/elixir/test/elixir/kernel/charlist_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/elixir/test/elixir/kernel/charlist_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/elixir/test/elixir/kernel/charlist_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z | Code.require_file("../test_helper.exs", __DIR__)
defmodule CharlistTest do
  use ExUnit.Case, async: true
  test "heredoc" do
    assert __ENV__.line == 7 # position-sensitive: encodes this line's number in the original file
    assert 'foo\nbar\n' == '''
    foo
    bar
    '''
    assert __ENV__.line == 14 # position-sensitive too — do not insert lines above this point
    assert 'foo\nbar \'\'\'\n' == '''
    foo
    bar \'\'\'
    '''
  end

  # A charlist is a list of integer codepoints, so length/1 counts
  # characters: the leading space plus four kana give 5.
  test "UTF-8" do
    assert length(' ゆんゆん') == 5
  end

  # Escapes inside charlists: \xNN is a hex byte, \uNNNN and \u{...} are
  # Unicode codepoints; codepoints beyond ASCII show up as raw integers.
  test "hex" do
    assert '\x76' == 'v'
    assert '\u00fF' == 'ÿ'
    assert '\u{A}' == '\n'
    assert '\u{e9}' == 'é'
    assert '\u{10F}' == [271]
    assert '\u{10FF}' == [4351]
    assert '\u{10FFF}' == [69631]
    assert '\u{10FFFF}' == [1_114_111]
  end
end
| 18.567568 | 48 | 0.477438 |
79645f88a98df3366f5b1653050b9e7f8db3a861 | 723 | ex | Elixir | lib/survey_api/accounts.ex | AkioCode/elixir-survey | 420f4e5f60b84d381707f162b473dd91eb0fe9f2 | [
"MIT"
] | null | null | null | lib/survey_api/accounts.ex | AkioCode/elixir-survey | 420f4e5f60b84d381707f162b473dd91eb0fe9f2 | [
"MIT"
] | null | null | null | lib/survey_api/accounts.ex | AkioCode/elixir-survey | 420f4e5f60b84d381707f162b473dd91eb0fe9f2 | [
"MIT"
] | null | null | null | defmodule SurveyApi.Accounts do
  @moduledoc """
  The Accounts context.
  """

  import Ecto.Query, warn: false
  alias SurveyApi.Repo
  alias SurveyApi.Accounts.User

  # Returns every user in the database.
  def list_users do
    Repo.all(User)
  end

  # Fetches a single user by primary key.
  #
  # NOTE(review): the success branch returns the bare %User{} while the miss
  # branch returns a 3-tuple {:error, message, 404} — callers must handle two
  # different shapes. A uniform {:ok, user} | {:error, ...} contract would be
  # cleaner, but changing it would break existing callers; left as-is.
  def get_user(id) do
    Repo.get(User, id)
    |> case do
      nil ->
        {:error, "Usuário não existe", 404}
      user ->
        user
    end
  end

  # Fetches a user by exact name, or nil when none matches.
  def get_user_by_name(name), do: Repo.get_by(User, name: name)

  # Inserts a new user built from attrs via User.changeset/2.
  # Follows the Ecto convention of {:ok, user} | {:error, changeset}.
  def create_user(attrs \\ %{}) do
    %User{}
    |> User.changeset(attrs)
    |> Repo.insert()
  end

  # Applies attrs to an existing user and persists the change.
  def update_user(%User{} = user, attrs) do
    user
    |> User.changeset(attrs)
    |> Repo.update()
  end

  # Deletes the given user record.
  def delete_user(%User{} = user) do
    Repo.delete(user)
  end
end
| 16.813953 | 63 | 0.605809 |
7964ac4b82002abab5e96d93ea7f05f56d3f54ee | 3,508 | exs | Elixir | test/integration/typespec_test.exs | jeremyowensboggs/zigler | d1ec07ae66db5c007ff7cfc8a820fb21c661bcea | [
"MIT"
] | 349 | 2019-10-02T07:21:17.000Z | 2022-03-21T17:50:06.000Z | test/integration/typespec_test.exs | jeremyowensboggs/zigler | d1ec07ae66db5c007ff7cfc8a820fb21c661bcea | [
"MIT"
] | 223 | 2019-10-05T05:36:08.000Z | 2022-03-31T23:12:02.000Z | test/integration/typespec_test.exs | jeremyowensboggs/zigler | d1ec07ae66db5c007ff7cfc8a820fb21c661bcea | [
"MIT"
] | 20 | 2019-10-08T16:29:39.000Z | 2022-03-31T15:07:20.000Z | defmodule ZiglerTest.Integration.TypespecTest do
  use ExUnit.Case, async: true
  #
  # note that this module doesn't make sense unless you have the context of the
  # support module `ZiglerTest.Types`. This support module can be found in the
  # following location:
  #
  # test/support/types.ex
  #
  @moduletag :typespec

  # Fetch the compiled typespecs of the support module once, at compile time,
  # and freeze them into the @types attribute for the helpers below.
  {:ok, type_list} = Code.Typespec.fetch_specs(ZiglerTest.Types)
  @types type_list

  # we need a consistent way of matching things in the type list
  # Looks up the spec for {fn_name, arity} and structurally compares every
  # argument type plus the return type against the given expectations.
  defp assert_typespec(fn_name, arity, args, return) do
    assert {_, [spec]} = Enum.find(@types, &match?({{^fn_name, ^arity}, _}, &1))
    assert {:type, _, :fun, [{:type, _, :product, ts_args}, ts_return]} = spec
    args
    |> Enum.zip(ts_args)
    |> Enum.each(&compare_type/1)
    compare_type(return, ts_return)
  end

  defp compare_type({type, target}), do: compare_type(type, target)
  # Expectation encodings: a bare atom is a builtin type name; {:atom, a} a
  # literal atom; {:list, t} a list of t; a Range an integer range. Negative
  # lower bounds appear in the erlang AST as a unary minus op node.
  defp compare_type(type, target) when is_atom(type), do: assert match?({:type, _, ^type, _}, target)
  defp compare_type({:atom, atom}, target), do: assert match?({:atom, _, ^atom}, target)
  defp compare_type({:list, type}, target) do
    assert match?({:type, _, :list, [{:type, _, ^type, _}]}, target)
  end
  defp compare_type(first..last, target) when first >= 0 do
    assert match?({:type, _, :range, [{:integer, _, ^first}, {:integer, _, ^last}]}, target)
  end
  defp compare_type(first..last, target) do
    neg_first = -first
    assert match?({:type, _, :range, [
      {:op, _, :-, {:integer, _, ^neg_first}},
      {:integer, _, ^last}]}, target)
  end

  describe "the dummy elixir typespec matches" do
    test "for basic integer" do
      assert_typespec(:dummy_integer, 1, [:integer], :integer)
    end
  end

  describe "for the selected function values" do
    test "void return gives ok" do
      assert_typespec(:void_out, 0, [], {:atom, :ok})
    end

    test "u32 is specced correctly" do
      assert_typespec(:u32_in_out, 1, [0..0xFFFF_FFFF], 0..0xFFFF_FFFF)
    end

    @i32_range -2_147_483_648..2_147_483_647
    test "i32 is specced correctly" do
      assert_typespec(:i32_in_out, 1, [@i32_range], @i32_range)
    end

    # i64 exceeds the range encoding and is specced as a plain integer.
    test "i64 is specced correctly" do
      assert_typespec(:i64_in_out, 1, [:integer], :integer)
    end

    test "f64 is specced correctly" do
      assert_typespec(:f64_in_out, 1, [:float], :float)
    end

    test "bool is specced correctly" do
      assert_typespec(:bool_in_out, 1, [:boolean], :boolean)
    end

    test "term is specced correctly" do
      assert_typespec(:term_in_out, 1, [:term], :term)
      assert_typespec(:eterm_in_out, 1, [:term], :term)
    end

    test "pid is specced correctly" do
      assert_typespec(:pid_in_out, 1, [:pid], :pid)
      assert_typespec(:epid_in_out, 1, [:pid], :pid)
    end

    test "atom is specced correctly" do
      assert_typespec(:atom_in_out, 1, [:atom], :atom)
    end

    test "string is specced correctly" do
      assert_typespec(:str_in_out, 1, [:binary], :binary)
    end

    test "slice is specced correctly" do
      assert_typespec(:islice_in_out, 1, [{:list, :integer}], {:list, :integer})
    end

    test "multiargument is specced correctly" do
      assert_typespec(:multiarg, 2, [:integer, :float], :float)
    end

    # Functions taking the beam environment ("env"/"eenv" prefixes) must not
    # expose that argument in their elixir-facing specs.
    test "with env it's specced correctly" do
      assert_typespec(:env_zero, 0, [], :integer)
      assert_typespec(:eenv_zero, 0, [], :integer)
      assert_typespec(:env_one, 1, [:integer], :integer)
      assert_typespec(:eenv_one, 1, [:integer], :integer)
    end
  end
end
| 31.890909 | 101 | 0.651368 |
7964b258c77690a7c1eba2e9f778023e3f869ddb | 705 | ex | Elixir | samples/client/petstore/elixir/lib/openapi_petstore/model/type_holder_default.ex | MalcolmScoffable/openapi-generator | 73605a0c0e0c825286c95123c63678ba75b44d5c | [
"Apache-2.0"
] | 11,868 | 2018-05-12T02:58:07.000Z | 2022-03-31T21:19:39.000Z | samples/client/petstore/elixir/lib/openapi_petstore/model/type_holder_default.ex | MalcolmScoffable/openapi-generator | 73605a0c0e0c825286c95123c63678ba75b44d5c | [
"Apache-2.0"
] | 9,672 | 2018-05-12T14:25:43.000Z | 2022-03-31T23:59:30.000Z | samples/client/petstore/elixir/lib/openapi_petstore/model/type_holder_default.ex | MalcolmScoffable/openapi-generator | 73605a0c0e0c825286c95123c63678ba75b44d5c | [
"Apache-2.0"
] | 4,776 | 2018-05-12T12:06:08.000Z | 2022-03-31T19:52:51.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule OpenapiPetstore.Model.TypeHolderDefault do
  @moduledoc """
  Struct for the Petstore `TypeHolderDefault` schema: one field per value
  kind (string, float, integer, boolean, integer list). Auto-generated by
  OpenAPI Generator — regenerate rather than editing by hand.
  """

  # Derive Poison JSON encoding for the whole struct.
  @derive [Poison.Encoder]
  defstruct [
    :"string_item",
    :"number_item",
    :"integer_item",
    :"bool_item",
    :"array_item"
  ]

  @type t :: %__MODULE__{
    :"string_item" => String.t,
    :"number_item" => float(),
    :"integer_item" => integer(),
    :"bool_item" => boolean(),
    :"array_item" => [integer()]
  }
end
# Decoding a TypeHolderDefault needs no post-processing, so the Poison
# decoder implementation is simply the identity function on the value.
defimpl Poison.Decoder, for: OpenapiPetstore.Model.TypeHolderDefault do
  def decode(value, _options), do: value
end
| 20.735294 | 91 | 0.652482 |
7964c5c1d9042eb7b69b77236281d1231a655193 | 249 | ex | Elixir | nolive/hiyoko/lib/hiyoko.ex | rykawamu/phoenix_liveview_practice_02 | 7384b0cab78dbeefe6d57a41744f791919f1467a | [
"MIT"
] | null | null | null | nolive/hiyoko/lib/hiyoko.ex | rykawamu/phoenix_liveview_practice_02 | 7384b0cab78dbeefe6d57a41744f791919f1467a | [
"MIT"
] | 1 | 2019-09-11T14:22:29.000Z | 2019-09-11T14:22:29.000Z | hiyoko/lib/hiyoko.ex | rykawamu/phoenix_liveview_practice_01 | e30ab2304473a995a3b36335ceb212a9bbed012f | [
"MIT"
] | null | null | null | defmodule Hiyoko do
  @moduledoc """
  Hiyoko keeps the contexts that define your domain
  and business logic.

  Contexts are also responsible for managing your data, regardless
  if it comes from the database, an external API or others.
  """

  # Intentionally empty: this appears to be the standard generated top-level
  # namespace module; domain code lives in context modules under Hiyoko.*.
end
| 24.9 | 66 | 0.751004 |
7964d1074c0708630625266ced833cf6afaae8c6 | 6,476 | ex | Elixir | lib/eex/lib/eex.ex | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | null | null | null | lib/eex/lib/eex.ex | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | null | null | null | lib/eex/lib/eex.ex | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | null | null | null | defmodule EEx.SyntaxError do
  # Exception raised by the EEx tokenizer/compiler; :message carries the
  # human-readable description of the syntax problem.
  defexception [:message]
end
defmodule EEx do
  @moduledoc ~S"""
  EEx stands for Embedded Elixir. It allows you to embed
  Elixir code inside a string in a robust way:

      iex> EEx.eval_string "foo <%= bar %>", [bar: "baz"]
      "foo baz"

  ## API

  This module provides 3 main APIs for you to use:

    1. Evaluate a string (`eval_string`) or a file (`eval_file`)
       directly. This is the simplest API to use but also the
       slowest, since the code is evaluated and not compiled before.

    2. Define a function from a string (`function_from_string`)
       or a file (`function_from_file`). This allows you to embed
       the template as a function inside a module which will then
       be compiled. This is the preferred API if you have access
       to the template at compilation time.

    3. Compile a string (`compile_string`) or a file (`compile_file`)
       into Elixir syntax tree. This is the API used by both functions
       above and is available to you if you want to provide your own
       ways of handling the compiled template.

  ## Options

  All functions in this module accepts EEx-related options.
  They are:

    * `:line` - the line to be used as the template start. Defaults to 1.
    * `:file` - the file to be used in the template. Defaults to the given
      file the template is read from or to "nofile" when compiling from a string.
    * `:engine` - the EEx engine to be used for compilation.

  ## Engine

  EEx has the concept of engines which allows you to modify or
  transform the code extracted from the given string or file.

  By default, `EEx` uses the `EEx.SmartEngine` that provides some
  conveniences on top of the simple `EEx.Engine`.

  ### Tags

  `EEx.SmartEngine` supports the following tags:

      <% Elixir expression - inline with output %>
      <%= Elixir expression - replace with result %>
      <%% EEx quotation - returns the contents inside %>
      <%# Comments - they are discarded from source %>

  All expressions that output something to the template
  **must** use the equals sign (`=`). Since everything in
  Elixir is a macro, there are no exceptions for this rule.
  For example, while some template languages would special-
  case `if` clauses, they are treated the same in EEx and
  also require `=` in order to have their result printed:

      <%= if true do %>
        It is obviously true
      <% else %>
        This will never appear
      <% end %>

  Notice that different engines may have different rules
  for each tag. Other tags may be added in future versions.

  ### Macros

  `EEx.SmartEngine` also adds some macros to your template.
  An example is the `@` macro which allows easy data access
  in a template:

      iex> EEx.eval_string "<%= @foo %>", assigns: [foo: 1]
      "1"

  In other words, `<%= @foo %>` is simply translated to:

      <%= Dict.get assigns, :foo %>

  The assigns extension is useful when the number of variables
  required by the template is not specified at compilation time.
  """

  @doc """
  Generates a function definition from the string.

  The kind (`:def` or `:defp`) must be given, the
  function name, its arguments and the compilation options.

  ## Examples

      iex> defmodule Sample do
      ...>   require EEx
      ...>   EEx.function_from_string :def, :sample, "<%= a + b %>", [:a, :b]
      ...> end
      iex> Sample.sample(1, 2)
      "3"

  """
  defmacro function_from_string(kind, name, source, args \\ [], options \\ []) do
    # `bind_quoted: binding` injects every macro argument (kind, name,
    # source, args, options) as variables inside the quoted block,
    # each evaluated exactly once at the caller.
    quote bind_quoted: binding do
      info = Keyword.merge [file: __ENV__.file, line: __ENV__.line], options
      # Build plain variable AST nodes for each named template argument.
      args = Enum.map args, fn arg -> {arg, [line: info[:line]], nil} end
      compiled = EEx.compile_string(source, info)

      case kind do
        :def  -> def(unquote(name)(unquote_splicing(args)), do: unquote(compiled))
        :defp -> defp(unquote(name)(unquote_splicing(args)), do: unquote(compiled))
      end
    end
  end

  @doc """
  Generates a function definition from the file contents.

  The kind (`:def` or `:defp`) must be given, the
  function name, its arguments and the compilation options.

  This function is useful in case you have templates but
  you want to precompile inside a module for speed.

  ## Examples

      # sample.eex
      <%= a + b %>

      # sample.ex
      defmodule Sample do
        require EEx
        EEx.function_from_file :def, :sample, "sample.eex", [:a, :b]
      end

      # iex
      Sample.sample(1, 2) #=> "3"

  """
  defmacro function_from_file(kind, name, file, args \\ [], options \\ []) do
    quote bind_quoted: binding do
      info = Keyword.merge options, [file: file, line: 1]
      args = Enum.map args, fn arg -> {arg, [line: 1], nil} end
      compiled = EEx.compile_file(file, info)

      # Register the template as an external resource so tooling knows the
      # module depends on it, and point stacktraces at the template file.
      @external_resource file
      @file file
      case kind do
        :def  -> def(unquote(name)(unquote_splicing(args)), do: unquote(compiled))
        :defp -> defp(unquote(name)(unquote_splicing(args)), do: unquote(compiled))
      end
    end
  end

  @doc """
  Get a string `source` and generate a quoted expression
  that can be evaluated by Elixir or compiled to a function.
  """
  def compile_string(source, options \\ []) do
    EEx.Compiler.compile(source, options)
  end

  @doc """
  Get a `filename` and generate a quoted expression
  that can be evaluated by Elixir or compiled to a function.
  """
  def compile_file(filename, options \\ []) do
    # Reads the file eagerly; :file/:line make errors point at the template.
    options = Keyword.merge options, [file: filename, line: 1]
    compile_string(File.read!(filename), options)
  end

  @doc """
  Get a string `source` and evaluate the values using the `bindings`.

  ## Examples

      iex> EEx.eval_string "foo <%= bar %>", [bar: "baz"]
      "foo baz"

  """
  def eval_string(source, bindings \\ [], options \\ []) do
    compiled = compile_string(source, options)
    do_eval(compiled, bindings, options)
  end

  @doc """
  Get a `filename` and evaluate the values using the `bindings`.

  ## Examples

      # sample.ex
      foo <%= bar %>

      # iex
      EEx.eval_file "sample.ex", [bar: "baz"] #=> "foo baz"

  """
  def eval_file(filename, bindings \\ [], options \\ []) do
    options  = Keyword.put options, :file, filename
    compiled = compile_file(filename, options)
    do_eval(compiled, bindings, options)
  end

  ### Helpers

  # Evaluates a compiled template with the given variable bindings and
  # returns only the rendered result (discarding the resulting bindings).
  defp do_eval(compiled, bindings, options) do
    {result, _} = Code.eval_quoted(compiled, bindings, options)
    result
  end
end
| 29.981481 | 83 | 0.651328 |
7964e4f8815d9587767839c83f33872af2838c26 | 1,085 | ex | Elixir | lib/absinthe/schema/rule/object_interfaces_must_be_valid.ex | scrogson/absinthe | aa7e9c83dc10603c72f80e09a60d12495bc1c6b7 | [
"Unlicense"
] | 3 | 2017-06-22T16:33:58.000Z | 2021-07-07T15:21:09.000Z | lib/absinthe/schema/rule/object_interfaces_must_be_valid.ex | scrogson/absinthe | aa7e9c83dc10603c72f80e09a60d12495bc1c6b7 | [
"Unlicense"
] | null | null | null | lib/absinthe/schema/rule/object_interfaces_must_be_valid.ex | scrogson/absinthe | aa7e9c83dc10603c72f80e09a60d12495bc1c6b7 | [
"Unlicense"
] | null | null | null | defmodule Absinthe.Schema.Rule.ObjectInterfacesMustBeValid do
  use Absinthe.Schema.Rule
  alias Absinthe.Schema
  alias Absinthe.Type

  @moduledoc false

  @description """
  Only interfaces may be present in an Object's interface list.
  Reference: https://github.com/facebook/graphql/blob/master/spec/Section%203%20--%20Type%20System.md#interfaces
  """

  # Human-readable explanation attached to a reported violation.
  def explanation(%{data: %{object: obj, interface: interface}}) do
    """
    Type "#{obj}" cannot implement non-interface type "#{interface}"
    #{@description}
    """
  end

  # Runs the rule over every type in the schema, collecting all reports.
  def check(schema) do
    Schema.types(schema)
    |> Enum.flat_map(&check_type(schema, &1))
  end

  # For a type that declares interfaces: each referenced name must resolve
  # to a %Type.Interface{}; any other resolved type yields a report. A name
  # that does not resolve at all raises — the schema is in an unexpected
  # state and this rule cannot proceed.
  defp check_type(schema, %{interfaces: ifaces} = type) do
    ifaces
    |> Enum.map(&Schema.lookup_type(schema, &1))
    |> Enum.reduce([], fn
      nil, _ ->
        raise "No type found in #{inspect ifaces}"
      %Type.Interface{}, acc ->
        acc
      iface_type, acc ->
        [report(type.__reference__.location, %{object: type.name, interface: iface_type.name}) | acc]
    end)
  end

  # Types without an :interfaces key are out of scope for this rule.
  defp check_type(_, _) do
    []
  end
end
| 24.659091 | 112 | 0.654378 |
79651151826dfdb1c62fceb3a7fefb29571ddae5 | 642 | ex | Elixir | parkapp_server/lib/parkapp/reservations/reservation_status.ex | bitmaker-software/parkapp | 39d9fd3cb8ab6bd1d54b776a5467eccf3b65f310 | [
"MIT"
] | 2 | 2018-11-06T12:21:16.000Z | 2018-11-21T10:20:17.000Z | parkapp_server/lib/parkapp/reservations/reservation_status.ex | bitmaker-software/parkapp | 39d9fd3cb8ab6bd1d54b776a5467eccf3b65f310 | [
"MIT"
] | null | null | null | parkapp_server/lib/parkapp/reservations/reservation_status.ex | bitmaker-software/parkapp | 39d9fd3cb8ab6bd1d54b776a5467eccf3b65f310 | [
"MIT"
] | null | null | null | defmodule Parkapp.Reservations.ReservationStatus do
  use Ecto.Schema
  import Ecto.Changeset

  # Lookup table of reservation statuses; rows carry a name and description.
  schema "reservation_status" do
    field :description, :string
    field :name, :string

    timestamps()
  end

  @doc false
  # Casts and requires :name, :description and :id. The :id is cast
  # explicitly — presumably because status rows use fixed, well-known ids
  # (see the companion Enum module); confirm against the seeding code.
  def changeset(reservation_status, attrs) do
    reservation_status
    |> cast(attrs, [:name, :description, :id])
    |> validate_required([:name, :description, :id])
  end
end
defmodule Parkapp.Reservations.ReservationStatus.Enum do
  @moduledoc """
  Enum for reservation status
  """

  # Well-known reservation status ids, keyed by status name. The original
  # defined no status with id 3, so the gap is preserved here.
  @status_ids [open: 1, in_park: 2, external_payment: 4, payment2: 5, closed: 6]

  # Generate one zero-arity accessor per status (open/0, in_park/0, ...),
  # each returning its fixed integer id.
  for {status, id} <- @status_ids do
    def unquote(status)(), do: unquote(id)
  end
end
| 20.0625 | 56 | 0.688474 |
79651f626d698d94d275fb96841fcfda84e3edda | 6,988 | ex | Elixir | lib/prolly/count_min_sketch.ex | ckampfe/prolly | a43f92d64caa016ebe74b62c68cbc748e4b4a4c4 | [
"MIT"
] | 2 | 2017-06-19T23:17:30.000Z | 2021-01-08T04:01:59.000Z | lib/prolly/count_min_sketch.ex | ckampfe/prolly | a43f92d64caa016ebe74b62c68cbc748e4b4a4c4 | [
"MIT"
] | 4 | 2017-06-18T06:51:30.000Z | 2017-06-24T05:20:11.000Z | lib/prolly/count_min_sketch.ex | ckampfe/prolly | a43f92d64caa016ebe74b62c68cbc748e4b4a4c4 | [
"MIT"
] | null | null | null | defmodule Prolly.CountMinSketch do
require Vector
@moduledoc """
Use CountMinSketch when you want to count and query the
approximate number of occurences of values in a stream using sublinear memory
For example, "how many times has the string `foo` been in the stream so far?" is
a reasonable question for CountMinSketch.
A CountMinSketch will not undercount occurences, but may overcount occurences,
reporting a count that is higher than the real number of occurences for a given
value.
"""
@opaque t :: %__MODULE__{
matrix: Vector.t,
hash_fns: list((String.t -> integer)),
depth: pos_integer
}
# storing depth on the struct is an optimization so it doesn't
# have to be computed for every single update and query
defstruct [matrix: nil, hash_fns: nil, depth: 1]
@doc """
Create a CountMinSketch
## Examples
iex> require Prolly.CountMinSketch, as: Sketch
iex> Sketch.new(3, 5,
...> [fn(value) -> :crypto.hash(:sha, value) |> :crypto.bytes_to_integer() end,
...> fn(value) -> :crypto.hash(:md5, value) |> :crypto.bytes_to_integer() end,
...> fn(value) -> :crypto.hash(:sha256, value) |> :crypto.bytes_to_integer() end]).matrix
...> |> Enum.map(&Vector.to_list(&1))
[[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]]
"""
@spec new(pos_integer, pos_integer, list((String.t -> integer))) :: t
def new(width, depth, hash_fns) when is_integer(width) and is_integer(depth) do
matrix =
Enum.map(1..width, fn(_) ->
Vector.new(Enum.map(1..depth, fn _ -> 0 end))
end)
|> Vector.new
%__MODULE__{
matrix: matrix,
hash_fns: hash_fns,
depth: depth
}
end
@doc """
Query a sketch for the count of a given value
## Examples
iex> require Prolly.CountMinSketch, as: Sketch
iex> Sketch.new(3, 5,
...> [fn(value) -> :crypto.hash(:sha, value) |> :crypto.bytes_to_integer() end,
...> fn(value) -> :crypto.hash(:md5, value) |> :crypto.bytes_to_integer() end,
...> fn(value) -> :crypto.hash(:sha256, value) |> :crypto.bytes_to_integer() end])
...> |> Sketch.update("hi") |> Sketch.get_count("hi")
1
iex> require Prolly.CountMinSketch, as: Sketch
iex> sketch = Sketch.new(3, 5,
...> [fn(value) -> :crypto.hash(:sha, value) |> :crypto.bytes_to_integer() end,
...> fn(value) -> :crypto.hash(:md5, value) |> :crypto.bytes_to_integer() end,
...> fn(value) -> :crypto.hash(:sha256, value) |> :crypto.bytes_to_integer() end])
...> |> Sketch.update("hi")
...> |> Sketch.update("hi")
...> |> Sketch.update("hi")
iex> Sketch.get_count(sketch, "hi")
3
iex> require Prolly.CountMinSketch, as: Sketch
iex> sketch = Sketch.new(3, 5,
...> [fn(value) -> :crypto.hash(:sha, value) |> :crypto.bytes_to_integer() end,
...> fn(value) -> :crypto.hash(:md5, value) |> :crypto.bytes_to_integer() end,
...> fn(value) -> :crypto.hash(:sha256, value) |> :crypto.bytes_to_integer() end])
...> |> Sketch.update([77, "list"])
...> |> Sketch.update([77, "list"])
...> |> Sketch.update([77, "list"])
...> |> Sketch.update([77, "list"])
...> |> Sketch.update([77, "list"])
iex> Sketch.get_count(sketch, [77, "list"])
5
"""
@spec get_count(t, String.Chars) :: integer
def get_count(%__MODULE__{matrix: matrix, hash_fns: hash_fns, depth: depth}, value) when is_binary(value) do
hash_fns
|> Enum.with_index
|> Enum.map(fn({hash_fn, i}) ->
[i, compute_index(hash_fn, value, depth)]
end)
|> Enum.map(fn(path) ->
Kernel.get_in(matrix, path)
end)
|> Enum.min
end
def get_count(%__MODULE__{} = sketch, value) do
get_count(sketch, to_string(value))
end
@doc """
Update a sketch with a value
## Examples
iex> require Prolly.CountMinSketch, as: Sketch
iex> sketch = Sketch.new(3, 5,
...> [fn(value) -> :crypto.hash(:sha, value) |> :crypto.bytes_to_integer() end,
...> fn(value) -> :crypto.hash(:md5, value) |> :crypto.bytes_to_integer() end,
...> fn(value) -> :crypto.hash(:sha256, value) |> :crypto.bytes_to_integer() end])
...> |> Sketch.update("hi")
iex> sketch.matrix |> Enum.map(&Vector.to_list(&1))
[[0, 1, 0, 0, 0], [0, 0, 1, 0, 0], [0, 1, 0, 0, 0]]
iex> require Prolly.CountMinSketch, as: Sketch
iex> sketch = Sketch.new(3, 5,
...> [fn(value) -> :crypto.hash(:sha, value) |> :crypto.bytes_to_integer() end,
...> fn(value) -> :crypto.hash(:md5, value) |> :crypto.bytes_to_integer() end,
...> fn(value) -> :crypto.hash(:sha256, value) |> :crypto.bytes_to_integer() end])
...> |> Sketch.update(["a", "list", "of", "things"])
iex> sketch.matrix |> Enum.map(&Vector.to_list(&1))
[[0, 0, 0, 0, 1], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0]]
"""
@spec update(t, String.Chars) :: t
def update(%__MODULE__{matrix: matrix, hash_fns: hash_fns, depth: depth} = sketch, value) when is_binary(value) do
new_matrix =
hash_fns
|> Enum.with_index
|> Enum.reduce(matrix, fn({hash_fn, i}, acc) ->
Kernel.update_in(
acc,
[i, compute_index(hash_fn, value, depth)],
&(&1 + 1)
)
end)
%{sketch | matrix: new_matrix}
end
def update(%__MODULE__{} = sketch, value) do
update(sketch, to_string(value))
end
@doc """
Union two sketches by cell-wise adding their counts
## Examples
iex> require Prolly.CountMinSketch, as: Sketch
iex> sketch1 = Sketch.new(3, 5,
...> [fn(value) -> :crypto.hash(:sha, value) |> :crypto.bytes_to_integer() end,
...> fn(value) -> :crypto.hash(:md5, value) |> :crypto.bytes_to_integer() end,
...> fn(value) -> :crypto.hash(:sha256, value) |> :crypto.bytes_to_integer() end])
...> |> Sketch.update("hi")
iex> sketch2 = Sketch.new(3, 5,
...> [fn(value) -> :crypto.hash(:sha, value) |> :crypto.bytes_to_integer() end,
...> fn(value) -> :crypto.hash(:md5, value) |> :crypto.bytes_to_integer() end,
...> fn(value) -> :crypto.hash(:sha256, value) |> :crypto.bytes_to_integer() end])
...> |> Sketch.update("hi")
iex> Sketch.union(sketch1, sketch2).matrix |> Enum.map(&Vector.to_list(&1))
[[0, 2, 0, 0, 0], [0, 0, 2, 0, 0], [0, 2, 0, 0, 0]]
"""
@spec union(t, t) :: t
def union(
      %__MODULE__{matrix: matrix1, hash_fns: hash_fns, depth: depth} = sketch1,
      %__MODULE__{matrix: matrix2} = _sketch2
    ) do
  # Visit every [row, column] coordinate and fold the counterpart count from
  # the second sketch into the first sketch's matrix. Each cell is updated
  # exactly once, so the result equals a cell-wise sum of the two matrices.
  # NOTE(review): assumes both sketches share width/depth — TODO confirm
  # callers only union compatibly-sized sketches.
  summed_matrix =
    Enum.reduce(0..(Enum.count(hash_fns) - 1), matrix1, fn row, rows_acc ->
      Enum.reduce(0..(depth - 1), rows_acc, fn col, acc ->
        Kernel.update_in(acc, [row, col], &(&1 + Kernel.get_in(matrix2, [row, col])))
      end)
    end)

  %{sketch1 | matrix: summed_matrix}
end
# Maps a hashed value onto a column index in 0..k-1 by taking the hash of
# `value` modulo `k` (the sketch depth).
defp compute_index(hash_fn, value, k), do: rem(hash_fn.(value), k)
end | 36.395833 | 116 | 0.586005 |
79657db71155a0125545524950810b0a991f604c | 14,170 | ex | Elixir | lib/bamboo/adapters/smtp_adapter.ex | dbii/bamboo_smtp | 8846a2631302732e6e09aacc26e286faad25e418 | [
"MIT"
] | null | null | null | lib/bamboo/adapters/smtp_adapter.ex | dbii/bamboo_smtp | 8846a2631302732e6e09aacc26e286faad25e418 | [
"MIT"
] | null | null | null | lib/bamboo/adapters/smtp_adapter.ex | dbii/bamboo_smtp | 8846a2631302732e6e09aacc26e286faad25e418 | [
"MIT"
] | null | null | null | defmodule Bamboo.SMTPAdapter do
@moduledoc """
Sends email using SMTP.
Use this adapter to send emails through SMTP. This adapter requires
that some settings are set in the config. See the example section below.
*Sensitive credentials should not be committed to source control and are best kept in environment variables.
Using `{:system, "ENV_NAME"}` configuration is read from the named environment variable at runtime.*
## Example config
# In config/config.exs, or config.prod.exs, etc.
config :my_app, MyApp.Mailer,
adapter: Bamboo.SMTPAdapter,
server: "smtp.domain",
hostname: "www.mydomain.com",
port: 1025,
username: "[email protected]", # or {:system, "SMTP_USERNAME"}
password: "pa55word", # or {:system, "SMTP_PASSWORD"}
tls: :if_available, # can be `:always` or `:never`
allowed_tls_versions: [:"tlsv1", :"tlsv1.1", :"tlsv1.2"],
# or {":system", ALLOWED_TLS_VERSIONS"} w/ comma seprated values (e.g. "tlsv1.1,tlsv1.2")
ssl: false, # can be `true`,
retries: 1,
no_mx_lookups: false, # can be `true`
auth: :if_available # can be `always`. If your smtp relay requires authentication set it to `always`.
# Define a Mailer. Maybe in lib/my_app/mailer.ex
defmodule MyApp.Mailer do
use Bamboo.Mailer, otp_app: :my_app
end
"""
@behaviour Bamboo.Adapter
require Logger
@required_configuration [:server, :port]
@default_configuration %{
tls: :if_available,
ssl: :false,
retries: 1,
transport: :gen_smtp_client,
auth: :if_available
}
@tls_versions ~w(tlsv1 tlsv1.1 tlsv1.2)
# Exception raised when gen_smtp reports a delivery failure. Carries both a
# human-readable `:message` and the untouched `:raw` {reason, detail} tuple
# so callers can pattern-match on the original error.
defmodule SMTPError do
@moduledoc false
defexception [:message, :raw]
# Builds the exception from the {reason, detail} pair produced by
# handle_response/1; `detail` is whatever gen_smtp returned and is only
# inspected, never interpreted.
def exception(raw = {reason, detail}) do
message = """
There was a problem sending the email through SMTP.
The error is #{inspect(reason)}
More detail below:
#{inspect(detail)}
"""
%SMTPError{message: message, raw: raw}
end
end
def deliver(email, config) do
  # Translate Bamboo's configuration map into the keyword list gen_smtp expects.
  gen_smtp_config = to_gen_smtp_server_config(config)

  # Normalize addresses, then build the {from, to, body} triple for gen_smtp.
  message =
    email
    |> Bamboo.Mailer.normalize_addresses()
    |> to_gen_smtp_message()

  # The transport module (default :gen_smtp_client) performs the blocking send;
  # handle_response/1 wraps success in {:ok, _} and raises SMTPError on failure.
  response = config[:transport].send_blocking(message, gen_smtp_config)
  handle_response(response)
end
@doc false
# Bamboo.Adapter callback: validates that all @required_configuration keys are
# present (raising ArgumentError otherwise) and fills in @default_configuration
# values for any keys the user did not set.
def handle_config(config) do
config
|> check_required_configuration
|> put_default_configuration
end
@doc false
# Bamboo.Adapter callback: this adapter encodes attachments into the MIME body.
def supports_attachments?, do: true
# Clause order matters: specific error shapes are matched before the
# success catch-all.
# Missing credentials get a dedicated, actionable message.
defp handle_response({:error, :no_credentials = reason}) do
raise SMTPError, {reason, "Username and password were not provided for authentication."}
end
# Any other gen_smtp error is re-raised with its reason and raw detail.
defp handle_response({:error, reason, detail}) do
raise SMTPError, {reason, detail}
end
# Anything that is not an error tuple is treated as a successful send.
defp handle_response(response) do
{:ok, response}
end
# --- Header/body builders -------------------------------------------------
# Each helper appends CRLF-terminated lines to the message string being
# accumulated by body/1. The exact bytes matter: this is the SMTP wire
# format handed to gen_smtp.
defp add_bcc(body, %Bamboo.Email{bcc: recipients}) do
add_smtp_header_line(body, :bcc, format_email_as_string(recipients, :bcc))
end
defp add_cc(body, %Bamboo.Email{cc: recipients}) do
add_smtp_header_line(body, :cc, format_email_as_string(recipients, :cc))
end
defp add_custom_header(body, {key, value}) do
add_smtp_header_line(body, key, value)
end
# Folds every user-supplied header from the email into the message.
defp add_custom_headers(body, %Bamboo.Email{headers: headers}) do
Enum.reduce(headers, body, &add_custom_header(&2, &1))
end
# An empty line terminates a header section per the message format.
defp add_ending_header(body) do
add_smtp_line(body, "")
end
# "--<delimiter>--" closes a MIME multipart section.
defp add_ending_multipart(body, delimiter) do
add_smtp_line(body, "--#{delimiter}--")
end
# No HTML body: contribute nothing.
defp add_html_body(body, %Bamboo.Email{html_body: html_body}, _multi_part_delimiter)
when html_body == nil do
body
end
# Emits the text/html alternative part inside the given multipart boundary.
defp add_html_body(body, %Bamboo.Email{html_body: html_body}, multi_part_delimiter) do
body
|> add_multipart_delimiter(multi_part_delimiter)
|> add_smtp_header_line("Content-Type", "text/html;charset=UTF-8")
|> add_smtp_line("")
|> add_smtp_line(html_body)
end
defp add_from(body, %Bamboo.Email{from: from}) do
add_smtp_header_line(body, :from, format_email_as_string(from, :from))
end
defp add_mime_header(body) do
add_smtp_header_line(body, "MIME-Version", "1.0")
end
# "--<delimiter>" opens the next part of a MIME multipart section.
defp add_multipart_delimiter(body, delimiter) do
add_smtp_line(body, "--#{delimiter}")
end
defp add_multipart_header(body, delimiter) do
add_smtp_header_line(body, "Content-Type", ~s(multipart/alternative; boundary="#{delimiter}"))
end
defp add_multipart_mixed_header(body, delimiter) do
add_smtp_header_line(body, "Content-Type", ~s(multipart/mixed; boundary="#{delimiter}"))
end
# List content (e.g. multiple recipients) becomes one header line per element.
defp add_smtp_header_line(body, type, content) when is_list(content) do
Enum.reduce(content, body, &add_smtp_header_line(&2, type, &1))
end
# Atom header names (:to, :cc, ...) are capitalized to "To", "Cc", ...
defp add_smtp_header_line(body, type, content) when is_atom(type) do
add_smtp_header_line(body, String.capitalize(to_string(type)), content)
end
defp add_smtp_header_line(body, type, content) when is_binary(type) do
add_smtp_line(body, "#{type}: #{content}")
end
# Every line in the SMTP payload is CRLF-terminated.
defp add_smtp_line(body, content), do: body <> content <> "\r\n"
# Missing subject still emits an (empty) Subject header.
defp add_subject(body, %Bamboo.Email{subject: subject}) when is_nil(subject) do
add_smtp_header_line(body, :subject, "")
end
defp add_subject(body, %Bamboo.Email{subject: subject}) do
add_smtp_header_line(body, :subject, rfc822_encode(subject))
end
# Base64 "encoded-word" form so non-ASCII header values survive transport.
# NOTE(review): the whole value is encoded as a single word; very long
# subjects may exceed the RFC 2047 75-character encoded-word limit — confirm
# whether any consumer cares before changing.
defp rfc822_encode(content) do
"=?UTF-8?B?#{Base.encode64(content)}?="
end
# No text body: contribute nothing.
defp add_text_body(body, %Bamboo.Email{text_body: text_body}, _multi_part_delimiter)
when text_body == nil do
body
end
# Emits the text/plain alternative part inside the given multipart boundary.
defp add_text_body(body, %Bamboo.Email{text_body: text_body}, multi_part_delimiter) do
body
|> add_multipart_delimiter(multi_part_delimiter)
|> add_smtp_header_line("Content-Type", "text/plain;charset=UTF-8")
|> add_smtp_line("")
|> add_smtp_line(text_body)
end
# Emits the per-attachment MIME headers; the X-Attachment-Id is a random
# 32-bit integer drawn from :crypto.strong_rand_bytes/1.
defp add_attachment_header(body, attachment) do
<< random :: size(32) >> = :crypto.strong_rand_bytes(4)
body
|> add_smtp_line("Content-Type: #{attachment.content_type}; name=\"#{attachment.filename}\"")
|> add_smtp_line("Content-Disposition: attachment; filename=\"#{attachment.filename}\"")
|> add_smtp_line("Content-Transfer-Encoding: base64")
|> add_smtp_line("X-Attachment-Id: #{random}")
end
# Base64-encodes the attachment payload and wraps it to 76-character lines
# joined by CRLF; Stream.unfold keeps splitting off 76-char chunks and
# take_while stops at the first empty remainder.
defp add_attachment_body(body, data) do
data =
data
|> Base.encode64()
|> Stream.unfold(&String.split_at(&1, 76))
|> Enum.take_while(&(&1 != ""))
|> Enum.join("\r\n")
add_smtp_line(body, data)
end
# A nil attachment renders as nothing.
defp add_attachment(nil, _), do: ""
# Renders one attachment as a complete part of the multipart/mixed section.
defp add_attachment(attachment, multi_part_mixed_delimiter) do
""
|> add_multipart_delimiter(multi_part_mixed_delimiter)
|> add_attachment_header(attachment)
|> add_smtp_line("")
|> add_attachment_body(attachment.data)
end
defp add_attachments(body, %Bamboo.Email{attachments: nil}, _), do: body
# Appends every rendered attachment after the existing body. Interpolating
# the list concatenates its binary elements (iodata-style).
defp add_attachments(body, %Bamboo.Email{attachments: attachments}, multi_part_mixed_delimiter) do
attachment_part =
attachments |> Enum.map(fn(attachment) -> add_attachment(attachment, multi_part_mixed_delimiter) end)
"#{body}#{attachment_part}"
end
defp add_to(body, %Bamboo.Email{to: recipients}) do
add_smtp_header_line(body, :to, format_email_as_string(recipients, :to))
end
# Accumulates a missing-key message into `errors` when `key` is absent
# (or nil) in the configuration map.
defp aggregate_errors(config, key, errors) do
config
|> Map.fetch(key)
|> build_error(key, errors)
end
# Key present with a non-nil value: keep the user's configuration as-is.
defp apply_default_configuration({:ok, value}, _default, config) when value != nil do
config
end
# Key absent (or nil): fall back to the adapter's default for that key.
defp apply_default_configuration(_not_found_value, {key, default_value}, config) do
Map.put_new(config, key, default_value)
end
# Random MIME boundary built from three 32-bit integers of crypto-strength
# randomness, so boundaries are effectively unique per message.
defp generate_multi_part_delimiter do
<< random1 :: size(32), random2 :: size(32), random3 :: size(32) >> = :crypto.strong_rand_bytes(12)
"----=_Part_#{random1}_#{random2}.#{random3}"
end
# Assembles the full RFC message: headers, a multipart/mixed wrapper holding
# a multipart/alternative section (text then html), then attachments.
# The pipeline order is load-bearing — each step appends to the wire-format
# string, so reordering steps changes the emitted message.
defp body(email = %Bamboo.Email{}) do
multi_part_delimiter = generate_multi_part_delimiter()
multi_part_mixed_delimiter = generate_multi_part_delimiter()
""
|> add_subject(email)
|> add_from(email)
|> add_bcc(email)
|> add_cc(email)
|> add_to(email)
|> add_custom_headers(email)
|> add_mime_header
|> add_multipart_mixed_header(multi_part_mixed_delimiter)
|> add_ending_header
|> add_multipart_delimiter(multi_part_mixed_delimiter)
|> add_multipart_header(multi_part_delimiter)
|> add_ending_header
|> add_text_body(email, multi_part_delimiter)
|> add_html_body(email, multi_part_delimiter)
|> add_ending_multipart(multi_part_delimiter)
|> add_attachments(email, multi_part_mixed_delimiter)
|> add_ending_multipart(multi_part_mixed_delimiter)
end
# Present, non-nil value: no error to add.
defp build_error({:ok, value}, _key, errors) when value != nil, do: errors
# Missing or nil value: prepend a human-readable message for this key.
defp build_error(_not_found_value, key, errors) do
["Key #{key} is required for SMTP Adapter" | errors]
end
# Collects an error per missing @required_configuration key and raises via
# raise_on_missing_configuration/2 if any were found; otherwise returns the
# configuration unchanged.
defp check_required_configuration(config) do
@required_configuration
|> Enum.reduce([], &aggregate_errors(config, &1, &2))
|> raise_on_missing_configuration(config)
end
# Formats a {name, email} pair. Clause order matters: the nameless pair is
# handled first, then the `format` flag decides whether the display name is
# rendered ("Name <addr>", RFC 2047-encoded) or only the bare address.
defp format_email({nil, email}, _format), do: email
defp format_email({name, email}, true), do: "#{rfc822_encode(name)} <#{email}>"
defp format_email({_name, email}, false), do: email
defp format_email(emails, format) when is_list(emails) do
Enum.map(emails, &format_email(&1, format))
end
# Normalizes an arbitrary address (via Bamboo.Formatter) before formatting.
# `format` defaults to true, i.e. include the display name.
defp format_email(email, type, format \\ true) do
email
|> Bamboo.Formatter.format_email_address(type)
|> format_email(format)
end
# Joins multiple formatted addresses for a single header line.
defp format_email_as_string(emails) when is_list(emails) do
Enum.join(emails, ", ")
end
defp format_email_as_string(email) do
email
end
defp format_email_as_string(email, type) do
email
|> format_email(type)
|> format_email_as_string
end
# Sender address for the SMTP envelope: bare address, no display name.
defp from_without_format(%Bamboo.Email{from: from}) do
from
|> format_email(:from, false)
end
# Applies every adapter default (tls, ssl, retries, transport, auth) that the
# user's configuration has not already set.
defp put_default_configuration(config) do
@default_configuration
|> Enum.reduce(config, &put_default_configuration(&2, &1))
end
# For one {key, default} pair: keep the user's non-nil value, otherwise use
# the default (see apply_default_configuration/3).
defp put_default_configuration(config, default = {key, _default_value}) do
config
|> Map.fetch(key)
|> apply_default_configuration(default, config)
end
# No missing keys: the configuration is valid, pass it through unchanged.
defp raise_on_missing_configuration([], config), do: config

# One or more required keys are missing: raise with a readable summary that
# lists each missing key and dumps the configuration that was provided.
defp raise_on_missing_configuration(errors, config) do
  # Render each missing-key message as its own bullet line.
  # (Enum.map_join/3 replaces the previous map-then-join pair.)
  formatted_errors = Enum.map_join(errors, "\n", &"* #{&1}")

  # Message fix: "Here you configuration" -> "Here is your configuration".
  raise ArgumentError, """
  The following settings have not been found in your settings:
  #{formatted_errors}
  They are required to make the SMTP adapter work. Here is your configuration:
  #{inspect(config)}
  """
end
# Envelope recipients: to + cc + bcc, as bare addresses without display names.
defp to_without_format(email = %Bamboo.Email{}) do
email
|> Bamboo.Email.all_recipients
|> format_email(:to, false)
end
# Builds the {sender, recipients, raw_body} triple that
# :gen_smtp_client.send_blocking/2 consumes.
defp to_gen_smtp_message(email = %Bamboo.Email{}) do
{from_without_format(email), to_without_format(email), body(email)}
end
# Converts Bamboo's config map into gen_smtp's keyword list, one entry at a
# time (see the to_gen_smtp_server_config/2 clauses below).
defp to_gen_smtp_server_config(config) do
Enum.reduce(config, [], &to_gen_smtp_server_config/2)
end
# Per-key translation from Bamboo config to gen_smtp options. Clause order is
# significant: string forms (e.g. from environment variables) are normalized
# before the already-typed forms, the {:system, var} clause resolves env-var
# indirection by re-dispatching with the fetched value, and the final
# catch-all silently drops keys gen_smtp does not understand.
defp to_gen_smtp_server_config({:server, value}, config) when is_binary(value) do
[{:relay, value} | config]
end
defp to_gen_smtp_server_config({:username, value}, config) when is_binary(value) do
[{:username, value} | config]
end
defp to_gen_smtp_server_config({:password, value}, config) when is_binary(value) do
[{:password, value} | config]
end
defp to_gen_smtp_server_config({:tls, "if_available"}, config) do
[{:tls, :if_available} | config]
end
defp to_gen_smtp_server_config({:tls, "always"}, config) do
[{:tls, :always} | config]
end
defp to_gen_smtp_server_config({:tls, "never"}, config) do
[{:tls, :never} | config]
end
defp to_gen_smtp_server_config({:tls, value}, config) when is_atom(value) do
[{:tls, value} | config]
end
# Comma-separated string from the environment is parsed and whitelisted.
defp to_gen_smtp_server_config({:allowed_tls_versions, value}, config) when is_binary(value) do
[{:tls_options, [{:versions, string_to_tls_versions(value)}]} | config]
end
defp to_gen_smtp_server_config({:allowed_tls_versions, value}, config) when is_list(value) do
[{:tls_options, [{:versions, value}]} | config]
end
defp to_gen_smtp_server_config({:port, value}, config) when is_binary(value) do
[{:port, String.to_integer(value)} | config]
end
defp to_gen_smtp_server_config({:port, value}, config) when is_integer(value) do
[{:port, value} | config]
end
defp to_gen_smtp_server_config({:ssl, "true"}, config) do
[{:ssl, true} | config]
end
defp to_gen_smtp_server_config({:ssl, "false"}, config) do
[{:ssl, false} | config]
end
defp to_gen_smtp_server_config({:ssl, value}, config) when is_boolean(value) do
[{:ssl, value} | config]
end
defp to_gen_smtp_server_config({:retries, value}, config) when is_binary(value) do
[{:retries, String.to_integer(value)} | config]
end
defp to_gen_smtp_server_config({:retries, value}, config) when is_integer(value) do
[{:retries, value} | config]
end
defp to_gen_smtp_server_config({:hostname, value}, config) when is_binary(value) do
[{:hostname, value} | config]
end
defp to_gen_smtp_server_config({:no_mx_lookups, "true"}, config) do
[{:no_mx_lookups, true} | config]
end
defp to_gen_smtp_server_config({:no_mx_lookups, "false"}, config) do
[{:no_mx_lookups, false} | config]
end
defp to_gen_smtp_server_config({:no_mx_lookups, value}, config) when is_boolean(value) do
[{:no_mx_lookups, value} | config]
end
defp to_gen_smtp_server_config({:auth, "if_available"}, config) do
[{:auth, :if_available} | config]
end
defp to_gen_smtp_server_config({:auth, "always"}, config) do
[{:auth, :always} | config]
end
defp to_gen_smtp_server_config({:auth, value}, config) when is_atom(value) do
[{:auth, value} | config]
end
# {:system, "ENV"} values are read from the environment at runtime and then
# re-processed by the matching clause above.
defp to_gen_smtp_server_config({conf, {:system, var}}, config) do
to_gen_smtp_server_config({conf, System.get_env(var)}, config)
end
# Unknown keys are ignored rather than forwarded to gen_smtp.
defp to_gen_smtp_server_config({_key, _value}, config) do
config
end
# Parses a comma-separated TLS version string (e.g. "tlsv1.1,tlsv1.2") into
# atoms, keeping only entries present in the @tls_versions whitelist — so
# String.to_atom/1 is only ever applied to known-safe values.
defp string_to_tls_versions(version_string) do
  for version <- String.split(version_string, ","), version in @tls_versions do
    String.to_atom(version)
  end
end
end
| 32.058824 | 110 | 0.698518 |
7965aec6be74a53432acb68e24719d2717592981 | 3,676 | ex | Elixir | clients/big_query_data_transfer/lib/google_api/big_query_data_transfer/v1/model/data_source_parameter.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/big_query_data_transfer/lib/google_api/big_query_data_transfer/v1/model/data_source_parameter.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/big_query_data_transfer/lib/google_api/big_query_data_transfer/v1/model/data_source_parameter.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.BigQueryDataTransfer.V1.Model.DataSourceParameter do
  @moduledoc """
  Represents a data source parameter with validation rules, so that parameters can be rendered in the UI. These parameters are given to us by supported data sources, and include all needed information for rendering and validation. Thus, whoever uses this api can decide to generate either generic ui, or custom data source specific forms.
  ## Attributes
  - allowedValues (List[String]): All possible values for the parameter. Defaults to: `null`.
  - description (String): Parameter description. Defaults to: `null`.
  - displayName (String): Parameter display name in the user interface. Defaults to: `null`.
  - fields (List[DataSourceParameter]): When parameter is a record, describes child fields. Defaults to: `null`.
  - immutable (Boolean): Cannot be changed after initial creation. Defaults to: `null`.
  - maxValue (Float): For integer and double values specifies maxminum allowed value. Defaults to: `null`.
  - minValue (Float): For integer and double values specifies minimum allowed value. Defaults to: `null`.
  - paramId (String): Parameter identifier. Defaults to: `null`.
  - recurse (Boolean): If set to true, schema should be taken from the parent with the same parameter_id. Only applicable when parameter type is RECORD. Defaults to: `null`.
  - repeated (Boolean): Can parameter have multiple values. Defaults to: `null`.
  - required (Boolean): Is parameter required. Defaults to: `null`.
  - type (String): Parameter type. Defaults to: `null`.
  - Enum - one of [TYPE_UNSPECIFIED, STRING, INTEGER, DOUBLE, BOOLEAN, RECORD, PLUS_PAGE]
  - validationDescription (String): Description of the requirements for this field, in case the user input does not fulfill the regex pattern or min/max values. Defaults to: `null`.
  - validationHelpUrl (String): URL to a help document to further explain the naming requirements. Defaults to: `null`.
  - validationRegex (String): Regular expression which can be used for parameter validation. Defaults to: `null`.
  """

  # The quoted-atom syntax (:"allowedValues") from the generator is redundant:
  # :allowedValues is the very same atom, so the struct layout is unchanged.
  defstruct [
    :allowedValues,
    :description,
    :displayName,
    :fields,
    :immutable,
    :maxValue,
    :minValue,
    :paramId,
    :recurse,
    :repeated,
    :required,
    :type,
    :validationDescription,
    :validationHelpUrl,
    :validationRegex
  ]
end
defimpl Poison.Decoder, for: GoogleApi.BigQueryDataTransfer.V1.Model.DataSourceParameter do
  import GoogleApi.BigQueryDataTransfer.V1.Deserializer

  # Recursively decodes the nested `fields` list into DataSourceParameter
  # structs; every other attribute is left as already decoded.
  def decode(value, options) do
    deserialize(value, :fields, :list, GoogleApi.BigQueryDataTransfer.V1.Model.DataSourceParameter, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.BigQueryDataTransfer.V1.Model.DataSourceParameter do
  # Serializes the struct, skipping nil attributes, via the shared helper.
  def encode(value, options) do
    value
    |> GoogleApi.BigQueryDataTransfer.V1.Deserializer.serialize_non_nil(options)
  end
end
| 47.74026 | 338 | 0.746464 |
7965d03c712c10ca4f75ed28d93e2cd8310abe30 | 2,177 | exs | Elixir | test/parser/nonvoid_elements_aliases_test.exs | mhanberg/dsl | 21e3c7e3a24fabecd436fb09271d90c3e9fb8c30 | [
"MIT"
] | null | null | null | test/parser/nonvoid_elements_aliases_test.exs | mhanberg/dsl | 21e3c7e3a24fabecd436fb09271d90c3e9fb8c30 | [
"MIT"
] | null | null | null | test/parser/nonvoid_elements_aliases_test.exs | mhanberg/dsl | 21e3c7e3a24fabecd436fb09271d90c3e9fb8c30 | [
"MIT"
] | null | null | null | defmodule Temple.Parser.NonvoidElementsAliasesTest do
use ExUnit.Case, async: true
alias Temple.Parser.NonvoidElementsAliases
alias Temple.Parser.ElementList
# Exercises the applicable?/1 predicate against quoted ASTs: plain nonvoid
# elements (div) and the double-underscore alias form (select__) must match;
# qualified calls and keyword-taking helpers must not.
describe "applicable?/1" do
test "returns true when the node is a nonvoid element or alias" do
raw_asts = [
quote do
div do
"foo"
end
end,
quote do
select__ do
option do
"Label"
end
end
end
]
for raw_ast <- raw_asts do
assert NonvoidElementsAliases.applicable?(raw_ast)
end
end
test "returns false when the node is anything other than a nonvoid element or alias" do
raw_asts = [
quote do
Temple.div do
"foo"
end
end,
quote do
link to: "/the/route" do
"Label"
end
end
]
for raw_ast <- raw_asts do
refute NonvoidElementsAliases.applicable?(raw_ast)
end
end
end
# Verifies that run/2 turns a nested quoted template into the parser's struct
# tree: element names lose the alias suffix (select__ -> "select"), attrs are
# preserved (including the unbound `var` AST node), and literal text becomes
# Temple.Parser.Text leaves inside nested ElementLists.
describe "run/2" do
test "adds a node to the buffer" do
raw_ast =
quote do
div class: "foo", id: var do
select__ do
option do
"foo"
end
end
end
end
ast = NonvoidElementsAliases.run(raw_ast)
assert %NonvoidElementsAliases{
name: "div",
attrs: [class: "foo", id: {:var, [], _}],
children: %ElementList{
children: [
%NonvoidElementsAliases{
name: "select",
children: %ElementList{
children: [
%NonvoidElementsAliases{
name: "option",
children: %ElementList{
children: [
%Temple.Parser.Text{text: "foo"}
]
}
}
]
}
}
]
}
} = ast
end
end
end
| 24.188889 | 91 | 0.430409 |
7965eff98e1600eb07cb6c020011ca42927b8b2d | 5,283 | ex | Elixir | lib/ucx_ucc_web/coherence_messages.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | lib/ucx_ucc_web/coherence_messages.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | lib/ucx_ucc_web/coherence_messages.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | defmodule UcxUccWeb.Coherence.Messages do
@moduledoc """
Application facing messages generated by the Coherence application.
This module was created by the coh.install mix task. It contains all the
messages used in the coherence application except those in other generated
files like the view and templates.
To assist in upgrading Coherence, the `Coherence.Messages behaviour will
alway contain every message for the current version. This will help in upgrades
to ensure the user had added new the new messages from the current version.
"""
@behaviour Coherence.Messages
import UcxUccWeb.Gettext
# Change this to override the "coherence" gettext domain. If you would like
# the coherence message to be part of your projects domain change it to "default"
@domain "coherence"
##################
# Messages
# Coherence.Messages callbacks. Each function returns a translated string via
# Gettext in the @domain domain. The literal msgids below are the extraction
# keys for .po files — they must stay byte-identical or existing translations
# silently stop matching.
def account_already_confirmed, do: dgettext(@domain, "Account already confirmed.")
def account_is_not_locked, do: dgettext(@domain, "Account is not locked.")
def account_updated_successfully, do: dgettext(@domain, "Account updated successfully.")
def already_confirmed, do: dgettext(@domain, "already confirmed")
def already_locked, do: dgettext(@domain, "already locked")
def already_logged_in, do: dgettext(@domain, "Already logged in.")
def cant_be_blank, do: dgettext(@domain, "can't be blank")
def cant_find_that_token, do: dgettext(@domain, "Can't find that token")
def confirmation_email_sent, do: dgettext(@domain, "Confirmation email sent.")
def confirmation_token_expired, do: dgettext(@domain, "Confirmation token expired.")
def could_not_find_that_email_address, do: dgettext(@domain, "Could not find that email address")
def forgot_your_password, do: dgettext(@domain, "Forgot your password?")
def http_authentication_required, do: dgettext(@domain, "HTTP Authentication Required")
# Takes bindings (e.g. login_field: "email") interpolated into the message.
def incorrect_login_or_password(opts), do: dgettext(@domain, "Incorrect %{login_field} or password.", opts)
def invalid_current_password, do: dgettext(@domain, "invalid current password")
def invalid_invitation, do: dgettext(@domain, "Invalid Invitation. Please contact the site administrator.")
def invalid_request, do: dgettext(@domain, "Invalid Request.")
def invalid_confirmation_token, do: dgettext(@domain, "Invalid confirmation token.")
def invalid_email_or_password, do: dgettext(@domain, "Invalid email or password.")
def invalid_invitation_token, do: dgettext(@domain, "Invalid invitation token.")
def invalid_reset_token, do: dgettext(@domain, "Invalid reset token.")
def invalid_unlock_token, do: dgettext(@domain, "Invalid unlock token.")
def invitation_already_sent, do: dgettext(@domain, "Invitation already sent.")
def invitation_sent, do: dgettext(@domain, "Invitation sent.")
def invite_someone, do: dgettext(@domain, "Invite Someone")
def maximum_login_attempts_exceeded, do: dgettext(@domain, "Maximum Login attempts exceeded. Your account has been locked.")
def need_an_account, do: dgettext(@domain, "Need An Account?")
def not_locked, do: dgettext(@domain, "not locked")
def password_reset_token_expired, do: dgettext(@domain, "Password reset token expired.")
def password_updated_successfully, do: dgettext(@domain, "Password updated successfully.")
def problem_confirming_user_account, do: dgettext(@domain, "Problem confirming user account. Please contact the system administrator.")
def registration_created_successfully, do: dgettext(@domain, "Registration created successfully.")
def required, do: dgettext(@domain, "required")
def resend_confirmation_email, do: dgettext(@domain, "Resend confirmation email")
def reset_email_sent, do: dgettext(@domain, "Reset email sent. Check your email for a reset link.")
def restricted_area, do: dgettext(@domain, "Restricted Area")
def send_an_unlock_email, do: dgettext(@domain, "Send an unlock email")
def sign_in, do: dgettext(@domain, "Sign In")
def sign_out, do: dgettext(@domain, "Sign Out")
def signed_in_successfully, do: dgettext(@domain, "Signed in successfully.")
def too_many_failed_login_attempts, do: dgettext(@domain, "Too many failed login attempts. Account has been locked.")
def unauthorized_ip_address, do: dgettext(@domain, "Unauthorized IP Address")
def unlock_instructions_sent, do: dgettext(@domain, "Unlock Instructions sent.")
def user_account_confirmed_successfully, do: dgettext(@domain, "User account confirmed successfully.")
def user_already_has_an_account, do: dgettext(@domain, "User already has an account!")
def you_must_confirm_your_account, do: dgettext(@domain, "You must confirm your account before you can login.")
def your_account_has_been_unlocked, do: dgettext(@domain, "Your account has been unlocked")
def your_account_is_not_locked, do: dgettext(@domain, "Your account is not locked.")
# Takes bindings for %{user_token} and %{error}.
def verify_user_token(opts),
do: dgettext(@domain, "Invalid %{user_token} error: %{error}", opts)
def you_are_using_an_invalid_security_token,
do: dgettext(@domain, "You are using an invalid security token for this site! This security\n" <>
"violation has been logged.\n")
def mailer_required, do: dgettext(@domain, "Mailer configuration required!")
def account_is_inactive(), do: dgettext(@domain, "Account is inactive!")
end
| 66.0375 | 137 | 0.767556 |
7965f2bf05ea3eb146f5fc7e2853106477775ee8 | 384 | ex | Elixir | lib/core/bot_state/supervisor.ex | bahanni/custom_rpi4 | ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5 | [
"MIT"
] | null | null | null | lib/core/bot_state/supervisor.ex | bahanni/custom_rpi4 | ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5 | [
"MIT"
] | null | null | null | lib/core/bot_state/supervisor.ex | bahanni/custom_rpi4 | ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5 | [
"MIT"
] | null | null | null | defmodule FarmbotOS.BotState.Supervisor do
use Supervisor
# Starts the supervisor registered under this module's name.
def start_link(args) do
Supervisor.start_link(__MODULE__, args, name: __MODULE__)
end
# Supervisor callback. :one_for_all — if any child dies, all children are
# restarted together.
def init([]) do
Supervisor.init(children(), strategy: :one_for_all)
end
# Child specs for the bot-state subtree, started in this order.
def children,
do: [
FarmbotOS.BotState,
FarmbotOS.BotState.FileSystem,
FarmbotOS.BotState.SchedulerUsageReporter
]
end
| 20.210526 | 61 | 0.710938 |
796604c27506530689895dcd2d3e0872610ec5e6 | 1,228 | exs | Elixir | mix.exs | regularfellow/downstream | 436de5d57eabc58e60e1bf43c3dab78170dbe8e3 | [
"MIT"
] | null | null | null | mix.exs | regularfellow/downstream | 436de5d57eabc58e60e1bf43c3dab78170dbe8e3 | [
"MIT"
] | null | null | null | mix.exs | regularfellow/downstream | 436de5d57eabc58e60e1bf43c3dab78170dbe8e3 | [
"MIT"
] | null | null | null | defmodule Downstream.MixProject do
use Mix.Project
# Mix project definition: package metadata, docs, coverage tooling
# (ExCoveralls via `preferred_cli_env`), and Elixir ~> 1.11 requirement.
def project do
[
app: :downstream,
deps: deps(),
docs: docs(),
description: "An Elixir Client for Streaming Downloads",
elixir: "~> 1.11",
name: "Downstream",
package: package(),
preferred_cli_env: ["coveralls.html": :test],
start_permanent: Mix.env() == :prod,
source_url: "https://github.com/mpiercy827/downstream",
test_coverage: [tool: ExCoveralls],
version: "1.0.0"
]
end
# OTP application configuration.
# NOTE(review): this uses the legacy `:applications` key, which disables
# Mix's automatic runtime-application inference — consider migrating to
# `extra_applications`; left unchanged here since it alters startup behavior.
def application do
[
applications: [:httpoison]
]
end
# Dependencies: httpoison is the only runtime dep; the rest are dev/test
# tooling (linting, typechecking, coverage, docs, mocking).
defp deps do
[
{:credo, "~> 1.0", only: [:dev, :test], runtime: false},
{:dialyxir, "~> 1.0", only: [:dev, :test], runtime: false},
{:excoveralls, "~> 0.10", only: [:test]},
{:ex_doc, "~> 0.19", only: :dev, runtime: false},
{:httpoison, "~> 1.7"},
{:mimic, "~> 0.2", only: :test}
]
end
# ExDoc configuration: land on the `Downstream` module page in generated docs.
defp docs, do: [main: "Downstream"]
# Hex package metadata: published files, maintainers, license, and links.
defp package do
[
files: ["lib", "LICENSE", "mix.exs", "README.md"],
maintainers: ["Matt Piercy"],
licenses: ["MIT"],
links: %{
"github" => "https://github.com/mpiercy827/downstream"
}
]
end
end
| 22.327273 | 65 | 0.534202 |
79660a473714fc4fa40abbeeefe56c01915cb9ee | 16,066 | ex | Elixir | lib/aws/generated/sfn.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/sfn.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/sfn.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.SFN do
@moduledoc """
AWS Step Functions
AWS Step Functions is a service that lets you coordinate the components of
distributed applications and microservices using visual workflows.
You can use Step Functions to build applications from individual components,
each of which performs a discrete function, or *task*, allowing you to scale and
change applications quickly. Step Functions provides a console that helps
visualize the components of your application as a series of steps. Step
Functions automatically triggers and tracks each step, and retries steps when
there are errors, so your application executes predictably and in the right
order every time. Step Functions logs the state of each step, so you can quickly
diagnose and debug any issues.
Step Functions manages operations and underlying infrastructure to ensure your
application is available at any scale. You can run tasks on AWS, your own
servers, or any system that has access to AWS. You can access and use Step
Functions using the console, the AWS SDKs, or an HTTP API. For more information
about Step Functions, see the * [AWS Step Functions Developer Guide](https://docs.aws.amazon.com/step-functions/latest/dg/welcome.html) *.
"""
# Auto-generated API wrappers (see the codegen warning at the top of this
# file). Each function forwards to the shared `request/4` helper defined
# elsewhere in this module with the Step Functions action name as a string;
# keep these byte-identical to the generator's output.
@doc """
Creates an activity.
An activity is a task that you write in any programming language and host on any
machine that has access to AWS Step Functions. Activities must poll Step
Functions using the `GetActivityTask` API action and respond using `SendTask*`
API actions. This function lets Step Functions know the existence of your
activity and returns an identifier for use in a state machine and when polling
from the activity.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
`CreateActivity` is an idempotent API. Subsequent requests won’t create a
duplicate resource if it was already created. `CreateActivity`'s idempotency
check is based on the activity `name`. If a following request has different
`tags` values, Step Functions will ignore these differences and treat it as an
idempotent request of the previous. In this case, `tags` will not be updated,
even if they are different.
"""
def create_activity(client, input, options \\ []) do
request(client, "CreateActivity", input, options)
end
@doc """
Creates a state machine.
A state machine consists of a collection of states that can do work (`Task`
states), determine to which states to transition next (`Choice` states), stop an
execution with an error (`Fail` states), and so on. State machines are specified
using a JSON-based, structured language. For more information, see [Amazon States
Language](https://docs.aws.amazon.com/step-functions/latest/dg/concepts-amazon-states-language.html)
in the AWS Step Functions User Guide.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
`CreateStateMachine` is an idempotent API. Subsequent requests won’t create a
duplicate resource if it was already created. `CreateStateMachine`'s idempotency
check is based on the state machine `name`, `definition`, `type`,
`LoggingConfiguration` and `TracingConfiguration`. If a following request has a
different `roleArn` or `tags`, Step Functions will ignore these differences and
treat it as an idempotent request of the previous. In this case, `roleArn` and
`tags` will not be updated, even if they are different.
"""
def create_state_machine(client, input, options \\ []) do
request(client, "CreateStateMachine", input, options)
end
@doc """
Deletes an activity.
"""
def delete_activity(client, input, options \\ []) do
request(client, "DeleteActivity", input, options)
end
@doc """
Deletes a state machine.
This is an asynchronous operation: It sets the state machine's status to
`DELETING` and begins the deletion process.
For `EXPRESS`state machines, the deletion will happen eventually (usually less
than a minute). Running executions may emit logs after `DeleteStateMachine` API
is called.
"""
def delete_state_machine(client, input, options \\ []) do
request(client, "DeleteStateMachine", input, options)
end
@doc """
Describes an activity.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
"""
def describe_activity(client, input, options \\ []) do
request(client, "DescribeActivity", input, options)
end
@doc """
Describes an execution.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
This API action is not supported by `EXPRESS` state machines.
"""
def describe_execution(client, input, options \\ []) do
request(client, "DescribeExecution", input, options)
end
@doc """
Describes a state machine.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
"""
def describe_state_machine(client, input, options \\ []) do
request(client, "DescribeStateMachine", input, options)
end
@doc """
Describes the state machine associated with a specific execution.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
This API action is not supported by `EXPRESS` state machines.
"""
def describe_state_machine_for_execution(client, input, options \\ []) do
request(client, "DescribeStateMachineForExecution", input, options)
end
@doc """
Used by workers to retrieve a task (with the specified activity ARN) which has
been scheduled for execution by a running state machine.

This initiates a long poll, where the service holds the HTTP connection open and
responds as soon as a task becomes available (i.e. an execution of a task of
this type is needed.) The maximum time the service holds on to the request
before responding is 60 seconds. If no task is available within 60 seconds, the
poll returns a `taskToken` with a null string.

Workers should set their client side socket timeout to at least 65 seconds (5
seconds higher than the maximum time the service may hold the poll request).

Polling with `GetActivityTask` can cause latency in some implementations. See
[Avoid Latency When Polling for Activity Tasks](https://docs.aws.amazon.com/step-functions/latest/dg/bp-activity-pollers.html)
in the Step Functions Developer Guide.
"""
def get_activity_task(client, input, options \\ []),
  do: request(client, "GetActivityTask", input, options)

@doc """
Returns the history of the specified execution as a list of events.

By default, the results are returned in ascending order of the `timeStamp` of
the events. Use the `reverseOrder` parameter to get the latest events first.

If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other arguments
unchanged. Each pagination token expires after 24 hours. Using an expired
pagination token will return an *HTTP 400 InvalidToken* error.

This API action is not supported by `EXPRESS` state machines.
"""
def get_execution_history(client, input, options \\ []),
  do: request(client, "GetExecutionHistory", input, options)

@doc """
Lists the existing activities.

If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other arguments
unchanged. Each pagination token expires after 24 hours. Using an expired
pagination token will return an *HTTP 400 InvalidToken* error.

This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
"""
def list_activities(client, input, options \\ []),
  do: request(client, "ListActivities", input, options)

@doc """
Lists the executions of a state machine that meet the filtering criteria.

Results are sorted by time, with the most recent execution first.

If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other arguments
unchanged. Each pagination token expires after 24 hours. Using an expired
pagination token will return an *HTTP 400 InvalidToken* error.

This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.

This API action is not supported by `EXPRESS` state machines.
"""
def list_executions(client, input, options \\ []),
  do: request(client, "ListExecutions", input, options)

@doc """
Lists the existing state machines.

If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other arguments
unchanged. Each pagination token expires after 24 hours. Using an expired
pagination token will return an *HTTP 400 InvalidToken* error.

This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
"""
def list_state_machines(client, input, options \\ []),
  do: request(client, "ListStateMachines", input, options)

@doc """
List tags for a given resource.

Tags may only contain Unicode letters, digits, white space, or these symbols: `_
. : / = + - @`.
"""
def list_tags_for_resource(client, input, options \\ []),
  do: request(client, "ListTagsForResource", input, options)
@doc """
Used by activity workers and task states using the
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern to report that the task identified by the `taskToken` failed.
"""
def send_task_failure(client, input, options \\ []),
  do: request(client, "SendTaskFailure", input, options)

@doc """
Used by activity workers and task states using the
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern to report to Step Functions that the task represented by the specified
`taskToken` is still making progress.

This action resets the `Heartbeat` clock. The `Heartbeat` threshold is specified
in the state machine's Amazon States Language definition (`HeartbeatSeconds`).
This action does not in itself create an event in the execution history.
However, if the task times out, the execution history contains an
`ActivityTimedOut` entry for activities, or a `TaskTimedOut` entry for tasks
using the [job
run](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-sync)
or
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern.

The `Timeout` of a task, defined in the state machine's Amazon States Language
definition, is its maximum allowed duration, regardless of the number of
`SendTaskHeartbeat` requests received. Use `HeartbeatSeconds` to configure the
timeout interval for heartbeats.
"""
def send_task_heartbeat(client, input, options \\ []),
  do: request(client, "SendTaskHeartbeat", input, options)

@doc """
Used by activity workers and task states using the
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern to report that the task identified by the `taskToken` completed
successfully.
"""
def send_task_success(client, input, options \\ []),
  do: request(client, "SendTaskSuccess", input, options)

@doc """
Starts a state machine execution.

`StartExecution` is idempotent. If `StartExecution` is called with the same name
and input as a running execution, the call will succeed and return the same
response as the original request. If the execution is closed or if the input is
different, it will return a 400 `ExecutionAlreadyExists` error. Names can be
reused after 90 days.
"""
def start_execution(client, input, options \\ []),
  do: request(client, "StartExecution", input, options)

@doc """
Stops an execution.

This API action is not supported by `EXPRESS` state machines.
"""
def stop_execution(client, input, options \\ []),
  do: request(client, "StopExecution", input, options)

@doc """
Add a tag to a Step Functions resource.

An array of key-value pairs. For more information, see [Using Cost Allocation Tags](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html)
in the *AWS Billing and Cost Management User Guide*, and [Controlling Access Using IAM
Tags](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_iam-tags.html).

Tags may only contain Unicode letters, digits, white space, or these symbols: `_
. : / = + - @`.
"""
def tag_resource(client, input, options \\ []),
  do: request(client, "TagResource", input, options)

@doc """
Remove a tag from a Step Functions resource
"""
def untag_resource(client, input, options \\ []),
  do: request(client, "UntagResource", input, options)

@doc """
Updates an existing state machine by modifying its `definition`, `roleArn`, or
`loggingConfiguration`.

Running executions will continue to use the previous `definition` and `roleArn`.
You must include at least one of `definition` or `roleArn` or you will receive a
`MissingRequiredParameter` error.

All `StartExecution` calls within a few seconds will use the updated
`definition` and `roleArn`. Executions started immediately after calling
`UpdateStateMachine` may use the previous state machine `definition` and
`roleArn`.
"""
def update_state_machine(client, input, options \\ []),
  do: request(client, "UpdateStateMachine", input, options)
@spec request(AWS.Client.t(), binary(), map(), list()) ::
        {:ok, map() | nil, map()}
        | {:error, term()}
defp request(client, action, input, options) do
  # All Step Functions calls go through the "states" service endpoint.
  client = %{client | service: "states"}
  host = build_host("states", client)
  url = build_url(host, client)

  base_headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.0"},
    {"X-Amz-Target", "AWSStepFunctions.#{action}"}
  ]

  payload = encode!(client, input)
  # SigV4 signing needs the final payload, so encode before signing.
  signed_headers = AWS.Request.sign_v4(client, "POST", url, base_headers, payload)
  post(client, url, payload, signed_headers, options)
end
# Performs the signed POST and normalizes the transport result:
#   * 200 with an empty body  -> {:ok, nil, response}
#   * 200 with a JSON body    -> {:ok, decoded_body, response}
#   * any other status        -> {:error, {:unexpected_response, response}}
#   * transport failure       -> the {:error, reason} tuple unchanged
defp post(client, url, payload, headers, options) do
  case AWS.Client.request(client, :post, url, payload, headers, options) do
    {:ok, %{status_code: 200, body: ""} = response} ->
      {:ok, nil, response}

    {:ok, %{status_code: 200, body: body} = response} ->
      {:ok, decode!(client, body), response}

    {:ok, response} ->
      {:error, {:unexpected_response, response}}

    {:error, _reason} = error ->
      error
  end
end
# Builds the request host from the client configuration.
# Clause order matters: a client with region "local" and an explicit
# endpoint must hit the first clause, otherwise it would also match the
# generic "<prefix>.<region>.<endpoint>" clause below.

# Local development with an explicit endpoint override.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
  endpoint
end

# Local development without an endpoint: default to localhost.
defp build_host(_endpoint_prefix, %{region: "local"}) do
  "localhost"
end

# Standard AWS host, e.g. "states.us-east-1.amazonaws.com".
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  "#{endpoint_prefix}.#{region}.#{endpoint}"
end
# Assembles the full request URL from the host plus the client's
# protocol and port (e.g. "https://host:443/").
defp build_url(host, %{proto: proto, port: port}) do
  "#{proto}://#{host}:#{port}/"
end
# JSON (de)serialization delegated to the configured AWS client.
defp encode!(client, payload), do: AWS.Client.encode!(client, payload, :json)

defp decode!(client, payload), do: AWS.Client.decode!(client, payload, :json)
end
| 40.165 | 189 | 0.733661 |
79661ef83bf25f7583a2a4469bc251de70c40ca1 | 1,475 | ex | Elixir | deps/plug/lib/plug/session/store.ex | JoakimEskils/elixir-absinthe | d81e24ec7c7b1164e6d152101dd50422f192d7e9 | [
"MIT"
] | null | null | null | deps/plug/lib/plug/session/store.ex | JoakimEskils/elixir-absinthe | d81e24ec7c7b1164e6d152101dd50422f192d7e9 | [
"MIT"
] | null | null | null | deps/plug/lib/plug/session/store.ex | JoakimEskils/elixir-absinthe | d81e24ec7c7b1164e6d152101dd50422f192d7e9 | [
"MIT"
] | null | null | null | defmodule Plug.Session.Store do
@moduledoc """
Specification for session stores.
"""
@typedoc """
The internal reference to the session in the store.
"""
@type sid :: term | nil
@typedoc """
The cookie value that will be sent in cookie headers. This value should be
base64 encoded to avoid security issues.
"""
@type cookie :: binary
@typedoc """
The session contents, the final data to be stored after it has been built
with `Plug.Conn.put_session/3` and the other session manipulating functions.
"""
@type session :: map
@doc """
Initializes the store.
The options returned from this function will be given
to `get/3`, `put/4` and `delete/3`.
"""
@callback init(Plug.opts) :: Plug.opts
@doc """
Parses the given cookie.
Returns a session id and the session contents. The session id is any
value that can be used to identify the session by the store.
The session id may be nil in case the cookie does not identify any
value in the store. The session contents must be a map.
"""
@callback get(Plug.Conn.t, cookie, Plug.opts) :: {sid, session}
@doc """
Stores the session associated with given session id.
If `nil` is given as id, a new session id should be
generated and returned.
"""
@callback put(Plug.Conn.t, sid, any, Plug.opts) :: cookie
@doc """
Removes the session associated with given session id from the store.
"""
@callback delete(Plug.Conn.t, sid, Plug.opts) :: :ok
end
| 26.818182 | 78 | 0.686102 |
7966303254f3d1f6f6453846cb51bf13c6fd5e72 | 699 | ex | Elixir | lib/rockelivery/orders/validate_and_mutiply_items.ex | riosvictor/rockelivery | d34c8ccd76f95bb5bc8131f8ef1fb9111f554ebb | [
"MIT"
] | 1 | 2022-03-16T20:41:29.000Z | 2022-03-16T20:41:29.000Z | lib/rockelivery/orders/validate_and_mutiply_items.ex | riosvictor/rockelivery | d34c8ccd76f95bb5bc8131f8ef1fb9111f554ebb | [
"MIT"
] | null | null | null | lib/rockelivery/orders/validate_and_mutiply_items.ex | riosvictor/rockelivery | d34c8ccd76f95bb5bc8131f8ef1fb9111f554ebb | [
"MIT"
] | null | null | null | defmodule Rockelivery.Orders.ValidateAndMutiplyItems do
def call(items, items_ids, items_params) do
items_map = Map.new(items, fn item -> {item.id, item} end)
items_ids
|> Enum.map(fn id -> {id, Map.get(items_map, id)} end)
|> Enum.any?(fn {_id, value} -> is_nil(value) end)
|> multiply_items(items_map, items_params)
end
defp multiply_items(true, _items, _items_params), do: {:error, "Invalid ids!"}
defp multiply_items(false, items, items_params) do
items =
Enum.reduce(items_params, [], fn %{"id" => id, "quantity" => quantity}, acc ->
item = Map.get(items, id)
acc ++ List.duplicate(item, quantity)
end)
{:ok, items}
end
end
| 30.391304 | 84 | 0.642346 |
79666d11c23707f8c377ccdd9160d80a443a59ca | 76 | exs | Elixir | test/views/page_view_test.exs | retrixer/retrixer | 5efeb9cd51fdf728cb06a4be5678ca824dc50568 | [
"MIT"
] | 3 | 2015-11-11T11:49:02.000Z | 2015-11-12T18:20:43.000Z | test/views/page_view_test.exs | retrixer/retrixer | 5efeb9cd51fdf728cb06a4be5678ca824dc50568 | [
"MIT"
] | null | null | null | test/views/page_view_test.exs | retrixer/retrixer | 5efeb9cd51fdf728cb06a4be5678ca824dc50568 | [
"MIT"
] | null | null | null | defmodule Retrixer.PageViewTest do
use Retrixer.ConnCase, async: true
end
| 19 | 36 | 0.815789 |
7966868f396c0d0fc9aba8511ba4d1ec2eb9850a | 271 | exs | Elixir | examples/phx_server/config/test.exs | hrzndhrn/json_rpc | c9a1d70b92abbf59260be6394bb5ecc41229a599 | [
"MIT"
] | null | null | null | examples/phx_server/config/test.exs | hrzndhrn/json_rpc | c9a1d70b92abbf59260be6394bb5ecc41229a599 | [
"MIT"
] | null | null | null | examples/phx_server/config/test.exs | hrzndhrn/json_rpc | c9a1d70b92abbf59260be6394bb5ecc41229a599 | [
"MIT"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :phx_server, PhxServerWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 24.636364 | 56 | 0.741697 |
7966a6226487a62b27fdf8275ed2ae5c9aeb5584 | 1,425 | ex | Elixir | lib/flawless/types/datetime.ex | colinsmetz/flawless | 87fd76f88758f16b42813b1e0f0f0ee8163d9185 | [
"MIT"
] | null | null | null | lib/flawless/types/datetime.ex | colinsmetz/flawless | 87fd76f88758f16b42813b1e0f0f0ee8163d9185 | [
"MIT"
] | null | null | null | lib/flawless/types/datetime.ex | colinsmetz/flawless | 87fd76f88758f16b42813b1e0f0f0ee8163d9185 | [
"MIT"
] | null | null | null | defmodule Flawless.Types.DateTime do
alias Flawless.Helpers
alias Flawless.Rule
def datetime(opts \\ []) do
Helpers.opaque_struct_type(
DateTime,
opts,
converter: &cast_from/2,
shortcut_rules: [
after: &after_datetime/1,
before: &before_datetime/1,
between: &between_datetimes/2
]
)
end
def after_datetime(datetime) do
Rule.rule(
fn value -> DateTime.compare(value, datetime) in [:gt, :eq] end,
"The datetime should be later than #{datetime}."
)
end
def before_datetime(datetime) do
Rule.rule(
fn value -> DateTime.compare(value, datetime) in [:lt, :eq] end,
"The datetime should be earlier than #{datetime}."
)
end
def between_datetimes(datetime1, datetime2) do
Rule.rule(
fn value ->
DateTime.compare(value, datetime1) in [:gt, :eq] and
DateTime.compare(value, datetime2) in [:lt, :eq]
end,
"The datetime should be comprised between #{datetime1} and #{datetime2}."
)
end
defp cast_from(value, :string) do
value
|> DateTime.from_iso8601()
|> case do
{:ok, datetime, _} -> {:ok, datetime}
_ -> :error
end
end
defp cast_from(value, :integer) do
value
|> DateTime.from_unix()
|> case do
{:ok, datetime} -> {:ok, datetime}
_ -> :error
end
end
defp cast_from(_value, _type), do: :error
end
| 22.983871 | 79 | 0.607719 |
7966c61c407d768bc1d1167531f615ce2adb2c86 | 274 | ex | Elixir | sample_application/lib/release_tasks.ex | Yamilquery/kaufmann_ex | 5158ae8f524f8780647766ff35db88ebd761da29 | [
"MIT"
] | 84 | 2018-03-20T08:19:10.000Z | 2022-01-30T07:40:56.000Z | sample_application/lib/release_tasks.ex | Yamilquery/kaufmann_ex | 5158ae8f524f8780647766ff35db88ebd761da29 | [
"MIT"
] | 23 | 2018-03-29T15:15:56.000Z | 2019-12-04T14:53:57.000Z | sample_application/lib/release_tasks.ex | Yamilquery/kaufmann_ex | 5158ae8f524f8780647766ff35db88ebd761da29 | [
"MIT"
] | 8 | 2018-07-03T18:18:27.000Z | 2022-03-08T14:04:09.000Z | defmodule Sample.ReleaseTasks do
def migrate_schemas do
Application.load(:kaufmann_ex)
KaufmannEx.ReleaseTasks.migrate_schemas(:sample)
end
def reinit_service do
Application.load(:kaufmann_ex)
KaufmannEx.ReleaseTasks.reinit_service(:sample)
end
end
| 22.833333 | 52 | 0.781022 |
7966d9b89eee606767c8d2f7d6b34270774a0711 | 5,075 | ex | Elixir | lib/thrift/generator/binary/framed/server.ex | pguillory/elixir-thrift | adff91ece6c303db3fd5cfc641fd43249d7c1dc9 | [
"Apache-2.0"
] | null | null | null | lib/thrift/generator/binary/framed/server.ex | pguillory/elixir-thrift | adff91ece6c303db3fd5cfc641fd43249d7c1dc9 | [
"Apache-2.0"
] | null | null | null | lib/thrift/generator/binary/framed/server.ex | pguillory/elixir-thrift | adff91ece6c303db3fd5cfc641fd43249d7c1dc9 | [
"Apache-2.0"
] | null | null | null | defmodule Thrift.Generator.Binary.Framed.Server do
@moduledoc false
alias Thrift.AST.Function
alias Thrift.Generator.{
Service,
Utils
}
alias Thrift.Parser.FileGroup
def generate(service_module, service, file_group) do
functions =
service.functions
|> Map.values()
|> Enum.map(&generate_handler_function(file_group, service_module, &1))
quote do
defmodule Binary.Framed.Server do
@moduledoc false
require Logger
alias Thrift.Binary.Framed.Server, as: ServerImpl
defdelegate stop(name), to: ServerImpl
def start_link(handler_module, port, opts \\ []) do
ServerImpl.start_link(__MODULE__, port, handler_module, opts)
end
unquote_splicing(functions)
def handle_thrift(method, _binary_data, _handler_module) do
error =
Thrift.TApplicationException.exception(
type: :unknown_method,
message: "Unknown method: #{method}"
)
{:client_error, error}
end
end
end
end
def generate_handler_function(file_group, service_module, %Function{params: []} = function) do
fn_name = Atom.to_string(function.name)
handler_fn_name = Utils.underscore(function.name)
response_module = Module.concat(service_module, Service.module_name(function, :response))
handler_args = []
body = build_responder(function.return_type, handler_fn_name, handler_args, response_module)
handler = wrap_with_try_catch(body, function, file_group, response_module)
quote do
def handle_thrift(unquote(fn_name), _binary_data, handler_module) do
unquote(handler)
end
end
end
def generate_handler_function(file_group, service_module, function) do
fn_name = Atom.to_string(function.name)
args_module = Module.concat(service_module, Service.module_name(function, :args))
response_module = Module.concat(service_module, Service.module_name(function, :response))
struct_matches =
Enum.map(function.params, fn param ->
{param.name, Macro.var(param.name, nil)}
end)
quote do
def handle_thrift(unquote(fn_name), binary_data, handler_module) do
case unquote(Module.concat(args_module, BinaryProtocol)).deserialize(binary_data) do
{%unquote(args_module){unquote_splicing(struct_matches)}, ""} ->
unquote(build_handler_call(file_group, function, response_module))
{_, extra} ->
raise Thrift.TApplicationException,
type: :protocol_error,
message: "Could not decode #{inspect(extra)}"
end
end
end
end
defp build_handler_call(file_group, function, response_module) do
handler_fn_name = Utils.underscore(function.name)
handler_args = Enum.map(function.params, &Macro.var(&1.name, nil))
body = build_responder(function.return_type, handler_fn_name, handler_args, response_module)
wrap_with_try_catch(body, function, file_group, response_module)
end
defp wrap_with_try_catch(body, function, file_group, response_module) do
# Quoted clauses for exception types defined by the schema.
exception_clauses =
Enum.flat_map(function.exceptions, fn
exc ->
resolved = FileGroup.resolve(file_group, exc)
dest_module = FileGroup.dest_module(file_group, resolved.type)
error_var = Macro.var(exc.name, nil)
field_setter = quote do: {unquote(exc.name), unquote(error_var)}
quote do
:error, %unquote(dest_module){} = unquote(error_var) ->
response = %unquote(response_module){unquote(field_setter)}
{:reply,
unquote(Module.concat(response_module, BinaryProtocol)).serialize(response)}
end
end)
# Quoted clauses for our standard catch clauses (common to all functions).
catch_clauses =
quote do
kind, reason ->
formatted_exception = Exception.format(kind, reason, System.stacktrace())
Logger.error("Exception not defined in thrift spec was thrown: #{formatted_exception}")
error =
Thrift.TApplicationException.exception(
type: :internal_error,
message: "Server error: #{formatted_exception}"
)
{:server_error, error}
end
quote do
try do
unquote(body)
catch
unquote(Enum.concat(exception_clauses, catch_clauses))
end
end
end
defp build_responder(:void, handler_fn_name, handler_args, _response_module) do
quote do
_result = handler_module.unquote(handler_fn_name)(unquote_splicing(handler_args))
:noreply
end
end
defp build_responder(_, handler_fn_name, handler_args, response_module) do
quote do
result = handler_module.unquote(handler_fn_name)(unquote_splicing(handler_args))
response = %unquote(response_module){success: result}
{:reply, unquote(Module.concat(response_module, BinaryProtocol)).serialize(response)}
end
end
end
| 33.609272 | 97 | 0.677438 |
7966fc2f293a66f859383167e083ca2f889b66e5 | 1,089 | ex | Elixir | lib/bitcoin/block/compact_bits.ex | coinscript/bitcoinsv-elixir | 2dda03c81edc5662743ed2922abb5b1910d9c09a | [
"Apache-2.0"
] | 2 | 2019-08-12T04:53:57.000Z | 2019-09-03T03:47:33.000Z | lib/bitcoin/block/compact_bits.ex | coinscript/bitcoinsv-elixir | 2dda03c81edc5662743ed2922abb5b1910d9c09a | [
"Apache-2.0"
] | null | null | null | lib/bitcoin/block/compact_bits.ex | coinscript/bitcoinsv-elixir | 2dda03c81edc5662743ed2922abb5b1910d9c09a | [
"Apache-2.0"
] | null | null | null | defmodule Bitcoin.Block.CompactBits do
@moduledoc """
256-bit target threshold in the block header is encoded in a 32-bit space (compact bits a.k.a nBits).
This module provides functions to encode and decode compact bits.
Simple and understandable explanation:
http://bitcoin.stackexchange.com/questions/2924/how-to-calculate-new-bits-value
"""
import Binary
@doc """
Encode target integer to compact bits integer
"""
@spec encode(integer) :: integer
def encode(int), do: int |> Binary.from_integer() |> encode_digits
@doc """
Docede compact bits integer to target integer
"""
@spec decode(integer) :: integer
def decode(int) do
{<<num>>, digits} = int |> Binary.from_integer() |> split_at(1)
digits
|> trim_trailing
|> pad_trailing(num)
|> to_integer
end
defp encode_digits(<<first, _::binary>> = digits) when first > 0x7F,
do: digits |> prepend(0) |> encode_digits
defp encode_digits(digits) do
digits
|> prepend(digits |> byte_size)
|> take(4)
|> pad_trailing(4)
|> to_integer
end
end
| 25.928571 | 105 | 0.672176 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.