Dataset schema (one row per source file; each row's metadata precedes its content):

  hexsha                                      string (length 40)
  size                                        int64 (2 to 991k)
  ext                                         string (2 classes)
  lang                                        string (1 class)
  max_stars_repo_path                         string (4 to 208)
  max_stars_repo_name                         string (6 to 106)
  max_stars_repo_head_hexsha                  string (length 40)
  max_stars_repo_licenses                     sequence
  max_stars_count                             int64 (1 to 33.5k, nullable)
  max_stars_repo_stars_event_min_datetime     string (length 24, nullable)
  max_stars_repo_stars_event_max_datetime     string (length 24, nullable)
  max_issues_repo_path                        string (4 to 208)
  max_issues_repo_name                        string (6 to 106)
  max_issues_repo_head_hexsha                 string (length 40)
  max_issues_repo_licenses                    sequence
  max_issues_count                            int64 (1 to 16.3k, nullable)
  max_issues_repo_issues_event_min_datetime   string (length 24, nullable)
  max_issues_repo_issues_event_max_datetime   string (length 24, nullable)
  max_forks_repo_path                         string (4 to 208)
  max_forks_repo_name                         string (6 to 106)
  max_forks_repo_head_hexsha                  string (length 40)
  max_forks_repo_licenses                     sequence
  max_forks_count                             int64 (1 to 6.91k, nullable)
  max_forks_repo_forks_event_min_datetime     string (length 24, nullable)
  max_forks_repo_forks_event_max_datetime     string (length 24, nullable)
  content                                     string (2 to 991k)
  avg_line_length                             float64 (1 to 36k)
  max_line_length                             int64 (1 to 977k)
  alphanum_fraction                           float64 (0 to 1)
hexsha: 9e2f8713940e4556ef84509dd9ed5988e094455d | size: 2,562 | ext: ex | lang: Elixir
repo: rclex/rclex_examples | path: turtle_pose_phoenix/lib/turtle_pose_phoenix_web.ex | head_hexsha: 1ad49e9d917c015161992836ad8d4e8d028615da | licenses: ["Apache-2.0"]
max_stars_count: null | max_issues_count: 4 (2022-01-25T00:53:34.000Z to 2022-03-26T15:28:18.000Z) | max_forks_count: null

content:

defmodule TurtlePosePhoenixWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use TurtlePosePhoenixWeb, :controller
use TurtlePosePhoenixWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: TurtlePosePhoenixWeb
import Plug.Conn
import TurtlePosePhoenixWeb.Gettext
alias TurtlePosePhoenixWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/turtle_pose_phoenix_web/templates",
namespace: TurtlePosePhoenixWeb
# Import convenience functions from controllers
import Phoenix.Controller,
only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]
# Include shared imports and aliases for views
unquote(view_helpers())
end
end
def live_view do
quote do
use Phoenix.LiveView,
layout: {TurtlePosePhoenixWeb.LayoutView, "live.html"}
unquote(view_helpers())
end
end
def live_component do
quote do
use Phoenix.LiveComponent
unquote(view_helpers())
end
end
def component do
quote do
use Phoenix.Component
unquote(view_helpers())
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
import Phoenix.LiveView.Router
end
end
def channel do
quote do
use Phoenix.Channel
import TurtlePosePhoenixWeb.Gettext
end
end
defp view_helpers do
quote do
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
# Import LiveView and .heex helpers (live_render, live_patch, <.form>, etc)
import Phoenix.LiveView.Helpers
# Import basic rendering functionality (render, render_layout, etc)
import Phoenix.View
import TurtlePosePhoenixWeb.ErrorHelpers
import TurtlePosePhoenixWeb.Gettext
alias TurtlePosePhoenixWeb.Router.Helpers, as: Routes
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
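# --- Illustrative sketch, not part of the original file. ---
# The moduledoc above says other modules opt in with `use TurtlePosePhoenixWeb, :controller`
# (or :view, :live_view, :router, :channel). A hypothetical controller doing so could look
# like this; the module name and response body are assumptions, not project code.
defmodule TurtlePosePhoenixWeb.ExampleController do
  use TurtlePosePhoenixWeb, :controller

  # `use` expands TurtlePosePhoenixWeb.controller/0 above, so Plug.Conn, Gettext and
  # the Routes alias are already in scope here.
  def index(conn, _params) do
    text(conn, "ok")
  end
end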
avg_line_length: 23.081081 | max_line_length: 81 | alphanum_fraction: 0.693599

hexsha: 9e2f8bffc393f9ba76e52e6ada80144e9e2137f1 | size: 21,933 | ext: ex | lang: Elixir
repo: Semro/dogehouse | path: kousa/lib/broth/socket_handler.ex | head_hexsha: da6ee5296b9d0ae2a55413f3783e9b2f8bd025c9 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null

content:

defmodule Broth.SocketHandler do
require Logger
alias Kousa.Utils.RegUtils
alias Beef.Users
alias Beef.Rooms
alias Beef.Follows
alias Ecto.UUID
alias Beef.RoomPermissions
# TODO: just collapse this into its parent module.
defmodule State do
@type t :: %__MODULE__{
awaiting_init: boolean(),
user_id: String.t(),
encoding: atom(),
compression: String.t()
}
defstruct awaiting_init: true,
user_id: nil,
platform: nil,
encoding: nil,
compression: nil
end
@behaviour :cowboy_websocket
def init(request, _state) do
compression =
request
|> :cowboy_req.parse_qs()
|> Enum.find(fn {name, _value} -> name == "compression" end)
|> case do
{_name, "zlib_json"} -> :zlib
{_name, "zlib"} -> :zlib
_ -> :json
end
encoding =
request
|> :cowboy_req.parse_qs()
|> Enum.find(fn {name, _value} -> name == "encoding" end)
|> case do
{_name, "etf"} -> :etf
_ -> :json
end
state = %State{
awaiting_init: true,
user_id: nil,
encoding: encoding,
compression: compression
}
{:cowboy_websocket, request, state}
end
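# Illustrative note, not part of the original source: the query-string parsing above
# means a client chooses its wire format at connect time, e.g.
#
#   wss://example.host/socket?compression=zlib_json&encoding=etf
#
# "zlib_json"/"zlib" select zlib-deflated binary frames and "etf" selects Erlang term
# encoding; anything else (or no parameter) falls back to plain JSON text frames.
# The host and path here are placeholders, not taken from this file.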
def websocket_init(state) do
Process.send_after(self(), {:finish_awaiting}, 10_000)
{:ok, state}
end
def websocket_info({:finish_awaiting}, state) do
if state.awaiting_init do
{:stop, state}
else
{:ok, state}
end
end
def websocket_info({:remote_send, message}, state) do
{:reply, construct_socket_msg(state.encoding, state.compression, message), state}
end
# needed for Task.async not to crash things
def websocket_info({:EXIT, _, _}, state) do
{:ok, state}
end
def websocket_info({:send_to_linked_session, message}, state) do
send(state.linked_session, message)
{:ok, state}
end
def websocket_info({:kill}, state) do
{:reply, {:close, 4003, "killed_by_server"}, state}
end
def websocket_handle({:text, "ping"}, state) do
{:reply, construct_socket_msg(state.encoding, state.compression, "pong"), state}
end
def websocket_handle({:ping, _}, state) do
{:reply, construct_socket_msg(state.encoding, state.compression, "pong"), state}
end
def websocket_handle({:text, json}, state) do
with {:ok, json} <- Poison.decode(json) do
case json["op"] do
"auth" ->
%{
"accessToken" => accessToken,
"refreshToken" => refreshToken,
"platform" => platform,
"reconnectToVoice" => reconnectToVoice,
"muted" => muted
} = json["d"]
case Kousa.Utils.TokenUtils.tokens_to_user_id(accessToken, refreshToken) do
{nil, nil} ->
{:reply, {:close, 4001, "invalid_authentication"}, state}
x ->
{user_id, tokens, user} =
case x do
{user_id, tokens} -> {user_id, tokens, Beef.Users.get_by_id(user_id)}
y -> y
end
cond do
user ->
{:ok, session} =
GenRegistry.lookup_or_start(Onion.UserSession, user_id, [
%Onion.UserSession.State{
user_id: user_id,
username: user.username,
avatar_url: user.avatarUrl,
display_name: user.displayName,
current_room_id: user.currentRoomId,
muted: muted
}
])
GenServer.call(session, {:set_pid, self()})
if tokens do
GenServer.cast(session, {:new_tokens, tokens})
end
roomIdFromFrontend = Map.get(json["d"], "currentRoomId", nil)
currentRoom =
cond do
not is_nil(user.currentRoomId) ->
# @todo this should probably go inside room business logic
room = Rooms.get_room_by_id(user.currentRoomId)
{:ok, room_session} =
GenRegistry.lookup_or_start(Onion.RoomSession, user.currentRoomId, [
%{
room_id: user.currentRoomId,
voice_server_id: room.voiceServerId
}
])
GenServer.cast(
room_session,
{:join_room, user, muted}
)
if reconnectToVoice == true do
Kousa.Room.join_vc_room(user.id, room)
end
room
not is_nil(roomIdFromFrontend) ->
case Kousa.Room.join_room(user.id, roomIdFromFrontend) do
%{room: room} -> room
_ -> nil
end
true ->
nil
end
{:reply,
construct_socket_msg(state.encoding, state.compression, %{
op: "auth-good",
d: %{user: user, currentRoom: currentRoom}
}), %{state | user_id: user_id, awaiting_init: false, platform: platform}}
true ->
{:reply, {:close, 4001, "invalid_authentication"}, state}
end
end
_ ->
if not is_nil(state.user_id) do
try do
case json do
%{"op" => op, "d" => d, "fetchId" => fetch_id} ->
{:reply,
prepare_socket_msg(
%{
op: "fetch_done",
d: f_handler(op, d, state),
fetchId: fetch_id
},
state
), state}
%{"op" => op, "d" => d} ->
handler(op, d, state)
end
rescue
e ->
err_msg = Exception.message(e)
IO.inspect(e)
Logger.error(err_msg)
Logger.error(Exception.format_stacktrace())
op = Map.get(json, "op", "")
IO.puts("error for op: " <> op)
Sentry.capture_exception(e,
stacktrace: __STACKTRACE__,
extra: %{op: op}
)
{:reply,
construct_socket_msg(state.encoding, state.compression, %{
op: "error",
d: err_msg
}), state}
end
else
{:reply, {:close, 4004, "not_authenticated"}, state}
end
end
end
end
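# Illustrative sketch, not part of the original source: reconstructed from the pattern
# match in the "auth" branch above, the first frame a client sends looks roughly like
# this; the values are placeholders and "currentRoomId" is optional (read with Map.get/3).
#
#   {
#     "op": "auth",
#     "d": {
#       "accessToken": "...",
#       "refreshToken": "...",
#       "platform": "web",
#       "reconnectToVoice": false,
#       "muted": false,
#       "currentRoomId": null
#     }
#   }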
defp construct_socket_msg(encoding, compression, data) do
data =
case encoding do
:etf ->
data
_ ->
data |> Poison.encode!()
end
case compression do
:zlib ->
z = :zlib.open()
:zlib.deflateInit(z)
data = :zlib.deflate(z, data, :finish)
:zlib.deflateEnd(z)
{:binary, data}
_ ->
{:text, data}
end
end
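# Illustrative sketch, not part of the original source: the mirror image of
# construct_socket_msg/3 for the :zlib + JSON combination, as a client or test might
# implement it. The function name is an assumption and it is not used elsewhere here.
defp decode_socket_msg({:binary, iodata}) do
  z = :zlib.open()
  :zlib.inflateInit(z)
  decompressed = :zlib.inflate(z, iodata)
  :zlib.inflateEnd(z)
  :zlib.close(z)

  decompressed
  |> IO.iodata_to_binary()
  |> Poison.decode!()
end

defp decode_socket_msg({:text, json}), do: Poison.decode!(json)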
# def handler("join-as-new-peer", _data, state) do
# Kousa.Room.join_vc_room(state.user_id)
# {:ok, state}
# end
def handler("fetch_following_online", %{"cursor" => cursor}, state) do
{users, next_cursor} = Follows.fetch_following_online(state.user_id, cursor)
{:reply,
construct_socket_msg(state.encoding, state.compression, %{
op: "fetch_following_online_done",
d: %{users: users, nextCursor: next_cursor, initial: cursor == 0}
}), state}
end
def handler("invite_to_room", %{"userId" => user_id_to_invite}, state) do
Kousa.Room.invite_to_room(state.user_id, user_id_to_invite)
{:ok, state}
end
def handler("make_room_public", %{"newName" => new_name}, state) do
Kousa.Room.make_room_public(state.user_id, new_name)
{:ok, state}
end
def handler("fetch_invite_list", %{"cursor" => cursor}, state) do
{users, next_cursor} = Follows.fetch_invite_list(state.user_id, cursor)
{:reply,
construct_socket_msg(state.encoding, state.compression, %{
op: "fetch_invite_list_done",
d: %{users: users, nextCursor: next_cursor, initial: cursor == 0}
}), state}
end
def handler("ban", %{"username" => username, "reason" => reason}, state) do
worked = Kousa.User.ban(state.user_id, username, reason)
{:reply,
construct_socket_msg(state.encoding, state.compression, %{
op: "ban_done",
d: %{worked: worked}
}), state}
end
def handler("set_auto_speaker", %{"value" => value}, state) do
Kousa.Room.set_auto_speaker(state.user_id, value)
{:ok, state}
end
# @deprecated
def handler("create-room", data, state) do
resp =
case Kousa.Room.create_room(
state.user_id,
data["roomName"],
data["description"] || "",
data["value"] == "private",
Map.get(data, "userIdToInvite")
) do
{:ok, d} ->
%{
op: "new_current_room",
d: d
}
{:error, d} ->
%{
op: "error",
d: d
}
end
{:reply,
construct_socket_msg(
state.encoding,
state.compression,
resp
), state}
end
# @deprecated
def handler("get_top_public_rooms", data, state) do
{:reply,
construct_socket_msg(state.encoding, state.compression, %{
op: "get_top_public_rooms_done",
d: f_handler("get_top_public_rooms", data, state)
}), state}
end
def handler("speaking_change", %{"value" => value}, state) do
current_room_id = Beef.Users.get_current_room_id(state.user_id)
if not is_nil(current_room_id) do
Kousa.Utils.RegUtils.lookup_and_cast(
Onion.RoomSession,
current_room_id,
{:speaking_change, state.user_id, value}
)
end
{:ok, state}
end
# @deprecated
def handler("edit_room_name", %{"name" => name}, state) do
case Kousa.Room.edit_room(state.user_id, name, "", false) do
{:error, message} ->
{:reply, prepare_socket_msg(%{op: "error", d: message}, state), state}
_ ->
{:ok, state}
end
end
# @deprecated in new design
def handler("leave_room", _data, state) do
case Kousa.Room.leave_room(state.user_id) do
{:ok, d} ->
{:reply, prepare_socket_msg(%{op: "you_left_room", d: d}, state), state}
_ ->
{:ok, state}
end
end
def handler("join_room", %{"roomId" => room_id}, state) do
case Kousa.Room.join_room(state.user_id, room_id) do
d ->
{:reply,
construct_socket_msg(state.encoding, state.compression, %{
op: "join_room_done",
d: d
}), state}
end
end
def handler(
"block_from_room",
%{"userId" => user_id_to_block_from_room},
state
) do
Kousa.Room.block_from_room(state.user_id, user_id_to_block_from_room)
{:ok, state}
end
def handler("add_speaker", %{"userId" => user_id_to_make_speaker}, state) do
Kousa.Room.make_speaker(state.user_id, user_id_to_make_speaker)
{:ok, state}
end
def handler("change_mod_status", %{"userId" => user_id_to_change, "value" => value}, state) do
Kousa.Room.change_mod(state.user_id, user_id_to_change, value)
{:ok, state}
end
def handler("block_user_and_from_room", %{"userId" => user_id_to_block}, state) do
Kousa.UserBlock.block(state.user_id, user_id_to_block)
Kousa.Room.block_from_room(state.user_id, user_id_to_block)
{:ok, state}
end
def handler("change_room_creator", %{"userId" => user_id_to_change}, state) do
Kousa.Room.change_room_creator(state.user_id, user_id_to_change)
{:ok, state}
end
def handler("ban_from_room_chat", %{"userId" => user_id_to_ban}, state) do
Kousa.RoomChat.ban_user(state.user_id, user_id_to_ban)
{:ok, state}
end
def handler("send_room_chat_msg", %{"tokens" => tokens, "whisperedTo" => whispered_to}, state) do
Kousa.RoomChat.send_msg(state.user_id, tokens, whispered_to)
{:ok, state}
end
def handler("send_room_chat_msg", %{"tokens" => tokens}, state) do
Kousa.RoomChat.send_msg(state.user_id, tokens, [])
{:ok, state}
end
# def handler("delete_account", _data, %State{} = state) do
# Kousa.User.delete(state.user_id)
# # this will log the user out
# {:reply, {:close, 4001, "invalid_authentication"}, state}
# end
def handler(
"delete_room_chat_message",
%{"messageId" => message_id, "userId" => user_id},
state
) do
Kousa.RoomChat.delete_message(state.user_id, message_id, user_id)
{:ok, state}
end
def handler("follow", %{"userId" => userId, "value" => value}, state) do
Kousa.Follow.follow(state.user_id, userId, value)
{:ok, state}
end
def handler(
"fetch_follow_list",
%{"userId" => user_id, "isFollowing" => get_following_list, "cursor" => cursor},
state
) do
{users, next_cursor} =
Kousa.Follow.get_follow_list(state.user_id, user_id, get_following_list, cursor)
{:reply,
construct_socket_msg(state.encoding, state.compression, %{
op: "fetch_follow_list_done",
d: %{
isFollowing: get_following_list,
userId: user_id,
users: users,
nextCursor: next_cursor,
initial: cursor == 0
}
}), state}
end
def handler("set_listener", %{"userId" => user_id_to_make_listener}, state) do
Kousa.Room.set_listener(state.user_id, user_id_to_make_listener)
{:ok, state}
end
def handler("follow_info", %{"userId" => other_user_id}, state) do
{:reply,
construct_socket_msg(state.encoding, state.compression, %{
op: "follow_info_done",
d:
Map.merge(
%{userId: other_user_id},
Follows.get_info(state.user_id, other_user_id)
)
}), state}
end
def handler("mute", %{"value" => value}, state) do
Onion.UserSession.send_cast(state.user_id, {:set_mute, value})
{:ok, state}
end
# @deprecated in new design
def handler("get_current_room_users", data, state) do
{:reply,
prepare_socket_msg(
%{
op: "get_current_room_users_done",
d: f_handler("get_current_room_users", data, state)
},
state
), state}
end
def handler("ask_to_speak", _data, state) do
with {:ok, room_id} <- Users.tuple_get_current_room_id(state.user_id) do
case RoomPermissions.ask_to_speak(state.user_id, room_id) do
{:ok, %{isSpeaker: true}} ->
Kousa.Room.internal_set_speaker(state.user_id, room_id)
_ ->
Kousa.Utils.RegUtils.lookup_and_cast(
Onion.RoomSession,
room_id,
{:send_ws_msg, :vscode,
%{
op: "hand_raised",
d: %{userId: state.user_id, roomId: room_id}
}}
)
end
end
{:ok, state}
end
def handler("audio_autoplay_error", _data, state) do
Kousa.Utils.RegUtils.lookup_and_cast(
Onion.UserSession,
state.user_id,
{:send_ws_msg, :vscode,
%{
op: "error",
d: "browser can't autoplay audio the first time, go press play audio in your browser"
}}
)
{:ok, state}
end
def handler(op, data, state) do
with {:ok, room_id} <- Beef.Users.tuple_get_current_room_id(state.user_id),
{:ok, voice_server_id} <-
RegUtils.lookup_and_call(Onion.RoomSession, room_id, {:get_voice_server_id}) do
d =
cond do
String.first(op) == "@" ->
Map.merge(data, %{
peerId: state.user_id,
roomId: room_id
})
true ->
data
end
Onion.VoiceRabbit.send(voice_server_id, %{
op: op,
d: d,
uid: state.user_id
})
{:ok, state}
else
x ->
IO.puts("you should never see this general rabbbitmq handler in socker_handler")
IO.inspect(x)
{:reply,
prepare_socket_msg(
%{
op: "error",
d: "you should never see this, if you do, try refreshing"
},
state
), state}
end
end
def f_handler("mute", %{"value" => value}, %State{} = state) do
Onion.UserSession.send_cast(state.user_id, {:set_mute, value})
%{}
end
def f_handler("leave_room", _data, %State{} = state) do
case Kousa.Room.leave_room(state.user_id) do
{:ok, x} -> x
_ -> %{}
end
end
def f_handler("get_current_room_users", _data, %State{} = state) do
{room_id, users} = Beef.Users.get_users_in_current_room(state.user_id)
{muteMap, autoSpeaker, activeSpeakerMap} =
cond do
not is_nil(room_id) ->
case GenRegistry.lookup(Onion.RoomSession, room_id) do
{:ok, session} ->
GenServer.call(session, {:get_maps})
_ ->
{%{}, false, %{}}
end
true ->
{%{}, false, %{}}
end
%{
users: users,
muteMap: muteMap,
activeSpeakerMap: activeSpeakerMap,
roomId: room_id,
autoSpeaker: autoSpeaker
}
end
@spec f_handler(<<_::64, _::_*8>>, any, atom | map) :: any
def f_handler("get_my_scheduled_rooms_about_to_start", _data, %State{} = state) do
%{scheduledRooms: Kousa.ScheduledRoom.get_my_scheduled_rooms_about_to_start(state.user_id)}
end
def f_handler("get_top_public_rooms", data, %State{} = state) do
{rooms, next_cursor} =
Rooms.get_top_public_rooms(
state.user_id,
data["cursor"]
)
%{rooms: rooms, nextCursor: next_cursor, initial: data["cursor"] == 0}
end
def f_handler(
"edit_room",
%{"name" => name, "description" => description, "privacy" => privacy},
state
) do
case Kousa.Room.edit_room(state.user_id, name, description, privacy == "private") do
{:error, message} ->
%{
error: message
}
_ ->
true
end
end
def f_handler("get_scheduled_rooms", data, %State{} = state) do
{scheduled_rooms, next_cursor} =
Kousa.ScheduledRoom.get_scheduled_rooms(
state.user_id,
Map.get(data, "getOnlyMyScheduledRooms") == true,
Map.get(data, "cursor")
)
%{
scheduledRooms: scheduled_rooms,
nextCursor: next_cursor
}
end
def f_handler("edit_scheduled_room", %{"id" => id, "data" => data}, %State{} = state) do
case Kousa.ScheduledRoom.edit(
state.user_id,
id,
data
) do
:ok ->
%{}
{:error, msg} ->
%{error: msg}
end
end
def f_handler("delete_scheduled_room", %{"id" => id}, %State{} = state) do
Kousa.ScheduledRoom.delete(
state.user_id,
id
)
%{}
end
def f_handler(
"create_room_from_scheduled_room",
%{
"id" => scheduled_room_id,
"name" => name,
"description" => description
},
%State{} = state
) do
case Kousa.ScheduledRoom.create_room_from_scheduled_room(
state.user_id,
scheduled_room_id,
name,
description
) do
{:ok, d} ->
d
{:error, d} ->
%{
error: d
}
end
end
def f_handler("create_room", data, %State{} = state) do
case Kousa.Room.create_room(
state.user_id,
data["name"],
data["description"],
data["privacy"] == "private",
Map.get(data, "userIdToInvite")
) do
{:ok, d} ->
d
{:error, d} ->
%{
error: d
}
end
end
def f_handler("schedule_room", data, %State{} = state) do
case Kousa.ScheduledRoom.schedule(state.user_id, data) do
{:ok, scheduledRoom} ->
%{scheduledRoom: scheduledRoom}
{:error, msg} ->
%{error: msg}
end
end
def f_handler("unban_from_room", %{"userId" => user_id}, %State{} = state) do
Kousa.RoomBlock.unban(state.user_id, user_id)
%{}
end
def f_handler("edit_profile", %{"data" => data}, %State{} = state) do
%{
isUsernameTaken:
case Kousa.User.edit_profile(state.user_id, data) do
:username_taken -> true
_ -> false
end
}
end
def f_handler("get_blocked_from_room_users", %{"offset" => offset}, %State{} = state) do
case Kousa.RoomBlock.get_blocked_users(state.user_id, offset) do
{users, next_cursor} ->
%{users: users, nextCursor: next_cursor}
_ ->
%{users: [], nextCursor: nil}
end
end
def f_handler("get_user_profile", %{"userId" => id_or_username}, %State{} = _state) do
case UUID.cast(id_or_username) do
{:ok, uuid} ->
Beef.Users.get_by_id(uuid)
_ ->
Beef.Users.get_by_username(id_or_username)
end
end
defp prepare_socket_msg(data, %State{compression: compression, encoding: encoding}) do
data
|> encode_data(encoding)
|> compress_data(compression)
end
defp encode_data(data, :etf) do
data
end
defp encode_data(data, _encoding) do
data |> Poison.encode!()
end
defp compress_data(data, :zlib) do
z = :zlib.open()
:zlib.deflateInit(z)
data = :zlib.deflate(z, data, :finish)
:zlib.deflateEnd(z)
{:binary, data}
end
defp compress_data(data, _compression) do
{:text, data}
end
end
avg_line_length: 26.78022 | max_line_length: 99 | alphanum_fraction: 0.548306

hexsha: 9e2fa93a9294f5bf043c8686b8123b8330ce4870 | size: 1,466 | ext: ex | lang: Elixir
repo: RatioPBC/epi-locator | path: lib/epi_locator/accounts/user_token.ex | head_hexsha: 58c90500c4e0071ce365d76ec9812f9051d6a9f9 | licenses: ["Apache-2.0"]
max_stars_count: null | max_issues_count: 6 (2021-10-19T01:55:57.000Z to 2022-02-15T01:04:19.000Z) | max_forks_count: 2 (2022-01-21T08:38:50.000Z to 2022-01-21T08:42:04.000Z)

content:

defmodule EpiLocator.Accounts.UserToken do
@moduledoc """
Simple auth model that generates/stores session tokens.
"""
use Ecto.Schema
import Ecto.Query
@hash_algorithm :sha256
@rand_size_session_token 32
@session_validity_in_days 1
schema "users_tokens" do
field(:token, :binary)
field(:user_id, :string)
timestamps(updated_at: false)
end
@doc """
Generates a token that will be stored in a signed place,
such as session or cookie. As they are signed, those
tokens do not need to be hashed.
"""
def build_session_token(user_id) do
token = generate_session_token()
{token, %EpiLocator.Accounts.UserToken{token: token, user_id: user_id}}
end
def generate_session_token do
:crypto.strong_rand_bytes(@rand_size_session_token)
end
def verify_session_token_query(token) do
query =
from(token in session_token_query(token),
where: token.inserted_at > ago(@session_validity_in_days, "day")
)
{:ok, query}
end
def session_token_query(token) do
from(EpiLocator.Accounts.UserToken, where: [token: ^token])
end
def user_id_query(user_id) do
from(EpiLocator.Accounts.UserToken, where: [user_id: ^user_id])
end
def encode(binary_token) do
Base.encode16(binary_token, padding: false)
end
def decode(string_token) do
Base.decode16(string_token, padding: false)
end
def hash_bytes(bytes) do
:crypto.hash(@hash_algorithm, bytes)
end
end
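# Illustrative usage sketch, not part of the original file: a typical round trip
# through the functions above. EpiLocator.Repo is an assumed repo module name that
# does not appear in this excerpt.
defmodule EpiLocator.Accounts.UserTokenExample do
  alias EpiLocator.Accounts.UserToken

  def create_session(user_id) do
    {token, user_token} = UserToken.build_session_token(user_id)
    EpiLocator.Repo.insert!(user_token)
    # Only the struct is persisted; the raw token goes into the signed session cookie.
    token
  end

  def user_id_for(token) do
    {:ok, query} = UserToken.verify_session_token_query(token)

    case EpiLocator.Repo.one(query) do
      %UserToken{user_id: user_id} -> user_id
      nil -> nil
    end
  end
end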
avg_line_length: 23.269841 | max_line_length: 75 | alphanum_fraction: 0.718281

hexsha: 9e2fb242e62523183a865869c9a24a580e84d6be | size: 495 | ext: ex | lang: Elixir
repo: nathanjohnson320/noodl | path: app/lib/noodl/events/event_bans.ex | head_hexsha: 2e449aab15b54fc5a1dc45ebf4b79e7b64b7c967 | licenses: ["MIT"]
max_stars_count: 1 (2021-01-20T20:00:50.000Z to 2021-01-20T20:00:50.000Z) | max_issues_count: null | max_forks_count: null

content:

defmodule Noodl.Events.EventBan do
use Ecto.Schema
import Ecto.Changeset
alias Noodl.Accounts.User
alias Noodl.Events.Event
@primary_key {:id, :binary_id, autogenerate: true}
@foreign_key_type :binary_id
schema "event_bans" do
belongs_to :user, User
belongs_to :event, Event
timestamps()
end
@doc false
def changeset(event_ban, attrs \\ %{}) do
event_ban
|> cast(attrs, [:user_id, :event_id])
|> validate_required([:user_id, :event_id])
end
end
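# Illustrative usage sketch, not part of the original file: building an insertable
# ban record with the changeset above. Noodl.Repo, `user` and `event` are assumptions.
#
#   %Noodl.Events.EventBan{}
#   |> Noodl.Events.EventBan.changeset(%{user_id: user.id, event_id: event.id})
#   |> Noodl.Repo.insert()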
avg_line_length: 20.625 | max_line_length: 52 | alphanum_fraction: 0.694949

hexsha: 9e2fb9db91f644e8c16f09936d32fd5828c97a7b | size: 19,895 | ext: exs | lang: Elixir
repo: freshcom/freshcom-api | path: old/test/blue_jet_web/controllers/product_controller_test.exs | head_hexsha: 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | licenses: ["BSD-3-Clause"]
max_stars_count: 44 (2018-05-09T01:08:57.000Z to 2021-01-19T07:25:26.000Z) | max_issues_count: 36 (2018-05-08T23:59:54.000Z to 2018-09-28T13:50:30.000Z) | max_forks_count: 9 (2018-05-09T14:09:19.000Z to 2021-03-21T21:04:04.000Z)

content:

defmodule BlueJetWeb.ProductControllerTest do
use BlueJetWeb.ConnCase
alias BlueJet.Identity.User
alias BlueJet.Storefront.Product
alias BlueJet.FileStorage.File
alias BlueJet.FileStorage.FileCollection
alias BlueJet.Repo
@valid_attrs %{
"status" => "active",
"name" => "Apple",
"customData" => %{
"kind" => "Gala"
}
}
@valid_fields %{
status: "active",
name: "Orange",
custom_data: %{
"kind" => "Blue Jay"
}
}
@invalid_attrs %{
"name" => ""
}
setup do
{_, %User{ default_account_id: account1_id }} = Identity.create_user(%{
fields: %{
"first_name" => Faker.Name.first_name(),
"last_name" => Faker.Name.last_name(),
"email" => "[email protected]",
"password" => "test1234",
"account_name" => Faker.Company.name()
}
})
{:ok, %{ access_token: uat1 }} = Identity.authenticate(%{ username: "[email protected]", password: "test1234", scope: "type:user" })
conn = build_conn()
|> put_req_header("accept", "application/vnd.api+json")
|> put_req_header("content-type", "application/vnd.api+json")
%{ conn: conn, uat1: uat1, account1_id: account1_id }
end
describe "POST /v1/products" do
test "with no access token", %{conn: conn} do
conn = post(conn, "/v1/products", %{
"data" => %{
"type" => "Product",
"attributes" => @valid_attrs
}
})
assert conn.status == 401
end
test "with invalid attrs and rels", %{ conn: conn, uat1: uat1 } do
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = post(conn, "/v1/products", %{
"data" => %{
"type" => "Product",
"attributes" => @invalid_attrs
}
})
assert json_response(conn, 422)["errors"]
assert length(json_response(conn, 422)["errors"]) > 0
end
test "with valid attrs and rels", %{ conn: conn, uat1: uat1 } do
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = post(conn, "/v1/products", %{
"data" => %{
"type" => "Product",
"attributes" => @valid_attrs
}
})
assert json_response(conn, 201)["data"]["id"]
assert json_response(conn, 201)["data"]["attributes"]["status"] == @valid_attrs["status"]
assert json_response(conn, 201)["data"]["attributes"]["name"] == @valid_attrs["name"]
assert json_response(conn, 201)["data"]["attributes"]["customData"] == @valid_attrs["customData"]
assert json_response(conn, 201)["data"]["attributes"]["customData"]["kind"] == @valid_attrs["customData"]["kind"]
end
test "with valid attrs, rels and include", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
%File{ id: avatar_id } = Repo.insert!(%File{
account_id: account1_id,
name: Faker.Lorem.word(),
status: "uploaded",
content_type: "image/png",
size_bytes: 42
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = post(conn, "/v1/products?include=avatar", %{
"data" => %{
"type" => "Product",
"attributes" => @valid_attrs,
"relationships" => %{
"avatar" => %{
"data" => %{
"type" => "File",
"id" => avatar_id
}
}
}
}
})
assert json_response(conn, 201)["data"]["id"]
assert json_response(conn, 201)["data"]["attributes"]["status"] == @valid_attrs["status"]
assert json_response(conn, 201)["data"]["attributes"]["name"] == @valid_attrs["name"]
assert json_response(conn, 201)["data"]["attributes"]["customData"] == @valid_attrs["customData"]
assert json_response(conn, 201)["data"]["attributes"]["customData"]["kind"] == @valid_attrs["customData"]["kind"]
assert json_response(conn, 201)["data"]["relationships"]["avatar"]["data"]["id"]
assert length(Enum.filter(json_response(conn, 201)["included"], fn(item) -> item["type"] == "File" end)) == 1
end
end
describe "GET /v1/products/:id" do
test "with no access token", %{conn: conn} do
conn = get(conn, "/v1/products/test")
assert conn.status == 401
end
test "with access token of a different account", %{ conn: conn, uat1: uat1 } do
{:ok, %User{ default_account_id: account2_id }} = Identity.create_user(%{
fields: %{
"first_name" => Faker.Name.first_name(),
"last_name" => Faker.Name.last_name(),
"email" => "[email protected]",
"password" => "test1234",
"account_name" => Faker.Company.name()
}
})
product = Repo.insert!(
Map.merge(%Product{ account_id: account2_id }, @valid_fields)
)
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
assert_error_sent(404, fn ->
get(conn, "/v1/products/#{product.id}")
end)
end
test "with valid access token and id", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
product = Repo.insert!(
Map.merge(%Product{ account_id: account1_id }, @valid_fields)
)
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, "/v1/products/#{product.id}")
assert json_response(conn, 200)["data"]["id"] == product.id
assert json_response(conn, 200)["data"]["attributes"]["status"] == product.status
assert json_response(conn, 200)["data"]["attributes"]["name"] == product.name
assert json_response(conn, 200)["data"]["attributes"]["customData"] == product.custom_data
end
test "with valid access token, id, locale and include", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
product = Repo.insert!(
Map.merge(%Product{
account_id: account1_id,
translations: %{
"zh-CN" => %{
"name" => "橙子"
}
}
},
@valid_fields)
)
Repo.insert!(%FileCollection{
account_id: account1_id,
product_id: product.id,
label: "primary_images",
translations: %{
"zh-CN" => %{
"name" => "图片"
}
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, "/v1/products/#{product.id}?locale=zh-CN&include=fileCollections")
assert json_response(conn, 200)["data"]["id"] == product.id
assert json_response(conn, 200)["data"]["attributes"]["status"] == product.status
assert json_response(conn, 200)["data"]["attributes"]["name"] == "橙子"
assert json_response(conn, 200)["data"]["attributes"]["customData"] == product.custom_data
assert length(Enum.filter(json_response(conn, 200)["included"], fn(item) -> item["type"] == "FileCollection" end)) == 1
assert length(Enum.filter(json_response(conn, 200)["included"], fn(item) -> item["attributes"]["name"] == "图片" end)) == 1
end
end
describe "PATCH /v1/products/:id" do
test "with no access token", %{conn: conn} do
conn = patch(conn, "/v1/products/test", %{
"data" => %{
"id" => "test",
"type" => "Product",
"attributes" => @valid_attrs
}
})
assert conn.status == 401
end
test "with access token of a different account", %{ conn: conn, uat1: uat1 } do
{:ok, %User{ default_account_id: account2_id }} = Identity.create_user(%{
fields: %{
"first_name" => Faker.Name.first_name(),
"last_name" => Faker.Name.last_name(),
"email" => "[email protected]",
"password" => "test1234",
"account_name" => Faker.Company.name()
}
})
product = Repo.insert!(
Map.merge(%Product{ account_id: account2_id }, @valid_fields)
)
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
assert_error_sent(404, fn ->
patch(conn, "/v1/products/#{product.id}", %{
"data" => %{
"id" => product.id,
"type" => "Product",
"attributes" => @valid_attrs
}
})
end)
end
test "with valid access token, invalid attrs and rels", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
product = Repo.insert!(
Map.merge(%Product{ account_id: account1_id }, @valid_fields)
)
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = patch(conn, "/v1/products/#{product.id}", %{
"data" => %{
"id" => product.id,
"type" => "Product",
"attributes" => @invalid_attrs
}
})
assert json_response(conn, 422)["errors"]
assert length(json_response(conn, 422)["errors"]) > 0
end
test "with valid access token, attrs and rels", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
product = Repo.insert!(
Map.merge(%Product{ account_id: account1_id }, @valid_fields)
)
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = patch(conn, "/v1/products/#{product.id}", %{
"data" => %{
"id" => product.id,
"type" => "Product",
"attributes" => @valid_attrs
}
})
assert json_response(conn, 200)["data"]["id"]
assert json_response(conn, 200)["data"]["attributes"]["status"] == @valid_attrs["status"]
assert json_response(conn, 200)["data"]["attributes"]["name"] == @valid_attrs["name"]
assert json_response(conn, 200)["data"]["attributes"]["customData"]["kind"] == @valid_attrs["customData"]["kind"]
end
test "with valid access token, attrs, rels and locale", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
product = Repo.insert!(
Map.merge(%Product{
account_id: account1_id
},
@valid_fields)
)
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = patch(conn, "/v1/products/#{product.id}?locale=zh-CN", %{
"data" => %{
"id" => product.id,
"type" => "Product",
"attributes" => %{
"name" => "橙子"
}
}
})
assert json_response(conn, 200)["data"]["id"]
assert json_response(conn, 200)["data"]["attributes"]["status"] == @valid_fields[:status]
assert json_response(conn, 200)["data"]["attributes"]["name"] == "橙子"
assert json_response(conn, 200)["data"]["attributes"]["customData"] == @valid_fields[:custom_data]
end
test "with valid access token, attrs, rels, locale and include", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
product = Repo.insert!(
Map.merge(%Product{
account_id: account1_id
},
@valid_fields)
)
Repo.insert!(%FileCollection{
account_id: account1_id,
product_id: product.id,
label: "primary_images",
translations: %{
"zh-CN" => %{
"name" => "图片"
}
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = patch(conn, "/v1/products/#{product.id}?locale=zh-CN&include=fileCollections", %{
"data" => %{
"id" => product.id,
"type" => "Product",
"attributes" => %{
"name" => "橙子"
}
}
})
assert json_response(conn, 200)["data"]["id"]
assert json_response(conn, 200)["data"]["attributes"]["status"] == @valid_fields[:status]
assert json_response(conn, 200)["data"]["attributes"]["name"] == "橙子"
assert json_response(conn, 200)["data"]["attributes"]["customData"] == @valid_fields[:custom_data]
assert length(Enum.filter(json_response(conn, 200)["included"], fn(item) -> item["type"] == "FileCollection" end)) == 1
assert length(Enum.filter(json_response(conn, 200)["included"], fn(item) -> item["attributes"]["name"] == "图片" end)) == 1
end
end
describe "GET /v1/products" do
test "with no access token", %{conn: conn} do
conn = get(conn, "/v1/products")
assert conn.status == 401
end
test "with valid access token", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
{:ok, %User{ default_account_id: account2_id }} = Identity.create_user(%{
fields: %{
"first_name" => Faker.Name.first_name(),
"last_name" => Faker.Name.last_name(),
"email" => "[email protected]",
"password" => "test1234",
"account_name" => Faker.Company.name()
}
})
Repo.insert!(
Map.merge(%Product{ account_id: account2_id }, @valid_fields)
)
Repo.insert!(
Map.merge(%Product{ account_id: account1_id }, @valid_fields)
)
Repo.insert!(
Map.merge(%Product{ account_id: account1_id }, @valid_fields)
)
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, "/v1/products")
assert length(json_response(conn, 200)["data"]) == 2
end
test "with valid access token and pagination", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
Repo.insert!(%Product{
account_id: account1_id,
status: "active",
name: "Apple"
})
Repo.insert!(%Product{
account_id: account1_id,
status: "active",
name: "Apple"
})
Repo.insert!(%Product{
account_id: account1_id,
status: "active",
name: "Apple"
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, "/v1/products?page[number]=2&page[size]=1")
assert length(json_response(conn, 200)["data"]) == 1
assert json_response(conn, 200)["meta"]["resultCount"] == 3
assert json_response(conn, 200)["meta"]["totalCount"] == 3
end
test "with valid access token and filter", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
Repo.insert!(%Product{
account_id: account1_id,
status: "active",
name: "Apple"
})
Repo.insert!(%Product{
account_id: account1_id,
status: "active",
name: "Apple"
})
Repo.insert!(%Product{
account_id: account1_id,
status: "disabled",
name: "Apple"
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, "/v1/products?filter[status][]=active&filter[status][]=internal")
assert length(json_response(conn, 200)["data"]) == 2
assert json_response(conn, 200)["meta"]["resultCount"] == 2
assert json_response(conn, 200)["meta"]["totalCount"] == 3
end
test "with valid access token and locale", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
Repo.insert!(%Product{
account_id: account1_id,
status: "active",
name: "Apple",
translations: %{
"zh-CN" => %{
"name" => "苹果"
}
}
})
Repo.insert!(%Product{
account_id: account1_id,
status: "active",
name: "Apple",
translations: %{
"zh-CN" => %{
"name" => "苹果"
}
}
})
Repo.insert!(%Product{
account_id: account1_id,
status: "active",
name: "Apple"
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, "/v1/products?locale=zh-CN")
assert length(json_response(conn, 200)["data"]) == 3
assert json_response(conn, 200)["meta"]["resultCount"] == 3
assert json_response(conn, 200)["meta"]["totalCount"] == 3
assert length(Enum.filter(json_response(conn, 200)["data"], fn(item) -> item["attributes"]["name"] == "苹果" end)) == 2
end
test "with valid access token, locale and search", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
Repo.insert!(%Product{
account_id: account1_id,
status: "active",
name: "Apple",
translations: %{
"zh-CN" => %{
"name" => "苹果"
}
}
})
Repo.insert!(%Product{
account_id: account1_id,
status: "active",
name: "Apple",
translations: %{
"zh-CN" => %{
"name" => "苹果"
}
}
})
Repo.insert!(%Product{
account_id: account1_id,
status: "active",
name: "Apple"
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, "/v1/products?locale=zh-CN&search=苹")
assert length(json_response(conn, 200)["data"]) == 2
assert json_response(conn, 200)["meta"]["resultCount"] == 2
assert json_response(conn, 200)["meta"]["totalCount"] == 3
assert length(Enum.filter(json_response(conn, 200)["data"], fn(item) -> item["attributes"]["name"] == "苹果" end)) == 2
end
test "with valid access token, locale and include", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
Repo.insert!(%Product{
account_id: account1_id,
status: "active",
name: "Apple",
translations: %{
"zh-CN" => %{
"name" => "苹果"
}
}
})
Repo.insert!(%Product{
account_id: account1_id,
status: "active",
name: "Apple",
translations: %{
"zh-CN" => %{
"name" => "苹果"
}
}
})
product = Repo.insert!(%Product{
account_id: account1_id,
status: "active",
name: "Apple"
})
Repo.insert!(%FileCollection{
account_id: account1_id,
product_id: product.id,
label: "primary_images",
translations: %{
"zh-CN" => %{
"name" => "图片"
}
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, "/v1/products?locale=zh-CN&include=fileCollections")
assert length(json_response(conn, 200)["data"]) == 3
assert json_response(conn, 200)["meta"]["resultCount"] == 3
assert json_response(conn, 200)["meta"]["totalCount"] == 3
assert length(Enum.filter(json_response(conn, 200)["data"], fn(item) -> item["attributes"]["name"] == "苹果" end)) == 2
assert length(Enum.filter(json_response(conn, 200)["included"], fn(item) -> item["type"] == "FileCollection" end)) == 1
assert length(Enum.filter(json_response(conn, 200)["included"], fn(item) -> item["attributes"]["name"] == "图片" end)) == 1
end
end
describe "DELETE /v1/products/:id" do
test "with no access token", %{conn: conn} do
conn = delete(conn, "/v1/products/test")
assert conn.status == 401
end
test "with access token of a different account", %{ conn: conn, uat1: uat1 } do
{:ok, %User{ default_account_id: account2_id }} = Identity.create_user(%{
fields: %{
"first_name" => Faker.Name.first_name(),
"last_name" => Faker.Name.last_name(),
"email" => "[email protected]",
"password" => "test1234",
"account_name" => Faker.Company.name()
}
})
product = Repo.insert!(
Map.merge(%Product{ account_id: account2_id }, @valid_fields)
)
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
assert_error_sent(404, fn ->
delete(conn, "/v1/products/#{product.id}")
end)
end
test "with valid access token and id", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
product = Repo.insert!(
Map.merge(%Product{ account_id: account1_id }, @valid_fields)
)
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = delete(conn, "/v1/products/#{product.id}")
assert conn.status == 204
end
end
end
avg_line_length: 32.140549 | max_line_length: 136 | alphanum_fraction: 0.558231

hexsha: 9e2fc656d62f43bd124e8a78ee5760ef4120fd33 | size: 13,576 | ext: ex | lang: Elixir
repo: evalphobia/elixir | path: lib/mix/lib/mix/tasks/xref.ex | head_hexsha: a07a2362e5827b09d8b27be2c1ad2980d25b9768 | licenses: ["Apache-2.0"]
max_stars_count: null | max_issues_count: null | max_forks_count: null

content:

defmodule Mix.Tasks.Xref do
use Mix.Task
alias Mix.Tasks.Compile.Elixir, as: E
import Mix.Compilers.Elixir, only: [read_manifest: 2, source: 1, source: 2, module: 1]
@shortdoc "Performs cross reference checks"
@recursive true
@moduledoc """
Performs cross reference checks between modules.
## Xref modes
The `xref` task expects a mode as its first argument:
mix xref MODE
All available modes are discussed below.
### warnings
Prints warnings for violated cross reference checks:
mix xref warnings
This is the mode used by Mix during compilation.
### unreachable
Prints all unreachable "file:line: module.function/arity" entries:
mix xref unreachable
The "file:line" represents the file and line a call to an unknown
"module.function/arity" is made.
### callers CALLEE
Prints all callers of the given `CALLEE`, which can be one of: `Module`,
`Module.function`, or `Module.function/arity`. Examples:
mix xref callers MyMod
mix xref callers MyMod.fun
mix xref callers MyMod.fun/3
### graph
Prints a file dependency graph where an edge from `A` to `B` indicates
that `A` depends on `B`.
mix xref graph --format dot
The following options are accepted:
* `--exclude` - paths to exclude
* `--source` - displays all files that the given source file references (directly or indirectly)
* `--sink` - displays all files that reference the given file (directly or indirectly)
* `--format` - can be set to one of:
* `pretty` - uses Unicode codepoints for formatting the graph.
This is the default except on Windows
* `plain` - does not use Unicode codepoints for formatting the graph.
This is the default on Windows
* `dot` - produces a DOT graph description in `xref_graph.dot` in the
current directory. Warning: this will overwrite any previously generated file
The `--source` and `--sink` options are particularly useful when trying to understand how
the modules in a particular file interact with the whole system.
## Shared options
Those options are shared across all modes:
* `--no-compile` - does not compile even if files require compilation
* `--no-deps-check` - does not check dependencies
* `--no-archives-check` - does not check archives
* `--no-elixir-version-check` - does not check the Elixir version from mix.exs
## Configuration
All configuration for Xref should be placed under the key `:xref`.
* `:exclude` - a list of modules and `{module, function, arity}` tuples to ignore when checking
cross references. For example: `[MissingModule, {MissingModule2, :missing_func, 2}]`
"""
@switches [compile: :boolean, deps_check: :boolean, archives_check: :boolean,
elixir_version_check: :boolean, exclude: :keep, format: :string,
source: :string, sink: :string]
@doc """
Runs this task.
"""
@spec run(OptionParser.argv) :: :ok | :error
def run(args) do
{opts, args} =
OptionParser.parse!(args, strict: @switches)
Mix.Task.run("loadpaths")
if Keyword.get(opts, :compile, true) do
Mix.Task.run("compile")
end
case args do
["warnings"] ->
warnings()
["unreachable"] ->
unreachable()
["callers", callee] ->
callers(callee)
["graph"] ->
graph(opts)
_ ->
Mix.raise "xref doesn't support this command, see \"mix help xref\" for more information"
end
end
## Modes
defp warnings() do
if unreachable(&print_warnings/2) == [] do
:ok
else
:error
end
end
defp unreachable() do
if unreachable(&print_entry/2) == [] do
:ok
else
:error
end
end
defp callers(callee) do
callee
|> filter_for_callee()
|> do_callers()
:ok
end
defp graph(opts) do
write_graph(file_references(), excluded(opts), opts)
:ok
end
## Unreachable
defp unreachable(pair_fun) do
excludes = excludes()
each_source_entries(&source_warnings(&1, excludes), pair_fun)
end
defp source_warnings(source, excludes) do
source(runtime_dispatches: runtime_dispatches) = source
for {module, func_arity_lines} <- runtime_dispatches,
exports = load_exports(module),
{{func, arity}, lines} <- func_arity_lines,
warning = unreachable_mfa(exports, module, func, arity, lines, excludes),
do: warning
end
defp load_exports(module) do
if :code.is_loaded(module) do
# If the module is loaded, we will use the faster function_exported?/3 check
module
else
# Otherwise we get all exports from :beam_lib to avoid loading modules
with file when is_list(file) <- :code.which(module),
{:ok, {^module, [exports: exports]}} <- :beam_lib.chunks(file, [:exports]) do
exports
else
_ -> :unknown_module
end
end
end
defp unreachable_mfa(exports, module, func, arity, lines, excludes) do
cond do
excluded?(module, func, arity, excludes) ->
nil
skip?(module, func, arity) ->
nil
exports == :unknown_module ->
{Enum.sort(lines), :unknown_module, module, func, arity, nil}
is_atom(exports) and not function_exported?(module, func, arity) ->
{Enum.sort(lines), :unknown_function, module, func, arity, nil}
is_list(exports) and {func, arity} not in exports ->
{Enum.sort(lines), :unknown_function, module, func, arity, exports}
true ->
nil
end
end
## Print entries
defp print_entry(file, entries) do
entries
|> Enum.sort()
|> Enum.each(&IO.write(format_entry(file, &1)))
end
defp format_entry(file, {lines, _, module, function, arity, _}) do
for line <- lines do
[Exception.format_file_line(file, line), ?\s, Exception.format_mfa(module, function, arity), ?\n]
end
end
## Print warnings
defp print_warnings(file, entries) do
prefix = IO.ANSI.format([:yellow, "warning: "])
entries
|> Enum.sort()
|> Enum.each(&IO.write(:stderr, [prefix, format_warning(file, &1), ?\n]))
end
defp format_warning(file, {lines, :unknown_function, module, function, arity, exports}) do
message =
[module: module, function: function, arity: arity, reason: :"function not exported", exports: exports]
|> UndefinedFunctionError.exception()
|> Exception.message()
[message, "\n", format_file_lines(file, lines)]
end
defp format_warning(file, {lines, :unknown_module, module, function, arity, _}) do
["function ", Exception.format_mfa(module, function, arity),
" is undefined (module #{inspect module} is not available)\n" | format_file_lines(file, lines)]
end
defp format_file_lines(file, [line]) do
format_file_line(file, line)
end
defp format_file_lines(file, lines) do
["Found at #{length(lines)} locations:\n" |
Enum.map(lines, &format_file_line(file, &1))]
end
defp format_file_line(file, line) do
[" ", file, ?:, Integer.to_string(line), ?\n]
end
## "Unreachable" helpers
@protocol_builtins for {_, type} <- Protocol.__builtin__(), do: type
defp skip?(:erlang, func, 2) when func in [:andalso, :orelse] do
true
end
defp skip?(module, :__impl__, 1) do
{maybe_protocol, maybe_builtin} = module |> Module.split() |> Enum.split(-1)
maybe_protocol = Module.concat(maybe_protocol)
maybe_builtin = Module.concat(maybe_builtin)
maybe_builtin in @protocol_builtins and
Code.ensure_loaded?(maybe_protocol) and
function_exported?(maybe_protocol, :__protocol__, 1)
end
defp skip?(_, _, _) do
false
end
defp excludes() do
Mix.Project.config()
|> Keyword.get(:xref, [])
|> Keyword.get(:exclude, [])
|> MapSet.new()
end
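# Illustrative sketch, not part of the original file: the shape of the mix.exs
# configuration read above, following the moduledoc's Configuration section; the app
# name and version are placeholders.
#
#   def project do
#     [
#       app: :my_app,
#       version: "0.1.0",
#       xref: [exclude: [MissingModule, {MissingModule2, :missing_func, 2}]]
#     ]
#   end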
defp excluded?(module, func, arity, excludes) do
MapSet.member?(excludes, module) or MapSet.member?(excludes, {module, func, arity})
end
## Callers
defp do_callers(filter) do
each_source_entries(&source_calls_for_filter(&1, filter), &print_calls/2)
end
defp source_calls_for_filter(source, filter) do
runtime_dispatches = source(source, :runtime_dispatches)
compile_dispatches = source(source, :compile_dispatches)
dispatches = runtime_dispatches ++ compile_dispatches
calls =
for {module, func_arity_lines} <- dispatches,
{{func, arity}, lines} <- func_arity_lines,
filter.({module, func, arity}),
do: {module, func, arity, lines}
Enum.reduce calls, %{}, fn {module, func, arity, lines}, merged_calls ->
lines = MapSet.new(lines)
Map.update(merged_calls, {module, func, arity}, lines, &MapSet.union(&1, lines))
end
end
## Print callers
defp print_calls(file, calls) do
calls
|> Enum.sort()
|> Enum.each(&IO.write(format_call(file, &1)))
end
defp format_call(file, {{module, func, arity}, lines}) do
for line <- Enum.sort(lines),
do: [file, ":", to_string(line), ": ", Exception.format_mfa(module, func, arity), ?\n]
end
## "Callers" helpers
defp filter_for_callee(callee) do
case Mix.Utils.parse_mfa(callee) do
{:ok, mfa_list} ->
mfa_list_length = length(mfa_list)
fn {module, function, arity} ->
mfa_list == Enum.take([module, function, arity], mfa_list_length)
end
:error ->
Mix.raise "xref callers CALLEE expects Module, Module.function, or Module.function/arity, " <>
"got: " <> callee
end
end
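# Illustrative note, not part of the original file: "MyMod.fun/3" presumably parses to
# [MyMod, :fun, 3], so the returned filter matches only {MyMod, :fun, 3}, while "MyMod"
# parses to [MyMod] and the Enum.take/2 comparison above then accepts every function
# in MyMod.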
## Graph helpers
defp excluded(opts) do
opts
|> Keyword.get_values(:exclude)
|> Enum.flat_map(&[{&1, nil}, {&1, "(compile)"}, {&1, "(runtime)"}])
end
defp file_references() do
module_sources =
for manifest <- E.manifests(),
manifest_data = read_manifest(manifest, ""),
module(module: module, source: source) <- manifest_data,
source = Enum.find(manifest_data, &match?(source(source: ^source), &1)),
do: {module, source},
into: %{}
all_modules = MapSet.new(module_sources, &elem(&1, 0))
Map.new module_sources, fn {module, source} ->
source(runtime_references: runtime, compile_references: compile, source: file) = source
compile_references =
compile
|> MapSet.new()
|> MapSet.delete(module)
|> MapSet.intersection(all_modules)
|> Enum.filter(&module_sources[&1] != source)
|> Enum.map(&{source(module_sources[&1], :source), "(compile)"})
runtime_references =
runtime
|> MapSet.new()
|> MapSet.delete(module)
|> MapSet.intersection(all_modules)
|> Enum.filter(&module_sources[&1] != source)
|> Enum.map(&{source(module_sources[&1], :source), nil})
{file, compile_references ++ runtime_references}
end
end
defp write_graph(file_references, excluded, opts) do
{root, file_references} =
case {opts[:source], opts[:sink]} do
{nil, nil} ->
{Enum.map(file_references, &{elem(&1, 0), nil}) -- excluded, file_references}
{source, nil} ->
if file_references[source] do
{[{source, nil}], file_references}
else
Mix.raise "Source could not be found: #{source}"
end
{nil, sink} ->
if file_references[sink] do
file_references = filter_for_sink(file_references, sink)
roots =
file_references
|> Map.delete(sink)
|> Enum.map(&{elem(&1, 0), nil})
{roots -- excluded, file_references}
else
Mix.raise "Sink could not be found: #{sink}"
end
{_, _} ->
Mix.raise "mix xref graph expects only one of --source and --sink"
end
callback =
fn {file, type} ->
children = Map.get(file_references, file, [])
{{file, type}, children -- excluded}
end
if opts[:format] == "dot" do
Mix.Utils.write_dot_graph!("xref_graph.dot", "xref graph",
root, callback, opts)
"""
Generated "xref_graph.dot" in the current directory. To generate a PNG:
dot -Tpng xref_graph.dot -o xref_graph.png
For more options see http://www.graphviz.org/.
"""
|> String.trim_trailing()
|> Mix.shell.info()
else
Mix.Utils.print_tree(root, callback, opts)
end
end
defp filter_for_sink(file_references, sink) do
file_references
|> invert_references()
|> do_filter_for_sink([{sink, nil}], %{})
|> invert_references()
end
defp do_filter_for_sink(file_references, new_nodes, acc) do
Enum.reduce new_nodes, acc, fn {new_node_name, _type}, acc ->
new_nodes = file_references[new_node_name]
if acc[new_node_name] || !new_nodes do
acc
else
do_filter_for_sink(file_references, new_nodes, Map.put(acc, new_node_name, new_nodes))
end
end
end
defp invert_references(file_references) do
Enum.reduce file_references, %{}, fn {file, references}, acc ->
Enum.reduce references, acc, fn {reference, type}, acc ->
Map.update(acc, reference, [{file, type}], &[{file, type} | &1])
end
end
end
## Helpers
defp each_source_entries(entries_fun, pair_fun) do
for manifest <- E.manifests(),
source(source: file) = source <- read_manifest(manifest, ""),
entries = entries_fun.(source),
entries != [] and entries != %{},
do: pair_fun.(file, entries)
end
end
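# Illustrative note, not part of the original file: besides the CLI forms shown in the
# moduledoc (e.g. `mix xref unreachable`), the task can be driven programmatically;
# per the @spec above, run/1 returns :ok or :error.
#
#   Mix.Task.run("xref", ["callers", "MyMod.fun/3"])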
avg_line_length: 28.701903 | max_line_length: 108 | alphanum_fraction: 0.632587

hexsha: 9e2fbff373380d397b15c1e3813adf08c680a1e6 | size: 7,332 | ext: ex | lang: Elixir
repo: vaginessa/webbkoll | path: lib/webbkoll_web/controllers/site_controller.ex | head_hexsha: 9a0be0cdc2eaeac9e577589b35a63a81f68655d7 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null

content:

defmodule WebbkollWeb.SiteController do
use WebbkollWeb, :controller
alias WebbkollWeb.Site
@backends Application.get_env(:webbkoll, :backends)
@rate_limit_client Application.get_env(:webbkoll, :rate_limit_client)
@rate_limit_host Application.get_env(:webbkoll, :rate_limit_host)
@validate_urls Application.get_env(:webbkoll, :validate_urls)
plug(:check_for_bots when action in [:check])
plug(:scrub_params, "url" when action in [:check])
plug(:get_proper_url when action in [:check])
plug(:validate_url when action in [:check] and @validate_urls)
plug(:check_if_site_exists when action in [:check])
plug(:check_rate_ip when action in [:check])
plug(:check_rate_url_host when action in [:check])
def index(conn, _params) do
render(
conn,
"index.html",
locale: conn.assigns.locale,
page_title: gettext("Analyze"),
page_description: gettext(
"This tool helps you check what data-protecting measures a site has taken to help you exercise control over your privacy."
)
)
end
def indexi18n(conn, params), do: index(conn, params)
def about(conn, _params) do
render(
conn,
"about.html",
locale: conn.assigns.locale,
page_title: gettext("About"),
page_description: gettext(
"The what and why of data protection and the principles of the EU general data protection regulation."
)
)
end
def tech(conn, _params) do
render(
conn,
"tech.html",
locale: conn.assigns.locale,
page_title: gettext("Tech"),
page_description: gettext(
"How our web privacy check tool works and how you can run your own instance."
)
)
end
def check(%Plug.Conn{assigns: %{input_url: proper_url}} = conn, _params) do
site = %Site{
input_url: proper_url,
try_count: 0,
status: "queue",
inserted_at: System.system_time(:microsecond)
}
id = UUID.uuid4()
ConCache.put(:site_cache, id, site)
{queue, settings} = Enum.random(@backends)
Jumbo.Queue.enqueue(queue, Webbkoll.Worker, [
id,
proper_url,
conn.params["refresh"],
settings.url
])
redirect(conn, to: site_path(conn, :status, conn.assigns.locale, id: id))
end
def status(conn, %{"id" => id}) do
case UUID.info(id) do
{:error, _} -> handle_status(nil, id, conn)
{:ok, _} -> ConCache.get(:site_cache, id) |> handle_status(id, conn)
end
end
defp handle_status(nil, _id, conn) do
redirect(conn, to: site_path(conn, :indexi18n, conn.assigns.locale))
end
defp handle_status(site, id, conn) do
case site.status do
"queue" ->
render(conn, "status.html", id: id, site: site, page_title: gettext("Status"))
"processing" ->
render(conn, "status.html", id: id, site: site, page_title: gettext("Status"))
"failed" ->
redirect(conn, to: site_path(conn, :results, conn.assigns.locale, url: site.input_url))
"done" ->
redirect(conn, to: site_path(conn, :results, conn.assigns.locale, url: site.final_url))
end
end
def results(conn, %{"url" => url}) do
url
|> get_latest_from_cache()
|> handle_results(conn, url)
end
defp handle_results(nil, conn, url) do
redirect(conn, to: site_path(conn, :check, conn.assigns.locale, url: url))
end
defp handle_results({id, site}, conn, _url) do
case site.status do
"queue" ->
redirect(conn, to: site_path(conn, :status, conn.assigns.locale, id: id, site: site))
"processing" ->
redirect(conn, to: site_path(conn, :status, conn.assigns.locale, id: id, site: site))
"failed" ->
render(conn, :failed, site: site, page_title: gettext("Processing failed"))
"done" ->
render(
conn,
:results,
site: site,
page_title: gettext("Results for %{url}", url: truncate(site.final_url, 50)),
page_description: gettext("How this website is doing with regards to privacy.")
)
end
end
defp get_latest_from_cache(url) do
input = :ets.match_object(ConCache.ets(:site_cache), {:_, %{input_url: url}})
final = :ets.match_object(ConCache.ets(:site_cache), {:_, %{final_url: url}})
(input ++ final)
|> Enum.filter(&is_tuple/1)
|> Enum.sort(fn x, y ->
elem(x, 1) |> Map.get(:inserted_at) > elem(y, 1) |> Map.get(:inserted_at)
end)
|> List.first()
end
# Plugs
defp check_for_bots(conn, _params) do
conn
|> get_req_header("user-agent")
|> check_user_agent(conn)
end
defp check_user_agent([user_agent], conn) do
bot_pattern = ~r/bot|crawl|slurp|spider/i
case Regex.match?(bot_pattern, user_agent) do
true -> render_error(conn, gettext("Sorry, bots aren't allowed."))
false -> conn
end
end
defp check_user_agent([], conn), do: conn
defp get_proper_url(url = %URI{}) do
path = url.path || "/"
case @validate_urls do
true -> "http://#{String.downcase(url.host)}#{path}"
false -> "http://#{String.downcase(url.authority)}#{path}"
end
end
defp get_proper_url(conn, _params) do
url =
case String.starts_with?(conn.params["url"], ["http://", "https://"]) do
true -> conn.params["url"] |> URI.parse() |> get_proper_url
false -> "http://#{conn.params["url"]}" |> URI.parse() |> get_proper_url
end
assign(conn, :input_url, url)
end
defp validate_url(conn, _params) do
conn.assigns.input_url
|> URI.parse()
|> Map.get(:host)
|> PublicSuffix.matches_explicit_rule?()
|> case do
true ->
conn
false ->
render_error(
conn,
gettext("Invalid domain: %{domain}", domain: conn.assigns.input_url)
)
end
end
defp check_if_site_exists(%Plug.Conn{assigns: %{input_url: proper_url}} = conn, _params) do
case conn.params["refresh"] do
"on" -> conn
_ -> check_site_in_cache(conn, proper_url)
end
end
defp check_site_in_cache(conn, proper_url) do
proper_url
|> get_latest_from_cache()
|> handle_check_site_in_cache(conn)
end
defp handle_check_site_in_cache({id, _site}, conn) do
conn
|> redirect(to: site_path(conn, :status, conn.assigns.locale, id: id))
|> halt
end
defp handle_check_site_in_cache(_, conn), do: conn
defp check_rate_ip(conn, _params) do
conn.remote_ip
|> Tuple.to_list()
|> Enum.join(".")
|> ExRated.check_rate(@rate_limit_client["scale"], @rate_limit_client["limit"])
|> case do
{:ok, _} ->
conn
{:error, _} ->
render_error(conn, gettext("You're requesting too frequently. Install locally?"))
end
end
defp check_rate_url_host(conn, _params) do
conn.assigns.input_url
|> URI.parse()
|> Map.get(:host)
|> ExRated.check_rate(@rate_limit_host["scale"], @rate_limit_host["limit"])
|> case do
{:ok, _} ->
conn
{:error, _} ->
render_error(conn, gettext("Trying same host too frequently. Try again in a minute."))
end
end
defp render_error(conn, error_message) do
conn
|> put_status(400)
|> render(:error, error_message: error_message, page_title: gettext("Error"))
|> halt
end
end
| 27.984733 | 130 | 0.62575 |
9e2fc656d62f43bd124e8a78ee5760ef4120fd33 | 1,147 | exs | Elixir | mix.exs | schutm/enum_type | e97d937697886adb6046b5bd3502b6212f57bf9f | [
"Apache-2.0"
] | 10 | 2019-11-25T13:28:54.000Z | 2022-02-01T20:17:13.000Z | mix.exs | schutm/enum_type | e97d937697886adb6046b5bd3502b6212f57bf9f | [
"Apache-2.0"
] | 11 | 2019-03-08T01:44:39.000Z | 2022-02-17T16:29:10.000Z | mix.exs | schutm/enum_type | e97d937697886adb6046b5bd3502b6212f57bf9f | [
"Apache-2.0"
] | 9 | 2019-03-22T17:08:01.000Z | 2021-12-08T16:25:07.000Z | defmodule EnumType.MixProject do
use Mix.Project
@version "1.1.3"
def project do
[
app: :enum_type,
version: @version,
elixir: "~> 1.5",
start_permanent: Mix.env() == :prod,
deps: deps(),
description:
"An Elixir friendly Enum module generator that can be used by itself or with Ecto.",
name: "EnumType",
package: %{
licenses: ["Apache 2.0"],
maintainers: ["Joseph Lindley"],
links: %{"GitHub" => "https://github.com/onboardingsystems/enum_type"},
files: ~w(mix.exs README.md lib)
},
docs: [
source_ref: "v#{@version}",
main: "readme",
canonical: "http://hexdocs.pm/enum_type",
source_url: "https://github.com/onboardingsystems/enum_type",
extras: ["README.md"]
]
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:ex_doc, "~> 0.0", only: [:docs, :dev]},
{:ecto, "~> 3.0", only: [:test]}
]
end
end
| 24.404255 | 92 | 0.562337 |
9e2fe5440fde97e1aefcc8db8c9c267f3b3d1fdb | 2,196 | ex | Elixir | clients/people/lib/google_api/people/v1/model/url.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/people/lib/google_api/people/v1/model/url.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/people/lib/google_api/people/v1/model/url.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.People.V1.Model.Url do
@moduledoc """
A person's associated URLs.
## Attributes
* `formattedType` (*type:* `String.t`, *default:* `nil`) - Output only. The type of the URL translated and formatted in the viewer's
account locale or the `Accept-Language` HTTP header locale.
* `metadata` (*type:* `GoogleApi.People.V1.Model.FieldMetadata.t`, *default:* `nil`) - Metadata about the URL.
* `type` (*type:* `String.t`, *default:* `nil`) - The type of the URL. The type can be custom or one of these predefined
values:
* `home`
* `work`
* `blog`
* `profile`
* `homePage`
* `ftp`
* `reservations`
* `appInstallPage`: website for a Google+ application.
* `other`
* `value` (*type:* `String.t`, *default:* `nil`) - The URL.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:formattedType => String.t(),
:metadata => GoogleApi.People.V1.Model.FieldMetadata.t(),
:type => String.t(),
:value => String.t()
}
field(:formattedType)
field(:metadata, as: GoogleApi.People.V1.Model.FieldMetadata)
field(:type)
field(:value)
end
defimpl Poison.Decoder, for: GoogleApi.People.V1.Model.Url do
def decode(value, options) do
GoogleApi.People.V1.Model.Url.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.People.V1.Model.Url do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.294118 | 136 | 0.673497 |
9e2fe767c0e78db0e19d315f0240606f15294afb | 1,712 | exs | Elixir | integration_test/sql/sandbox.exs | victorsolis/ecto | 6c0dbf1ee2afd9b5bdf1f3feee8d361c8197c99a | [
"Apache-2.0"
] | null | null | null | integration_test/sql/sandbox.exs | victorsolis/ecto | 6c0dbf1ee2afd9b5bdf1f3feee8d361c8197c99a | [
"Apache-2.0"
] | null | null | null | integration_test/sql/sandbox.exs | victorsolis/ecto | 6c0dbf1ee2afd9b5bdf1f3feee8d361c8197c99a | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Integration.SandboxTest do
use ExUnit.Case
alias Ecto.Adapters.SQL.Sandbox
alias Ecto.Integration.TestRepo
alias Ecto.Integration.Post
test "can use the repository when checked out" do
assert_raise RuntimeError, ~r"cannot find ownership process", fn ->
TestRepo.all(Post)
end
Sandbox.checkout(TestRepo)
assert TestRepo.all(Post) == []
Sandbox.checkin(TestRepo)
assert_raise RuntimeError, ~r"cannot find ownership process", fn ->
TestRepo.all(Post)
end
end
test "can use the repository when allowed from another process" do
assert_raise RuntimeError, ~r"cannot find ownership process", fn ->
TestRepo.all(Post)
end
parent = self()
Task.start_link fn ->
Sandbox.checkout(TestRepo)
Sandbox.allow(TestRepo, self(), parent)
send parent, :allowed
:timer.sleep(:infinity)
end
assert_receive :allowed
assert TestRepo.all(Post) == []
end
test "runs inside a sandbox that is rolled back on checkin" do
Sandbox.checkout(TestRepo)
assert TestRepo.insert(%Post{})
assert TestRepo.all(Post) != []
Sandbox.checkin(TestRepo)
Sandbox.checkout(TestRepo)
assert TestRepo.all(Post) == []
Sandbox.checkin(TestRepo)
end
test "runs inside a sandbox that may be disabled" do
Sandbox.checkout(TestRepo, sandbox: false)
assert TestRepo.insert(%Post{})
assert TestRepo.all(Post) != []
Sandbox.checkin(TestRepo)
Sandbox.checkout(TestRepo)
assert {1, _} = TestRepo.delete_all(Post)
Sandbox.checkin(TestRepo)
Sandbox.checkout(TestRepo, sandbox: false)
assert {1, _} = TestRepo.delete_all(Post)
Sandbox.checkin(TestRepo)
end
end
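# --- Editor's sketch (not part of Ecto's test suite) ------------------------
# The tests above drive checkout/checkin and allow/3 by hand. In an
# application test suite the same calls are usually wrapped in a case
# template; MyApp.Repo and the module name are assumptions for illustration.
defmodule MyApp.DataCaseSketch do
  use ExUnit.CaseTemplate

  setup do
    # Each test process takes ownership of its own sandboxed connection.
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(MyApp.Repo)
    # Shared mode lets processes spawned by the test reuse that connection
    # without an explicit allow/3 call.
    Ecto.Adapters.SQL.Sandbox.mode(MyApp.Repo, {:shared, self()})
    :ok
  end
end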
| 27.612903 | 71 | 0.692173 |
9e302f2cf22461317b68beeb7d4a90b930afc9e6 | 1,148 | exs | Elixir | mix.exs | thekirinlab/bureaucrat | 5db4f283b111b08bfc09d6a14d733eca1d3fb8f5 | [
"Unlicense"
] | null | null | null | mix.exs | thekirinlab/bureaucrat | 5db4f283b111b08bfc09d6a14d733eca1d3fb8f5 | [
"Unlicense"
] | null | null | null | mix.exs | thekirinlab/bureaucrat | 5db4f283b111b08bfc09d6a14d733eca1d3fb8f5 | [
"Unlicense"
] | null | null | null | defmodule Bureaucrat.Mixfile do
use Mix.Project
def project do
[
app: :bureaucrat,
version: "0.2.9",
elixir: "~> 1.10",
description: "Generate Phoenix API documentation from tests",
deps: deps(),
package: package()
]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[
registered: [Bureaucrat.Recorder],
mod: {Bureaucrat, []},
env: [
writer: Bureaucrat.MarkdownWriter,
default_path: "API.md",
paths: [],
titles: [],
env_var: "DOC"
]
]
end
defp deps do
[
{:plug, ">= 1.0.0"},
{:poison, "~> 1.5 or ~> 2.0 or ~> 3.0 or ~> 4.0", optional: true},
{:phoenix, ">= 1.2.0", optional: true},
{:ex_doc, "~> 0.19", only: :dev},
{:inflex, ">= 1.10.0"}
]
end
defp package do
[
files: ~w(lib mix.exs README.md UNLICENSE),
maintainers: ["Roman Kuznietsov", "Opak Alex", "Arno Dirlam"],
licenses: ["Unlicense"],
links: %{"GitHub" => "https://github.com/api-hogs/bureaucrat"}
]
end
end
| 22.509804 | 72 | 0.538328 |
9e304b4966ea42b7928d56d556ebdfaa2b243b60 | 554 | ex | Elixir | lib/hl7/2.2/segments/nk1.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.2/segments/nk1.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.2/segments/nk1.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | defmodule HL7.V2_2.Segments.NK1 do
@moduledoc false
require Logger
alias HL7.V2_2.{DataTypes}
use HL7.Segment,
fields: [
segment: nil,
set_id_next_of_kin: nil,
name: DataTypes.Pn,
relationship: DataTypes.Ce,
address: DataTypes.Ad,
phone_number: nil,
business_phone_number: nil,
contact_role: DataTypes.Ce,
start_date: nil,
end_date: nil,
next_of_kin: nil,
next_of_kin_job_code_class: nil,
next_of_kin_employee_number: nil,
organization_name: nil
]
end
| 22.16 | 39 | 0.658845 |
9e308582912a9d45334a1517e97ada748a07ca00 | 6,262 | exs | Elixir | test/groupher_server_web/mutation/cms/flags/guide_flag_test.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 240 | 2018-11-06T09:36:54.000Z | 2022-02-20T07:12:36.000Z | test/groupher_server_web/mutation/cms/flags/guide_flag_test.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 363 | 2018-07-11T03:38:14.000Z | 2021-12-14T01:42:40.000Z | test/groupher_server_web/mutation/cms/flags/guide_flag_test.exs | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | [
"Apache-2.0"
] | 22 | 2019-01-27T11:47:56.000Z | 2021-02-28T13:17:52.000Z | defmodule GroupherServer.Test.Mutation.Flags.GuideFlag do
use GroupherServer.TestTools
alias GroupherServer.CMS
alias CMS.Model.Community
alias Helper.ORM
setup do
{:ok, user} = db_insert(:user)
{:ok, community} = db_insert(:community)
{:ok, guide} = CMS.create_article(community, :guide, mock_attrs(:guide), user)
guest_conn = simu_conn(:guest)
user_conn = simu_conn(:user)
owner_conn = simu_conn(:user, user)
{:ok, ~m(user_conn guest_conn owner_conn community user guide)a}
end
describe "[mutation guide flag curd]" do
@query """
mutation($id: ID!){
markDeleteGuide(id: $id) {
id
markDelete
}
}
"""
test "auth user can markDelete guide", ~m(guide)a do
variables = %{id: guide.id}
passport_rules = %{"guide.mark_delete" => true}
rule_conn = simu_conn(:user, cms: passport_rules)
updated = rule_conn |> mutation_result(@query, variables, "markDeleteGuide")
assert updated["id"] == to_string(guide.id)
assert updated["markDelete"] == true
end
test "mark delete guide should update guide's communities meta count", ~m(user)a do
community_attrs = mock_attrs(:community) |> Map.merge(%{user_id: user.id})
{:ok, community} = CMS.create_community(community_attrs)
{:ok, guide} = CMS.create_article(community, :guide, mock_attrs(:guide), user)
{:ok, community} = ORM.find(Community, community.id)
assert community.meta.guides_count == 1
variables = %{id: guide.id}
passport_rules = %{"guide.mark_delete" => true}
rule_conn = simu_conn(:user, cms: passport_rules)
rule_conn |> mutation_result(@query, variables, "markDeleteGuide")
{:ok, community} = ORM.find(Community, community.id)
assert community.meta.guides_count == 0
end
test "unauth user markDelete guide fails", ~m(user_conn guest_conn guide)a do
variables = %{id: guide.id}
rule_conn = simu_conn(:user, cms: %{"what.ever" => true})
assert user_conn |> mutation_get_error?(@query, variables, ecode(:passport))
assert guest_conn |> mutation_get_error?(@query, variables, ecode(:account_login))
assert rule_conn |> mutation_get_error?(@query, variables, ecode(:passport))
end
@query """
mutation($id: ID!){
undoMarkDeleteGuide(id: $id) {
id
markDelete
}
}
"""
test "auth user can undo markDelete guide", ~m(guide)a do
variables = %{id: guide.id}
{:ok, _} = CMS.mark_delete_article(:guide, guide.id)
passport_rules = %{"guide.undo_mark_delete" => true}
rule_conn = simu_conn(:user, cms: passport_rules)
updated = rule_conn |> mutation_result(@query, variables, "undoMarkDeleteGuide")
assert updated["id"] == to_string(guide.id)
assert updated["markDelete"] == false
end
test "undo mark delete guide should update guide's communities meta count", ~m(user)a do
community_attrs = mock_attrs(:community) |> Map.merge(%{user_id: user.id})
{:ok, community} = CMS.create_community(community_attrs)
{:ok, guide} = CMS.create_article(community, :guide, mock_attrs(:guide), user)
{:ok, _} = CMS.mark_delete_article(:guide, guide.id)
{:ok, community} = ORM.find(Community, community.id)
assert community.meta.guides_count == 0
variables = %{id: guide.id}
passport_rules = %{"guide.undo_mark_delete" => true}
rule_conn = simu_conn(:user, cms: passport_rules)
rule_conn |> mutation_result(@query, variables, "undoMarkDeleteGuide")
{:ok, community} = ORM.find(Community, community.id)
assert community.meta.guides_count == 1
end
test "unauth user undo markDelete guide fails", ~m(user_conn guest_conn guide)a do
variables = %{id: guide.id}
rule_conn = simu_conn(:user, cms: %{"what.ever" => true})
assert user_conn |> mutation_get_error?(@query, variables, ecode(:passport))
assert guest_conn |> mutation_get_error?(@query, variables, ecode(:account_login))
assert rule_conn |> mutation_get_error?(@query, variables, ecode(:passport))
end
@query """
mutation($id: ID!, $communityId: ID!){
pinGuide(id: $id, communityId: $communityId) {
id
}
}
"""
test "auth user can pin guide", ~m(community guide)a do
variables = %{id: guide.id, communityId: community.id}
passport_rules = %{community.raw => %{"guide.pin" => true}}
rule_conn = simu_conn(:user, cms: passport_rules)
updated = rule_conn |> mutation_result(@query, variables, "pinGuide")
assert updated["id"] == to_string(guide.id)
end
test "unauth user pin guide fails", ~m(user_conn guest_conn community guide)a do
variables = %{id: guide.id, communityId: community.id}
rule_conn = simu_conn(:user, cms: %{"what.ever" => true})
assert user_conn |> mutation_get_error?(@query, variables, ecode(:passport))
assert guest_conn |> mutation_get_error?(@query, variables, ecode(:account_login))
assert rule_conn |> mutation_get_error?(@query, variables, ecode(:passport))
end
@query """
mutation($id: ID!, $communityId: ID!){
undoPinGuide(id: $id, communityId: $communityId) {
id
isPinned
}
}
"""
test "auth user can undo pin guide", ~m(community guide)a do
variables = %{id: guide.id, communityId: community.id}
passport_rules = %{community.raw => %{"guide.undo_pin" => true}}
rule_conn = simu_conn(:user, cms: passport_rules)
CMS.pin_article(:guide, guide.id, community.id)
updated = rule_conn |> mutation_result(@query, variables, "undoPinGuide")
assert updated["id"] == to_string(guide.id)
end
test "unauth user undo pin guide fails", ~m(user_conn guest_conn community guide)a do
variables = %{id: guide.id, communityId: community.id}
rule_conn = simu_conn(:user, cms: %{"what.ever" => true})
assert user_conn |> mutation_get_error?(@query, variables, ecode(:passport))
assert guest_conn |> mutation_get_error?(@query, variables, ecode(:account_login))
assert rule_conn |> mutation_get_error?(@query, variables, ecode(:passport))
end
end
end
| 34.98324 | 92 | 0.654104 |
9e30988383e4a8f5016b77b29d83a0bfb9912385 | 56 | ex | Elixir | lib/tnd/compendiums/skill.ex | tndrpg/tnd | a9a348ed7ce2f3d8f55046559f9551e2607f3236 | [
"0BSD"
] | null | null | null | lib/tnd/compendiums/skill.ex | tndrpg/tnd | a9a348ed7ce2f3d8f55046559f9551e2607f3236 | [
"0BSD"
] | 1 | 2021-05-11T14:31:58.000Z | 2021-05-11T14:31:58.000Z | lib/tnd/compendiums/skill.ex | tndrpg/tnd | a9a348ed7ce2f3d8f55046559f9551e2607f3236 | [
"0BSD"
] | null | null | null | defmodule Tnd.Compendiums.Skill do
use Tnd.Schema
end
| 14 | 34 | 0.803571 |
9e30a310953fc04c4efabfa416c0f967852e2050 | 656 | ex | Elixir | web/service_router.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 1,003 | 2016-02-23T17:21:12.000Z | 2022-02-20T14:39:35.000Z | web/service_router.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 906 | 2016-02-22T22:54:19.000Z | 2022-03-11T15:19:43.000Z | web/service_router.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 95 | 2016-02-23T13:42:31.000Z | 2021-11-30T14:39:55.000Z | defmodule Cog.ServiceRouter do
use Cog.Web, :router
pipeline :service do
plug Cog.Plug.Event
plug :accepts, ["json"]
end
scope "/v1/services", Cog do
pipe_through :service
get "/meta", V1.ServiceController, :index
get "/meta/deployed/:name", V1.ServiceController, :show
get "/memory/1.0.0/:key", V1.MemoryServiceController, :show
delete "/memory/1.0.0/:key", V1.MemoryServiceController, :delete
put "/memory/1.0.0/:key", V1.MemoryServiceController, :update
post "/memory/1.0.0/:key", V1.MemoryServiceController, :change
post "/chat/1.0.0/send_message", V1.ChatServiceController, :send_message
end
end
| 28.521739 | 76 | 0.693598 |
9e30a7ebc624b4197dd86ed7cad73bbbb1177482 | 512 | exs | Elixir | config/test.exs | DEvil0000/strichliste_elixir | 33efe808ced1dd3e3650212a506e8c3322277b2b | [
"MIT"
] | null | null | null | config/test.exs | DEvil0000/strichliste_elixir | 33efe808ced1dd3e3650212a506e8c3322277b2b | [
"MIT"
] | null | null | null | config/test.exs | DEvil0000/strichliste_elixir | 33efe808ced1dd3e3650212a506e8c3322277b2b | [
"MIT"
] | 1 | 2019-05-24T18:18:24.000Z | 2019-05-24T18:18:24.000Z | use Mix.Config
# Configure your database
config :strichliste_elixir, StrichlisteElixir.Repo,
username: "postgres",
password: "postgres",
database: "strichliste_elixir_test",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :strichliste_elixir, StrichlisteElixirWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 26.947368 | 58 | 0.759766 |
9e30bc6678cff96097fdaffffa9aedb07e65b1f4 | 1,271 | exs | Elixir | test/pay_nl/transaction_options_test.exs | smeevil/pay_nl | 8b62ed5c01405aba432e56e8c2b6c5774da1470a | [
"WTFPL"
] | 3 | 2017-10-03T12:30:57.000Z | 2020-01-06T00:23:59.000Z | test/pay_nl/transaction_options_test.exs | smeevil/pay_nl | 8b62ed5c01405aba432e56e8c2b6c5774da1470a | [
"WTFPL"
] | null | null | null | test/pay_nl/transaction_options_test.exs | smeevil/pay_nl | 8b62ed5c01405aba432e56e8c2b6c5774da1470a | [
"WTFPL"
] | 1 | 2019-02-11T11:12:17.000Z | 2019-02-11T11:12:17.000Z | defmodule PayNL.TransactionOptionsTest do
use ExUnit.Case, async: false
doctest PayNL
@valid_params [
remote_ip: "127.0.0.1",
amount_in_cents: 100,
return_url: "https://example.com/return",
notification_url: "https://example.com/ipn",
service_id: "123",
api_token: "abc"
]
test "creating with no options should return errors" do
assert {:error, _} = PayNL.TransactionOptions.create()
end
test "creating with minimal required options shoud return a config" do
assert {:ok, %PayNL.TransactionOptions{}} = PayNL.TransactionOptions.create(@valid_params)
end
test "should convert to post options" do
{:ok, options} = PayNL.TransactionOptions.create(@valid_params)
assert %{
"amount" => 100,
"finishUrl" => "https://example.com/return",
"ipAddress" => "127.0.0.1",
"serviceId" => "123",
"testMode" => "0",
"token" => "abc",
"transaction[currency]" => "EUR",
"enduser[language]" => "EN",
"transaction[orderExchangeUrl]" => "https://example.com/ipn",
"transaction[sendReminderEmail]" => "0"
}
== PayNL.TransactionOptions.to_post_map(options)
end
end
| 32.589744 | 94 | 0.598741 |
9e30d0180d62bc0da91139e624eb091af085a0a4 | 1,599 | exs | Elixir | test/harald/host/att/error_rsp_test.exs | RicardoTrindade/harald | 3f56003265c29af0780730eb538183b50e55df2f | [
"MIT"
] | 59 | 2019-02-16T00:09:58.000Z | 2020-03-29T23:37:36.000Z | test/harald/host/att/error_rsp_test.exs | RicardoTrindade/harald | 3f56003265c29af0780730eb538183b50e55df2f | [
"MIT"
] | 19 | 2019-02-15T22:41:43.000Z | 2020-02-15T19:20:57.000Z | test/harald/host/att/error_rsp_test.exs | RicardoTrindade/harald | 3f56003265c29af0780730eb538183b50e55df2f | [
"MIT"
] | 9 | 2020-05-07T00:02:36.000Z | 2021-09-17T18:17:46.000Z | defmodule Harald.Host.ATT.ErrorRspTest do
use ExUnit.Case, async: true
alias Harald.Host.ATT.{WriteReq, ErrorRsp, ErrorCodes}
test "encode/1" do
request_module_in_error = WriteReq
attribute_handle_in_error = 1
error_message = "Write Not Permitted"
parameters = %{
request_module_in_error: request_module_in_error,
attribute_handle_in_error: attribute_handle_in_error,
error_message: error_message
}
module_opcode = request_module_in_error.opcode()
{:ok, error_code} = ErrorCodes.encode(error_message)
expected_bin =
<<module_opcode::little-size(8), attribute_handle_in_error::little-size(16),
error_code::little-size(8)>>
expected_size = byte_size(expected_bin)
assert {:ok, actual_bin} = ErrorRsp.encode(parameters)
assert expected_bin == actual_bin
assert expected_size == byte_size(actual_bin)
end
test "decode/1" do
request_module_in_error = WriteReq
attribute_handle_in_error = 1
error_message = "Write Not Permitted"
expected_parameters = %{
request_module_in_error: request_module_in_error,
attribute_handle_in_error: attribute_handle_in_error,
error_message: error_message
}
module_opcode = request_module_in_error.opcode()
{:ok, error_code} = ErrorCodes.encode(error_message)
bin =
<<module_opcode::little-size(8), attribute_handle_in_error::little-size(16),
error_code::little-size(8)>>
assert {:ok, expected_parameters} == ErrorRsp.decode(bin)
end
test "opcode/0" do
assert 0x01 == ErrorRsp.opcode()
end
end
| 29.072727 | 82 | 0.722326 |
9e30d4c8168f2120941c8ab101d957ad81a52e01 | 61 | ex | Elixir | lib/rumbl/repo.ex | phensalves/rumbl | 4ecf962038774fa81c83f07b81f36266f86effaf | [
"MIT"
] | 1 | 2016-09-19T01:31:35.000Z | 2016-09-19T01:31:35.000Z | lib/rumbl/repo.ex | phensalves/rumbl | 4ecf962038774fa81c83f07b81f36266f86effaf | [
"MIT"
] | null | null | null | lib/rumbl/repo.ex | phensalves/rumbl | 4ecf962038774fa81c83f07b81f36266f86effaf | [
"MIT"
] | null | null | null | defmodule Rumbl.Repo do
use Ecto.Repo, otp_app: :rumbl
end
| 15.25 | 32 | 0.754098 |
9e30db77137037eb9baf2451d1ccf30e62e5a4f7 | 720 | ex | Elixir | lib/squarestore_web/gettext.ex | NinjaAnge/forksquare | ee9ea91e45e50b9f1ba4a8261ebdd99b7fe3333d | [
"MIT"
] | null | null | null | lib/squarestore_web/gettext.ex | NinjaAnge/forksquare | ee9ea91e45e50b9f1ba4a8261ebdd99b7fe3333d | [
"MIT"
] | null | null | null | lib/squarestore_web/gettext.ex | NinjaAnge/forksquare | ee9ea91e45e50b9f1ba4a8261ebdd99b7fe3333d | [
"MIT"
] | null | null | null | defmodule SquarestoreWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import SquarestoreWeb.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :squarestore
end
| 28.8 | 72 | 0.686111 |
9e30dcfb900e70378ad097eec8fd0ce3db0abe07 | 79 | exs | Elixir | config/config.exs | wayann/raxx | 4a192c9b5a9a483e93e2194f5039b9fa9ff92cc8 | [
"Apache-2.0"
] | null | null | null | config/config.exs | wayann/raxx | 4a192c9b5a9a483e93e2194f5039b9fa9ff92cc8 | [
"Apache-2.0"
] | null | null | null | config/config.exs | wayann/raxx | 4a192c9b5a9a483e93e2194f5039b9fa9ff92cc8 | [
"Apache-2.0"
] | 1 | 2021-01-12T09:57:58.000Z | 2021-01-12T09:57:58.000Z | use Mix.Config
config :raxx, :extra_statuses, [{422, "Unprocessable Entity"}]
| 19.75 | 62 | 0.734177 |
9e30dd0c46dc80821cf3b1bc8e274850d63f9b5e | 373 | exs | Elixir | test/async_server_test.exs | fhunleth/elixirport | 754634e3115cef04cd6a9dad8557cabedb2e0530 | [
"MIT"
] | null | null | null | test/async_server_test.exs | fhunleth/elixirport | 754634e3115cef04cd6a9dad8557cabedb2e0530 | [
"MIT"
] | null | null | null | test/async_server_test.exs | fhunleth/elixirport | 754634e3115cef04cd6a9dad8557cabedb2e0530 | [
"MIT"
] | null | null | null | defmodule AsyncServerTest do
use ExUnit.Case
setup do
{:ok, pid} = AsyncServer.start_link(:anything)
on_exit(fn() -> AsyncServer.stop(pid) end)
{:ok, server: pid}
end
test "ping/pong test", %{server: pid} do
assert :pong == AsyncServer.ping(pid)
end
test "add numbers", %{server: pid} do
assert 5 == AsyncServer.add(pid, 4, 1)
end
end
| 20.722222 | 50 | 0.640751 |
9e3102c0cb558869c58ed65e3d0a29e186192211 | 2,981 | ex | Elixir | clients/dataproc/lib/google_api/dataproc/v1/model/virtual_cluster_config.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/dataproc/lib/google_api/dataproc/v1/model/virtual_cluster_config.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/dataproc/lib/google_api/dataproc/v1/model/virtual_cluster_config.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dataproc.V1.Model.VirtualClusterConfig do
@moduledoc """
The Dataproc cluster config for a cluster that does not directly control the underlying compute resources, such as a Dataproc-on-GKE cluster (https://cloud.google.com/dataproc/docs/guides/dpgke/dataproc-gke).
## Attributes
* `auxiliaryServicesConfig` (*type:* `GoogleApi.Dataproc.V1.Model.AuxiliaryServicesConfig.t`, *default:* `nil`) - Optional. Configuration of auxiliary services used by this cluster.
* `kubernetesClusterConfig` (*type:* `GoogleApi.Dataproc.V1.Model.KubernetesClusterConfig.t`, *default:* `nil`) - Required. The configuration for running the Dataproc cluster on Kubernetes.
* `stagingBucket` (*type:* `String.t`, *default:* `nil`) - Optional. A Cloud Storage bucket used to stage job dependencies, config files, and job driver console output. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's staging bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket (see Dataproc staging and temp buckets (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)). This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:auxiliaryServicesConfig =>
GoogleApi.Dataproc.V1.Model.AuxiliaryServicesConfig.t() | nil,
:kubernetesClusterConfig =>
GoogleApi.Dataproc.V1.Model.KubernetesClusterConfig.t() | nil,
:stagingBucket => String.t() | nil
}
field(:auxiliaryServicesConfig, as: GoogleApi.Dataproc.V1.Model.AuxiliaryServicesConfig)
field(:kubernetesClusterConfig, as: GoogleApi.Dataproc.V1.Model.KubernetesClusterConfig)
field(:stagingBucket)
end
defimpl Poison.Decoder, for: GoogleApi.Dataproc.V1.Model.VirtualClusterConfig do
def decode(value, options) do
GoogleApi.Dataproc.V1.Model.VirtualClusterConfig.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Dataproc.V1.Model.VirtualClusterConfig do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 54.2 | 677 | 0.758806 |
9e31131f2f922fa138e52bbe65e96a3724ba256b | 2,735 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/batch_update_values_by_data_filter_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/sheets/lib/google_api/sheets/v4/model/batch_update_values_by_data_filter_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/batch_update_values_by_data_filter_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Sheets.V4.Model.BatchUpdateValuesByDataFilterResponse do
@moduledoc """
The response when updating a range of values in a spreadsheet.
## Attributes
* `responses` (*type:* `list(GoogleApi.Sheets.V4.Model.UpdateValuesByDataFilterResponse.t)`, *default:* `nil`) - The response for each range updated.
* `spreadsheetId` (*type:* `String.t`, *default:* `nil`) - The spreadsheet the updates were applied to.
* `totalUpdatedCells` (*type:* `integer()`, *default:* `nil`) - The total number of cells updated.
* `totalUpdatedColumns` (*type:* `integer()`, *default:* `nil`) - The total number of columns where at least one cell in the column was
updated.
* `totalUpdatedRows` (*type:* `integer()`, *default:* `nil`) - The total number of rows where at least one cell in the row was updated.
* `totalUpdatedSheets` (*type:* `integer()`, *default:* `nil`) - The total number of sheets where at least one cell in the sheet was
updated.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:responses => list(GoogleApi.Sheets.V4.Model.UpdateValuesByDataFilterResponse.t()),
:spreadsheetId => String.t(),
:totalUpdatedCells => integer(),
:totalUpdatedColumns => integer(),
:totalUpdatedRows => integer(),
:totalUpdatedSheets => integer()
}
field(:responses, as: GoogleApi.Sheets.V4.Model.UpdateValuesByDataFilterResponse, type: :list)
field(:spreadsheetId)
field(:totalUpdatedCells)
field(:totalUpdatedColumns)
field(:totalUpdatedRows)
field(:totalUpdatedSheets)
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.BatchUpdateValuesByDataFilterResponse do
def decode(value, options) do
GoogleApi.Sheets.V4.Model.BatchUpdateValuesByDataFilterResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.BatchUpdateValuesByDataFilterResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 42.734375 | 153 | 0.723949 |
9e3129ff88a4a92523428ca6b2bddd8670a18766 | 3,472 | ex | Elixir | lib/exnoops/fizzbot.ex | bfcarpio/exNoops-mirror | e9a08e8cfdc47da9ab3fef1cdd3bb1fd021b1cc5 | [
"MIT"
] | null | null | null | lib/exnoops/fizzbot.ex | bfcarpio/exNoops-mirror | e9a08e8cfdc47da9ab3fef1cdd3bb1fd021b1cc5 | [
"MIT"
] | null | null | null | lib/exnoops/fizzbot.ex | bfcarpio/exNoops-mirror | e9a08e8cfdc47da9ab3fef1cdd3bb1fd021b1cc5 | [
"MIT"
] | null | null | null | defmodule Exnoops.Fizzbot do
@moduledoc """
Module to interact with Github's Noop: Fizzbot
See the [official `noop` documentation](https://noopschallenge.com/challenges/fizzbot) for API information
"""
require Logger
import Exnoops.API
@noop "fizzbot"
@doc ~S"""
Query Fizzbot for a question
If you don't provide a question number, it will query the default endpoint which returns the instructions.
**Note**: Due to the many possible keys in each response, minimal processing will occur.
## Examples
iex> Exnoops.Fizzbot.ask_question()
{:ok,
%{
"message" => "Thank you for your application to Noops Inc.\n\nOur automated fizzbot interview process will help us determine if you have what it takes to become a Noop.\n\nFor each question, you will GET the question and then give us the answer back to the same URL.\nYou will also find the URL for the next question in the nextQuestion parameter for each response.\n\nThe first question is at https://api.noopschallenge.com/fizzbot/questions/1.\n\nGood Luck\n",
"nextQuestion" => "/fizzbot/questions/1"
}
}
iex> Exnoops.Fizzbot.ask_question(1)
{:ok,
%{
"message" => "What is your favorite programming language?\nMine is COBOL, of course.\nPOST your answer back to this URL in JSON format. If you are having difficulties, see the exampleResponse provided.",
"exampleResponse" => %{ "answer" => "COBOL" }
}
}
iex> Exnoops.Fizzbot.ask_question(1234567)
{:ok,
%{
"message" => "FizzBuzz is the name of the game.\nHere's a list of numbers.\nSend me back a string as follows:\nFor each number:\nIf it is divisible by 3, print \"Fizz\".\nIf it is divisible by 5, print \"Buzz\".\nIf it is divisible by 3 and 5, print \"FizzBuzz\".\nOtherwise, print the number.\n\nEach entry in the string should be separated by a space.\n\nFor example, if the numbers are [1, 2, 3, 4, 5], you would send back:\n\n{\n \"answer\": \"1 2 Fizz 4 Buzz\"\n}\n",
"rules" => [
%{ "number" => 3, "response" => "Fizz" },
%{ "number" => 5, "response" => "Buzz" }
],
"numbers" => [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 ],
"exampleResponse" => %{ "answer" => "1 2 Fizz 4 Buzz..." }
}
}
"""
@spec ask_question(integer()) :: {atom(), map()}
def ask_question(question_id \\ 0) when is_integer(question_id) do
Logger.debug("Calling Fizzbot.ask_question(#{question_id})")
endpoint = if question_id == 0, do: "", else: "/questions/#{question_id}"
case get("/" <> @noop <> endpoint, []) do
{:ok, _} = res -> res
error -> error
end
end
@doc ~S"""
Submit an answer to Fizzbot
## Examples
iex> Exnoops.Fizzbot.answer_question(1, %{"answer" => "COBOL"})
{:ok,
%{
"result" => "correct",
"message" => "Of course. How interesting. Are you ready for your first REAL question?",
"nextQuestion" => "/fizzbot/questions/1234567"
}
}
"""
@spec answer_question(integer(), map()) :: {atom(), map()}
def answer_question(question_id, %{"answer" => _} = answer) when is_integer(question_id) do
Logger.debug("Calling Fizzbot.answer_question(#{question_id})")
case post("/" <> @noop <> "/questions/#{question_id}", answer) do
{:ok, %{"result" => _}} = res -> res
error -> error
end
end
end
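# --- Editor's sketch (not part of the Exnoops library) ----------------------
# The doctests above show the ask/answer flow but leave the FizzBuzz logic to
# the caller. This is a hedged illustration of building the answer string from
# the "rules" and "numbers" keys seen in the sample response; the module name
# and the question-map shape are assumptions, only answer_question/2 comes
# from the module above.
defmodule FizzbotSolverSketch do
  @doc "Builds the space-separated answer for one question map."
  def build_answer(%{"rules" => rules, "numbers" => numbers}) do
    Enum.map_join(numbers, " ", &apply_rules(&1, rules))
  end

  defp apply_rules(number, rules) do
    # Concatenate the response of every rule whose divisor matches the number.
    case Enum.filter(rules, fn %{"number" => n} -> rem(number, n) == 0 end) do
      [] -> Integer.to_string(number)
      matches -> matches |> Enum.map(& &1["response"]) |> Enum.join()
    end
  end

  @doc "Posts the computed answer for a previously fetched question."
  def solve(question_id, question) do
    Exnoops.Fizzbot.answer_question(question_id, %{"answer" => build_answer(question)})
  end
end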
| 39.908046 | 483 | 0.618664 |
9e313ffacee6acffc40156c735b9eccd054459b0 | 6,416 | ex | Elixir | lib/vintage_net_mobile/ex_chat.ex | jfcloutier/vintage_net_mobile | 55f4ebfcd2d5bd9da7759ab112325c3a261a0d12 | [
"Apache-2.0"
] | 2 | 2020-02-11T15:15:25.000Z | 2020-02-11T15:15:27.000Z | lib/vintage_net_mobile/ex_chat.ex | jfcloutier/vintage_net_mobile | 55f4ebfcd2d5bd9da7759ab112325c3a261a0d12 | [
"Apache-2.0"
] | 8 | 2020-01-28T21:02:56.000Z | 2020-02-24T16:13:38.000Z | lib/vintage_net_mobile/ex_chat.ex | jfcloutier/vintage_net_mobile | 55f4ebfcd2d5bd9da7759ab112325c3a261a0d12 | [
"Apache-2.0"
] | 1 | 2020-02-08T20:46:27.000Z | 2020-02-08T20:46:27.000Z | defmodule VintageNetMobile.ExChat do
@moduledoc """
Send commands to your modem and get notifications
This module is used by the "monitor" modules for reporting modem and
connection status.
It can be handy to debug modems too. If you'd like to send commands and
receive notifications from the IEx prompt, here's what to do:
```elixir
require Logger
RingLogger.attach
tty_name = "ttyUSB2" # set to your AT command interface
VintageNetMobile.ExChat.register(tty_name, "+", fn m -> Logger.debug("Got: " <> inspect(m)) end)
VintageNetMobile.ExChat.send(tty_name, "AT+CSQ")
```
To reset the registrations, `VintageNet.deconfigure/2` and
`VintageNet.configure/3` your modem.
"""
alias VintageNetMobile.ExChat.Core
# This limits the restart rate for this GenServer on tty errors.
# Errors usually mean the interface is going away and vintage_net
# will clean things up soon. If nothing else, the UART won't be
# pegged by restarts and the logs won't be filled with errors.
@error_delay 1000
@typedoc """
The options for the ATCommand server are:
* `:speed` - the speed of the serial connection
* `:tty` - the tty name for sending AT commands
* `:uart` - use an alternative UART-provider (for testing)
* `:uart_opts` - additional options to pass to UART.open
"""
@type opt() ::
{:speed, non_neg_integer()}
| {:tty, String.t()}
| {:uart, module()}
| {:uart_opts, keyword()}
use GenServer
require Logger
@spec start_link([opt]) :: GenServer.on_start()
def start_link(opts) do
tty_name = Keyword.fetch!(opts, :tty)
GenServer.start_link(__MODULE__, opts, name: server_name(tty_name))
end
@doc """
Send a command to the modem
On success, this returns a list of the lines received back from the modem.
"""
@spec send(binary(), iodata(), Core.send_options()) :: {:ok, [binary()]} | {:error, any()}
def send(tty_name, command, options \\ []) do
# Make sure we wait long enough for the command to be processed by the modem
command_timeout = Keyword.get(options, :timeout, 10000) + 500
GenServer.call(server_name(tty_name), {:send, command, options}, command_timeout)
end
@doc """
Helper for sending commands to the modem as best effort
This function always succeeds. Failed commands log errors, but that's it. This
is useful for monitoring operations where intermittent failures should be logged,
but really aren't worth dealing with.
"""
@spec send_best_effort(binary(), iodata(), Core.send_options()) :: :ok
def send_best_effort(tty_name, command, options \\ []) do
case send(tty_name, command, options) do
{:ok, _response} ->
:ok
error ->
Logger.warn("Send #{inspect(command)} failed: #{inspect(error)}. Ignoring...")
:ok
end
end
@doc """
Register a callback function for reports
"""
@spec register(binary(), binary(), function()) :: :ok
def register(tty_name, type, callback) do
GenServer.call(server_name(tty_name), {:register, type, callback})
end
@impl GenServer
def init(opts) do
speed = Keyword.get(opts, :speed, 115_200)
tty_name = Keyword.fetch!(opts, :tty)
uart = Keyword.get(opts, :uart, Circuits.UART)
uart_opts = Keyword.get(opts, :uart_opts, [])
{:ok, uart_ref} = uart.start_link()
all_uart_opts =
[
speed: speed,
framing: {Circuits.UART.Framing.Line, separator: "\r\n"},
rx_framing_timeout: 500
] ++ uart_opts
{:ok,
%{uart: uart, uart_ref: uart_ref, tty_name: tty_name, core: Core.init(), timer_ref: nil},
{:continue, all_uart_opts}}
end
@impl GenServer
def handle_continue(uart_opts, state) do
case state.uart.open(state.uart_ref, state.tty_name, uart_opts) do
:ok ->
{:noreply, state}
{:error, error} ->
Logger.warn("vintage_net_mobile: can't open #{state.tty_name}: #{inspect(error)}")
Process.sleep(@error_delay)
{:stop, :tty_error, state}
end
end
@impl GenServer
def handle_call({:send, command, options}, from, state) do
{new_core_state, actions} = Core.send(state.core, command, from, options)
new_state =
%{state | core: new_core_state}
|> run_actions(actions)
{:noreply, new_state}
end
@impl GenServer
def handle_call({:register, type, callback}, _from, state) do
{new_core_state, actions} = Core.register(state.core, type, callback)
new_state =
%{state | core: new_core_state}
|> run_actions(actions)
{:reply, :ok, new_state}
end
@impl GenServer
def handle_info({:circuits_uart, tty_name, {:partial, fragment}}, state) do
Logger.warn("vintage_net_mobile: dropping junk from #{tty_name}: #{inspect(fragment)}")
{:noreply, state}
end
def handle_info({:circuits_uart, tty_name, {:error, error}}, state) do
Logger.warn("vintage_net_mobile: error from #{tty_name}: #{inspect(error)}")
Process.sleep(@error_delay)
{:stop, :tty_error, state}
end
def handle_info({:circuits_uart, _tty_name, message}, state) do
{new_core_state, actions} = Core.process(state.core, message)
new_state =
%{state | core: new_core_state}
|> run_actions(actions)
{:noreply, new_state}
end
def handle_info({:timeout, core_timer_ref}, state) do
{new_core_state, actions} = Core.timeout(state.core, core_timer_ref)
new_state =
%{state | core: new_core_state, timer_ref: nil}
|> run_actions(actions)
{:noreply, new_state}
end
defp run_actions(state, actions) do
Enum.reduce(actions, state, &run_action(&2, &1))
end
defp run_action(state, {:notify, what, who}) do
apply(who, [what])
state
end
defp run_action(state, {:reply, what, who}) do
GenServer.reply(who, what)
state
end
defp run_action(state, {:send, what}) do
:ok = state.uart.write(state.uart_ref, what)
state
end
defp run_action(state, {:start_timer, timeout, core_timer_ref}) do
timer_ref = Process.send_after(self(), {:timeout, core_timer_ref}, timeout)
%{state | timer_ref: timer_ref}
end
defp run_action(state, :stop_timer) do
_ = Process.cancel_timer(state.timer_ref)
%{state | timer_ref: nil}
end
defp server_name("/dev/" <> tty_name) do
server_name(tty_name)
end
defp server_name(tty_name) do
Module.concat([__MODULE__, tty_name])
end
end
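# --- Editor's sketch (not part of vintage_net_mobile) -----------------------
# A minimal "monitor"-style consumer of the register/3 and send_best_effort/3
# API above: it forwards unsolicited "+..." reports to itself and fires one
# best-effort signal-quality query. The module name and tty are assumptions;
# the exact shape of the forwarded report depends on ExChat.Core.
defmodule SignalMonitorSketch do
  use GenServer
  require Logger

  @tty "ttyUSB2"

  def start_link(opts), do: GenServer.start_link(__MODULE__, opts, name: __MODULE__)

  @impl GenServer
  def init(_opts) do
    me = self()
    # Every notification whose type starts with "+" is forwarded as a message.
    :ok = VintageNetMobile.ExChat.register(@tty, "+", &send(me, {:modem, &1}))
    # Best-effort: failures are logged by ExChat and otherwise ignored.
    :ok = VintageNetMobile.ExChat.send_best_effort(@tty, "AT+CSQ")
    {:ok, %{}}
  end

  @impl GenServer
  def handle_info({:modem, report}, state) do
    # e.g. a "+CSQ: 23,99" report; real parsing is out of scope for this sketch.
    Logger.info("modem report: #{inspect(report)}")
    {:noreply, state}
  end
end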
| 29.296804 | 98 | 0.668797 |
9e314803880f4283d1352d6bda6a7bb1bc9d024e | 2,217 | ex | Elixir | src/server/lib/api/websocket.ex | pedromykul/csb-wonhu1 | 5fe81fbc99d2b9859fbdb3c87c23e788264c0b9c | [
"MIT"
] | null | null | null | src/server/lib/api/websocket.ex | pedromykul/csb-wonhu1 | 5fe81fbc99d2b9859fbdb3c87c23e788264c0b9c | [
"MIT"
] | null | null | null | src/server/lib/api/websocket.ex | pedromykul/csb-wonhu1 | 5fe81fbc99d2b9859fbdb3c87c23e788264c0b9c | [
"MIT"
] | null | null | null | defmodule Api.Websocket do
@behaviour :cowboy_websocket
require Logger
def init(request, _state) do
parts = String.split(request.path, "/")
game_uuid = Enum.at(parts, 2)
user_uuid = Enum.at(parts, 3)
{:cowboy_websocket, request, {game_uuid, user_uuid}}
end
def websocket_init({game_uuid, user_uuid}) do
{:ok, user} = User.Store.find_or_initialize(user_uuid)
{:ok, pid} = Game.Lobby.Supervisor.find_or_initialize(game_uuid)
if Game.Lobby.Server.joinable?(pid, user) do
new_state = Game.Lobby.Server.join(pid, user)
      {:reply, {:text, encode!(new_state)}, new_state}
else
{:reply, {:close, 1000, "Lobby is full"}, %Game{}}
end
end
def websocket_handle({:text, json}, state) do
{:ok, pid} = Game.Lobby.Supervisor.find_or_initialize(state.uuid)
state =
case Jason.decode!(json) do
%{"type" => "continue"} ->
Game.Lobby.Server.action(pid, :continue)
%{"type" => "toggleDice", "value" => index} ->
Game.Lobby.Server.action(pid, {:toggle, index})
%{"type" => "selectFavor", "value" => %{"favor" => favor, "tier" => tier}} ->
Game.Lobby.Server.action(pid, {:select, %{favor: favor, tier: tier}})
%{"type" => "changeSettings", "value" => settings} ->
Game.Lobby.Server.change_settings(pid, settings)
%{"type" => "toggleReady"} ->
Game.Lobby.Server.toggle_ready(pid)
%{"type" => "updateUser", "value" => attrs} ->
Game.Lobby.Server.update_user(pid, attrs)
_other ->
state
end
{:reply, {:text, encode!(state)}, state}
end
@spec websocket_info(any, any) :: {:reply, {:text, any}, any}
def websocket_info(%Game.Lobby{} = new_state, _state) do
{:reply, {:text, encode!(new_state)}, new_state}
end
def websocket_info(other, state) do
{:reply, {:text, other}, state}
end
def terminate(_reason, _req, state) do
{:ok, pid} = Game.Lobby.Supervisor.find_or_initialize(state.uuid)
Game.Lobby.Server.leave(pid)
end
defp encode!(state) do
state
|> Map.put(:turn, Game.Lobby.turn(state, self()))
|> Map.put(:user, Game.Lobby.get_user(state, self()))
|> Jason.encode!()
end
end
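# --- Editor's sketch (not part of this application) -------------------------
# init/2 above takes path segments 2 and 3, so it expects paths shaped like
# "/<prefix>/<game_uuid>/<user_uuid>". A cowboy dispatch along these lines
# would route such requests to the handler; the "/play" prefix, listener name
# and port are assumptions for illustration only.
defmodule Api.EndpointSketch do
  def start do
    dispatch =
      :cowboy_router.compile([
        {:_, [{"/play/:game_uuid/:user_uuid", Api.Websocket, nil}]}
      ])

    :cowboy.start_clear(:api_ws_listener, [port: 4000], %{env: %{dispatch: dispatch}})
  end
end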
| 28.423077 | 85 | 0.609382 |
9e3151843721ad7ec721035525f989fef8151e23 | 1,392 | ex | Elixir | skippy/bat/lib/sensor.ex | mashbytes/baby_zoo | 4554890242a2493d17d9b1c1f4cc90d7ad1e637e | [
"MIT"
] | null | null | null | skippy/bat/lib/sensor.ex | mashbytes/baby_zoo | 4554890242a2493d17d9b1c1f4cc90d7ad1e637e | [
"MIT"
] | 5 | 2020-07-17T23:35:42.000Z | 2021-05-10T07:00:10.000Z | skippy/bat/lib/sensor.ex | mashbytes/baby_zoo | 4554890242a2493d17d9b1c1f4cc90d7ad1e637e | [
"MIT"
] | null | null | null | defmodule Bat.Sensor do
use GenServer
require Logger
@input_pin Application.get_env(:skippy, :sound_input_pin, 4)
@timeout Application.get_env(:skippy, :sound_timout, 2000)
@name __MODULE__
def start_link(state) do
GenServer.start_link(@name, state, name: @name)
end
def init(_) do
Logger.debug("Starting hardware on pin #{@input_pin}")
{:ok, pid} = Circuits.GPIO.open(@input_pin, :input)
:ok = Circuits.GPIO.set_interrupts(pid, :rising)
{:ok, %{gpio: pid, state: Device.State.new(:inactive, DateTime.utc_now())}}
end
def handle_info({:circuits_gpio, @input_pin, timestamp, 1}, state) do
# Logger.debug("Received high signal on #{@input_pin}, timestamp, #{timestamp}")
active = Device.State.new(:active, timestamp)
updated_state = %{state | state: active}
{:noreply, updated_state, @timeout}
end
def handle_info(:timeout, state) do
inactive_state = Device.State.new(:inactive, DateTime.utc_now())
Logger.debug("Timeout occurred, setting state to [#{inactive_state}]")
updated_state = %{state | state: inactive_state}
{:noreply, updated_state}
end
def handle_call(:snapshot, _, state) do
Logger.debug("Fetching snapshot [#{Map.get(state, :state)}]")
snapshot = Map.get(state, :state)
{:reply, snapshot, state}
end
def snapshot() do
GenServer.call(@name, :snapshot)
end
end
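# --- Editor's sketch (not part of this firmware) -----------------------------
# How the sensor might be started under the application's supervision tree and
# read elsewhere; the application module name is an assumption, only
# Bat.Sensor.start_link/1 and Bat.Sensor.snapshot/0 come from the module above.
defmodule Bat.ApplicationSketch do
  use Application

  @impl Application
  def start(_type, _args) do
    children = [
      # The argument is handed to init/1, which ignores it, so any term works.
      {Bat.Sensor, nil}
    ]

    Supervisor.start_link(children, strategy: :one_for_one, name: Bat.SupervisorSketch)
  end
end
# A consumer can then poll the latest reading with Bat.Sensor.snapshot/0.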
| 29.617021 | 84 | 0.68319 |
9e315e0828ea7e29ca27b956a467da2fc193acb3 | 627 | exs | Elixir | mix.exs | wojtekmach/rebar3_ex_doc | 59b00b28acc17f22a887fa9274794f8ef7dcb7af | [
"Apache-2.0"
] | null | null | null | mix.exs | wojtekmach/rebar3_ex_doc | 59b00b28acc17f22a887fa9274794f8ef7dcb7af | [
"Apache-2.0"
] | null | null | null | mix.exs | wojtekmach/rebar3_ex_doc | 59b00b28acc17f22a887fa9274794f8ef7dcb7af | [
"Apache-2.0"
] | null | null | null | defmodule RebarHexDoc.MixProject do
use Mix.Project
def project do
[
app: :rebar_ex_doc,
version: "0.2.0",
elixir: "~> 1.13",
start_permanent: Mix.env() == :prod,
deps: deps(),
escript: [main_module: ExDoc.CLI, name: "ex_doc", path: "priv/ex_doc"],
docs: [main: "readme", # The main page in the docs
extras: ["README.md"]]
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: []
]
end
defp deps do
[
{:ex_doc, git: "https://github.com/elixir-lang/ex_doc"}
]
end
end
| 19.59375 | 77 | 0.575758 |
9e315ea576b9cca4dc3491d21514c08891ad2663 | 2,386 | ex | Elixir | clients/calendar/lib/google_api/calendar/v3/model/free_busy_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/calendar/lib/google_api/calendar/v3/model/free_busy_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/calendar/lib/google_api/calendar/v3/model/free_busy_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Calendar.V3.Model.FreeBusyResponse do
@moduledoc """
## Attributes
* `calendars` (*type:* `%{optional(String.t) => GoogleApi.Calendar.V3.Model.FreeBusyCalendar.t}`, *default:* `nil`) - List of free/busy information for calendars.
* `groups` (*type:* `%{optional(String.t) => GoogleApi.Calendar.V3.Model.FreeBusyGroup.t}`, *default:* `nil`) - Expansion of groups.
* `kind` (*type:* `String.t`, *default:* `calendar#freeBusy`) - Type of the resource ("calendar#freeBusy").
* `timeMax` (*type:* `DateTime.t`, *default:* `nil`) - The end of the interval.
* `timeMin` (*type:* `DateTime.t`, *default:* `nil`) - The start of the interval.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:calendars => %{
optional(String.t()) => GoogleApi.Calendar.V3.Model.FreeBusyCalendar.t()
},
:groups => %{optional(String.t()) => GoogleApi.Calendar.V3.Model.FreeBusyGroup.t()},
:kind => String.t(),
:timeMax => DateTime.t(),
:timeMin => DateTime.t()
}
field(:calendars, as: GoogleApi.Calendar.V3.Model.FreeBusyCalendar, type: :map)
field(:groups, as: GoogleApi.Calendar.V3.Model.FreeBusyGroup, type: :map)
field(:kind)
field(:timeMax, as: DateTime)
field(:timeMin, as: DateTime)
end
defimpl Poison.Decoder, for: GoogleApi.Calendar.V3.Model.FreeBusyResponse do
def decode(value, options) do
GoogleApi.Calendar.V3.Model.FreeBusyResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Calendar.V3.Model.FreeBusyResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.114754 | 166 | 0.692372 |
9e3166dc6a0ae196c3bb7fe5d25f85a95bc6a981 | 6,012 | ex | Elixir | lib/nebulex/cache/entry.ex | amplifiedai/nebulex | fa788d9ae71ef0e4aba73f98953e98d8a7644a29 | [
"MIT"
] | 1 | 2021-03-01T16:14:07.000Z | 2021-03-01T16:14:07.000Z | lib/nebulex/cache/entry.ex | amplifiedai/nebulex | fa788d9ae71ef0e4aba73f98953e98d8a7644a29 | [
"MIT"
] | null | null | null | lib/nebulex/cache/entry.ex | amplifiedai/nebulex | fa788d9ae71ef0e4aba73f98953e98d8a7644a29 | [
"MIT"
] | null | null | null | defmodule Nebulex.Cache.Entry do
@moduledoc false
import Nebulex.Helpers
alias Nebulex.{Adapter, Time}
@doc """
Implementation for `c:Nebulex.Cache.get/2`.
"""
def get(name, key, opts) do
Adapter.with_meta(name, & &1.get(&2, key, opts))
end
@doc """
Implementation for `c:Nebulex.Cache.get!/2`.
"""
def get!(name, key, opts) do
if result = get(name, key, opts) do
result
else
raise KeyError, key: key, term: name
end
end
@doc """
Implementation for `c:Nebulex.Cache.get_all/2`.
"""
def get_all(_name, [], _opts), do: %{}
def get_all(name, keys, opts) do
Adapter.with_meta(name, & &1.get_all(&2, keys, opts))
end
@doc """
Implementation for `c:Nebulex.Cache.put/3`.
"""
def put(name, key, value, opts) do
true = do_put(name, key, value, :put, opts)
:ok
end
@doc """
Implementation for `c:Nebulex.Cache.put_new/3`.
"""
def put_new(name, key, value, opts) do
do_put(name, key, value, :put_new, opts)
end
@doc """
Implementation for `c:Nebulex.Cache.put_new!/3`.
"""
def put_new!(name, key, value, opts) do
with false <- put_new(name, key, value, opts) do
raise Nebulex.KeyAlreadyExistsError, cache: name, key: key
end
end
@doc """
Implementation for `c:Nebulex.Cache.replace/3`.
"""
def replace(name, key, value, opts) do
do_put(name, key, value, :replace, opts)
end
@doc """
Implementation for `c:Nebulex.Cache.replace!/3`.
"""
def replace!(name, key, value, opts) do
with false <- replace(name, key, value, opts) do
raise KeyError, key: key, term: name
end
end
defp do_put(_name, _key, nil, _on_write, _opts), do: true
defp do_put(name, key, value, on_write, opts) do
Adapter.with_meta(name, & &1.put(&2, key, value, get_ttl(opts), on_write, opts))
end
@doc """
Implementation for `c:Nebulex.Cache.put_all/2`.
"""
def put_all(name, entries, opts) do
_ = do_put_all(name, entries, :put, opts)
:ok
end
@doc """
Implementation for `c:Nebulex.Cache.put_new_all/2`.
"""
def put_new_all(name, entries, opts) do
do_put_all(name, entries, :put_new, opts)
end
def do_put_all(_name, [], _on_write, _opts), do: true
def do_put_all(_name, entries, _on_write, _opts) when map_size(entries) == 0, do: true
def do_put_all(name, entries, on_write, opts) do
Adapter.with_meta(name, & &1.put_all(&2, entries, get_ttl(opts), on_write, opts))
end
@doc """
Implementation for `c:Nebulex.Cache.delete/2`.
"""
def delete(name, key, opts) do
Adapter.with_meta(name, & &1.delete(&2, key, opts))
end
@doc """
Implementation for `c:Nebulex.Cache.take/2`.
"""
def take(_name, nil, _opts), do: nil
def take(name, key, opts) do
Adapter.with_meta(name, & &1.take(&2, key, opts))
end
@doc """
Implementation for `c:Nebulex.Cache.take!/2`.
"""
def take!(name, key, opts) do
if result = take(name, key, opts) do
result
else
raise KeyError, key: key, term: name
end
end
@doc """
Implementation for `c:Nebulex.Cache.has_key?/1`.
"""
def has_key?(name, key) do
Adapter.with_meta(name, & &1.has_key?(&2, key))
end
@doc """
Implementation for `c:Nebulex.Cache.get_and_update/3`.
"""
def get_and_update(name, key, fun, opts) when is_function(fun, 1) do
Adapter.with_meta(name, fn adapter, adapter_meta ->
current = adapter.get(adapter_meta, key, opts)
case fun.(current) do
{get, nil} ->
{get, get}
{get, update} ->
true = adapter.put(adapter_meta, key, update, get_ttl(opts), :put, opts)
{get, update}
:pop when is_nil(current) ->
{nil, nil}
:pop ->
:ok = adapter.delete(adapter_meta, key, opts)
{current, nil}
other ->
raise ArgumentError,
"the given function must return a two-element tuple or :pop," <>
" got: #{inspect(other)}"
end
end)
end
@doc """
Implementation for `c:Nebulex.Cache.update/4`.
"""
def update(name, key, initial, fun, opts) do
Adapter.with_meta(name, fn adapter, adapter_meta ->
adapter_meta
|> adapter.get(key, opts)
|> case do
nil -> {initial, nil}
val -> {fun.(val), val}
end
|> case do
{nil, old} ->
# avoid storing nil values
old
{new, _} ->
true = adapter.put(adapter_meta, key, new, get_ttl(opts), :put, opts)
new
end
end)
end
@doc """
Implementation for `c:Nebulex.Cache.incr/3`.
"""
def incr(name, key, incr, opts) when is_integer(incr) do
Adapter.with_meta(name, & &1.incr(&2, key, incr, get_ttl(opts), opts))
end
def incr(_cache, _key, incr, _opts) do
raise ArgumentError, "expected incr to be an integer, got: #{inspect(incr)}"
end
@doc """
Implementation for `c:Nebulex.Cache.ttl/1`.
"""
def ttl(name, key) do
Adapter.with_meta(name, & &1.ttl(&2, key))
end
@doc """
Implementation for `c:Nebulex.Cache.expire/2`.
"""
def expire(name, key, ttl) do
ttl =
(Time.timeout?(ttl) && ttl) ||
raise ArgumentError, "expected ttl to be a valid timeout, got: #{inspect(ttl)}"
Adapter.with_meta(name, & &1.expire(&2, key, ttl))
end
@doc """
Implementation for `c:Nebulex.Cache.touch/1`.
"""
def touch(name, key) do
Adapter.with_meta(name, & &1.touch(&2, key))
end
@doc """
Implementation for `c:Nebulex.Cache.size/0`.
"""
def size(name) do
Adapter.with_meta(name, & &1.size(&2))
end
@doc """
Implementation for `c:Nebulex.Cache.flush/0`.
"""
def flush(name) do
Adapter.with_meta(name, & &1.flush(&2))
end
## Helpers
defp get_ttl(opts) do
case get_option(opts, :ttl, &Time.timeout?/1, :infinity, &{:error, &1}) do
{:error, val} ->
raise ArgumentError, "expected ttl: to be a valid timeout, got: #{inspect(val)}"
val ->
val
end
end
end
| 23.857143 | 88 | 0.601464 |
9e316c15a6b694fab8b44689084a99e565503d63 | 1,021 | exs | Elixir | mix.exs | s-takada/elixir_github_issues_getter | 12332148c73a80f0c912e1883dc435fb761f112d | [
"MIT"
] | null | null | null | mix.exs | s-takada/elixir_github_issues_getter | 12332148c73a80f0c912e1883dc435fb761f112d | [
"MIT"
] | null | null | null | mix.exs | s-takada/elixir_github_issues_getter | 12332148c73a80f0c912e1883dc435fb761f112d | [
"MIT"
] | null | null | null | defmodule Issues.Mixfile do
use Mix.Project
def project do
[app: :issues,
escript: escript_config,
version: "0.1.0",
elixir: "~> 1.3",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
name: "Issues",
source_url: "https://github.com/s0utakada/issues",
deps: deps]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[ applications: [ :logger, :httpoison ] ]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
httpoison: "~> 0.8",
poison: "~> 1.5",
ex_doc: "~> 0.11",
earmark: ">= 0.0.0"
]
end
defp escript_config do
[ main_module: Issues.CLI ]
end
end
| 22.688889 | 77 | 0.5524 |
9e31b2ee1ce0e4b142014a12782ac45a529653ff | 2,968 | ex | Elixir | clients/surveys/lib/google_api/surveys/v2/model/survey_cost.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/surveys/lib/google_api/surveys/v2/model/survey_cost.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/surveys/lib/google_api/surveys/v2/model/survey_cost.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Surveys.V2.Model.SurveyCost do
@moduledoc """
Message defining the cost to run a given survey through API.
## Attributes
- costPerResponseNanos (String): Cost per survey response in nano units of the given currency. To get the total cost for a survey, multiply this value by wanted_response_count. Defaults to: `null`.
- currencyCode (String): Currency code that the cost is given in. Defaults to: `null`.
- maxCostPerResponseNanos (String): Threshold to start a survey automatically if the quoted price is at most this value. When a survey has a Screener (threshold) question, it must go through an incidence pricing test to determine the final cost per response. Typically you will have to make a followup call to start the survey giving the final computed cost per response. If the survey has no threshold_answers, setting this property will return an error. By specifying this property, you indicate the max price per response you are willing to pay in advance of the incidence test. If the price turns out to be lower than the specified value, the survey will begin immediately and you will be charged at the rate determined by the incidence pricing test. If the price turns out to be greater than the specified value the survey will not be started and you will instead be notified what price was determined by the incidence test. At that point, you must raise the value of this property to be greater than or equal to that cost before attempting to start the survey again. This will immediately start the survey as long the incidence test was run within the last 21 days. Defaults to: `null`.
- nanos (String): Cost of survey in nano units of the given currency. DEPRECATED in favor of cost_per_response_nanos Defaults to: `null`.
"""
defstruct [
:"costPerResponseNanos",
:"currencyCode",
:"maxCostPerResponseNanos",
:"nanos"
]
end
defimpl Poison.Decoder, for: GoogleApi.Surveys.V2.Model.SurveyCost do
def decode(value, _options) do
value
end
end
defimpl Poison.Encoder, for: GoogleApi.Surveys.V2.Model.SurveyCost do
def encode(value, options) do
GoogleApi.Surveys.V2.Deserializer.serialize_non_nil(value, options)
end
end
| 57.076923 | 1,194 | 0.772911 |
9e31f29126e69606198bfc9adc3f3e53d1ed342b | 246 | exs | Elixir | apps/crm/priv/repo/migrations/20190821133523_create_contacts.exs | jdambron/phoenix_crm | 6d17280e3a6655990840f4714a338f58b7d0c756 | [
"MIT"
] | null | null | null | apps/crm/priv/repo/migrations/20190821133523_create_contacts.exs | jdambron/phoenix_crm | 6d17280e3a6655990840f4714a338f58b7d0c756 | [
"MIT"
] | 1 | 2020-04-06T07:42:36.000Z | 2020-04-06T07:42:36.000Z | apps/crm/priv/repo/migrations/20190821133523_create_contacts.exs | jdambron/phoenix_crm | 6d17280e3a6655990840f4714a338f58b7d0c756 | [
"MIT"
] | null | null | null | defmodule Crm.Repo.Migrations.CreateContacts do
use Ecto.Migration
def change do
create table("contacts") do
add :first_name, :string
add :last_name, :string
add :days_for_contact, :integer
timestamps()
end
end
end
| 18.923077 | 47 | 0.699187 |
9e31f77b07020c485742d01ef35fba691570b02a | 270 | exs | Elixir | test/mappers_web/views/layout_view_test.exs | evandiewald/mappers | 7359cfb39a4d9d26c42f5917ee04a7e41d3291bc | [
"Apache-2.0"
] | 32 | 2021-04-22T01:55:31.000Z | 2022-02-25T13:17:21.000Z | test/mappers_web/views/layout_view_test.exs | evandiewald/mappers | 7359cfb39a4d9d26c42f5917ee04a7e41d3291bc | [
"Apache-2.0"
] | 58 | 2021-06-04T18:42:59.000Z | 2022-03-31T07:17:01.000Z | test/mappers_web/views/layout_view_test.exs | evandiewald/mappers | 7359cfb39a4d9d26c42f5917ee04a7e41d3291bc | [
"Apache-2.0"
] | 13 | 2021-04-10T06:09:15.000Z | 2022-03-23T13:07:37.000Z | defmodule MappersWeb.LayoutViewTest do
use MappersWeb.ConnCase, async: true
# When testing helpers, you may want to import Phoenix.HTML and
# use functions such as safe_to_string() to convert the helper
# result into an HTML string.
# import Phoenix.HTML
end
| 30 | 65 | 0.766667 |
9e32054251a931b11933c845f06caa4359928670 | 2,352 | exs | Elixir | test/lib/code_corps_web/controllers/password_reset_controller_test.exs | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 275 | 2015-06-23T00:20:51.000Z | 2021-08-19T16:17:37.000Z | test/lib/code_corps_web/controllers/password_reset_controller_test.exs | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 1,304 | 2015-06-26T02:11:54.000Z | 2019-12-12T21:08:00.000Z | test/lib/code_corps_web/controllers/password_reset_controller_test.exs | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 140 | 2016-01-01T18:19:47.000Z | 2020-11-22T06:24:47.000Z | defmodule CodeCorpsWeb.PasswordResetControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :password_reset
import CodeCorps.TestEnvironmentHelper, only: [modify_env: 2]
alias CodeCorps.{AuthToken, User}
test "updates user password when data is valid and deletes auth token model", %{conn: conn} do
current_user = insert(:user)
{:ok, auth_token} = AuthToken.changeset(%AuthToken{}, current_user) |> Repo.insert
attrs = %{"token" => auth_token.value, "password" => "123456", "password_confirmation" => "123456"}
conn = post conn, password_reset_path(conn, :reset_password), attrs
response = json_response(conn, 201)
assert response
encrypted_password = Repo.get(User, current_user.id).encrypted_password
assert Comeonin.Bcrypt.checkpw("123456", encrypted_password)
assert AuthToken |> Repo.get(auth_token.id) == nil
end
test "does not create resource and renders errors when password does not match", %{conn: conn} do
current_user = insert(:user)
{:ok, auth_token} = AuthToken.changeset(%AuthToken{}, current_user) |> Repo.insert()
attrs = %{"token" => auth_token.value, "password" => "123456", "password_confirmation" => "another"}
conn = post conn, password_reset_path(conn, :reset_password), attrs
response = json_response(conn, 422)
assert %{"errors" => [%{"detail" => "Password confirmation passwords do not match"}]} = response
end
test "does not create resource and renders errors when token is invalid", %{conn: conn} do
current_user = insert(:user)
{:ok, _} = AuthToken.changeset(%AuthToken{}, current_user) |> Repo.insert()
attrs = %{"token" => "random token", "password" => "123456", "password_confirmation" => "123456"}
conn = post conn, password_reset_path(conn, :reset_password), attrs
assert json_response(conn, 404)
end
test "does not create resource and renders errors when error in token timeout occurs", %{conn: conn} do
modify_env(:code_corps, password_reset_timeout: 0)
current_user = insert(:user)
{:ok, auth_token} = AuthToken.changeset(%AuthToken{}, current_user) |> Repo.insert()
attrs = %{"token" => auth_token.value, "password" => "123456", "password_confirmation" => "123456"}
conn = post conn, password_reset_path(conn, :reset_password), attrs
assert json_response(conn, 404)
end
end
| 46.117647 | 105 | 0.708759 |
9e321299654509e7ff14b42e2d3150e5d8f01f4c | 11,275 | exs | Elixir | test/teslamate/vehicles/vehicle/charging_test.exs | aishlai/teslamate | c736497d1972563c49e48f08f9c76dd08ce546e5 | [
"MIT"
] | 2,602 | 2019-07-24T23:19:12.000Z | 2022-03-31T15:03:48.000Z | test/teslamate/vehicles/vehicle/charging_test.exs | aishlai/teslamate | c736497d1972563c49e48f08f9c76dd08ce546e5 | [
"MIT"
] | 1,547 | 2019-07-26T22:02:09.000Z | 2022-03-31T15:39:41.000Z | test/teslamate/vehicles/vehicle/charging_test.exs | aishlai/teslamate | c736497d1972563c49e48f08f9c76dd08ce546e5 | [
"MIT"
] | 524 | 2019-07-26T17:31:33.000Z | 2022-03-29T15:16:36.000Z | defmodule TeslaMate.Vehicles.Vehicle.ChargingTest do
use TeslaMate.VehicleCase, async: true
alias TeslaMate.Vehicles.Vehicle.Summary
alias TeslaMate.Log.ChargingProcess
import ExUnit.CaptureLog
@log_opts format: "[$level] $message\n",
colors: [enabled: false]
test "logs a full charging cycle", %{test: name} do
now_ts = DateTime.utc_now() |> DateTime.to_unix(:millisecond)
events = [
{:ok, online_event()},
{:ok, online_event(drive_state: %{timestamp: now_ts, latitude: 0.0, longitude: 0.0})},
{:ok, charging_event(now_ts + 1, "Starting", 0.1, range: 1)},
{:ok, charging_event(now_ts + 2, "Charging", 0.2, range: 2)},
{:ok, charging_event(now_ts + 3, "Charging", 0.3, range: 3)},
{:ok, charging_event(now_ts + 4, "Complete", 0.4, range: 4)},
{:ok, charging_event(now_ts + 5, "Complete", 0.4, range: 4)},
{:ok, charging_event(now_ts + 6, "Unplugged", 0.4, range: 4)},
{:ok, online_event(drive_state: %{timestamp: now_ts + 7, latitude: 0.2, longitude: 0.2})},
fn -> Process.sleep(10_000) end
]
:ok = start_vehicle(name, events)
start_date = DateTime.from_unix!(now_ts, :millisecond)
assert_receive {:start_state, car, :online, date: ^start_date}, 400
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online, since: s0}}}
assert_receive {:start_charging_process, ^car, %{latitude: 0.0, longitude: 0.0},
[lookup_address: true]}
assert_receive {:"$websockex_cast", :disconnect}
assert_receive {:insert_charge, %ChargingProcess{id: _process_id} = cproc,
%{
date: _,
charge_energy_added: 0.1,
rated_battery_range_km: 1.61,
ideal_battery_range_km: 1.61
}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :charging, since: s1}}}
assert DateTime.diff(s0, s1, :nanosecond) < 0
assert_receive {:insert_charge, ^cproc,
%{
date: _,
charge_energy_added: 0.2,
rated_battery_range_km: 3.22,
ideal_battery_range_km: 3.22
}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :charging, since: ^s1}}}
assert_receive {:insert_charge, ^cproc,
%{
date: _,
charge_energy_added: 0.3,
rated_battery_range_km: 4.83,
ideal_battery_range_km: 4.83
}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :charging, since: ^s1}}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:insert_charge, ^cproc,
%{
date: _,
charge_energy_added: 0.4,
rated_battery_range_km: 6.44,
ideal_battery_range_km: 6.44
}}
# Completed
assert_receive {:complete_charging_process, ^cproc}
start_date = DateTime.from_unix!(now_ts + 4, :millisecond)
assert_receive {:start_state, ^car, :online, date: ^start_date}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online, since: s2}}}
assert DateTime.diff(s1, s2, :nanosecond) < 0
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online, since: ^s2}}}
refute_receive _
end
@tag :capture_log
test "handles a connection loss when charging", %{test: name} do
now_ts = DateTime.utc_now() |> DateTime.to_unix(:millisecond)
events = [
{:ok, online_event()},
{:ok, online_event(drive_state: %{timestamp: now_ts, latitude: 0.0, longitude: 0.0})},
{:ok, charging_event(now_ts + 1, "Charging", 0.1)},
{:ok, charging_event(now_ts + 2, "Charging", 0.2)},
{:error, :vehicle_unavailable},
{:ok, %TeslaApi.Vehicle{state: "offline"}},
{:error, :vehicle_unavailable},
{:ok, %TeslaApi.Vehicle{state: "unknown"}},
{:ok, charging_event(now_ts + 3, "Charging", 0.3)},
{:ok, charging_event(now_ts + 4, "Complete", 0.3)},
{:ok, charging_event(now_ts + 5, "Complete", 0.3)},
{:ok, charging_event(now_ts + 6, "Unplugged", 0.3)},
{:ok, online_event(drive_state: %{timestamp: now_ts + 7, latitude: 0.2, longitude: 0.2})},
fn -> Process.sleep(10_000) end
]
:ok = start_vehicle(name, events)
start_date = DateTime.from_unix!(now_ts, :millisecond)
assert_receive {:start_state, car, :online, date: ^start_date}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
assert_receive {:start_charging_process, ^car, %{latitude: 0.0, longitude: 0.0},
[lookup_address: true]}
assert_receive {:"$websockex_cast", :disconnect}
assert_receive {:insert_charge, %ChargingProcess{id: _cproc_id} = cproc,
%{date: _, charge_energy_added: 0.1}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :charging}}}
assert_receive {:insert_charge, ^cproc, %{date: _, charge_energy_added: 0.2}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :charging}}}
assert_receive {:insert_charge, ^cproc, %{date: _, charge_energy_added: 0.3}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :charging}}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:insert_charge, ^cproc, %{date: _, charge_energy_added: 0.3}}
assert_receive {:complete_charging_process, ^cproc}
start_date = DateTime.from_unix!(now_ts + 4, :millisecond)
assert_receive {:start_state, ^car, :online, date: ^start_date}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
refute_receive _
end
test "handles a invalid charge data", %{test: name} do
now_ts = DateTime.utc_now() |> DateTime.to_unix(:millisecond)
events = [
{:ok, online_event()},
{:ok, online_event(drive_state: %{timestamp: now_ts, latitude: 0.0, longitude: 0.0})},
{:ok, charging_event(now_ts + 1, "Charging", 0.1)},
{:ok, %TeslaApi.Vehicle{state: "online", charge_state: nil}},
{:ok, %TeslaApi.Vehicle{state: "online", charge_state: nil}},
{:ok, %TeslaApi.Vehicle{state: "online", charge_state: nil}},
{:ok, charging_event(now_ts + 3, "Charging", 0.3)},
{:ok, charging_event(now_ts + 5, "Complete", 0.3)},
{:ok, online_event(drive_state: %{timestamp: now_ts + 6, latitude: 0.2, longitude: 0.2})},
fn -> Process.sleep(10_000) end
]
:ok = start_vehicle(name, events)
start_date = DateTime.from_unix!(now_ts, :millisecond)
assert_receive {:start_state, car, :online, date: ^start_date}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
assert_receive {:start_charging_process, ^car, %{latitude: 0.0, longitude: 0.0},
[lookup_address: true]}
assert_receive {:"$websockex_cast", :disconnect}
assert_receive {:insert_charge, cproc, %{date: _, charge_energy_added: 0.1}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :charging}}}
assert capture_log(@log_opts, fn ->
assert_receive {:insert_charge, ^cproc, %{date: _, charge_energy_added: 0.3}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :charging}}}
end) =~ "[warn] Discarded incomplete fetch result"
assert_receive {:insert_position, ^car, %{}}
assert_receive {:insert_charge, ^cproc, %{date: _, charge_energy_added: 0.3}}
assert_receive {:complete_charging_process, ^cproc}
start_date = DateTime.from_unix!(now_ts + 5, :millisecond)
assert_receive {:start_state, ^car, :online, date: ^start_date}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
refute_receive _
end
test "Transitions directly into charging state", %{test: name} do
now_ts = DateTime.utc_now() |> DateTime.to_unix(:millisecond)
events = [
{:ok, online_event()},
{:ok, charging_event(now_ts, "Charging", 22)},
fn -> Process.sleep(10_000) end
]
:ok = start_vehicle(name, events)
start_date = DateTime.from_unix!(now_ts, :millisecond)
assert_receive {:start_state, car, :online, date: ^start_date}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
assert_receive {:start_charging_process, ^car, %{latitude: 0.0, longitude: 0.0},
[lookup_address: true]}
assert_receive {:"$websockex_cast", :disconnect}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :charging}}}
assert_receive {:insert_charge, _charging_event, %{date: _, charge_energy_added: 22}}
refute_received _
end
@tag :capture_log
test "transisitions into asleep state", %{test: name} do
now_ts = DateTime.utc_now() |> DateTime.to_unix(:millisecond)
events = [
{:ok, online_event()},
{:ok, online_event(drive_state: %{timestamp: now_ts, latitude: 0.0, longitude: 0.0})},
{:ok, charging_event(now_ts + 1, "Charging", 0.1)},
{:ok, charging_event(now_ts + 2, "Charging", 0.2)},
{:error, :vehicle_unavailable},
{:ok, %TeslaApi.Vehicle{state: "asleep"}},
fn -> Process.sleep(10_000) end
]
:ok = start_vehicle(name, events)
start_date = DateTime.from_unix!(now_ts, :millisecond)
assert_receive {:start_state, car, :online, date: ^start_date}
assert_receive {ApiMock, {:stream, 1000, _}}
assert_receive {:insert_position, ^car, %{}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :online}}}
assert_receive {:start_charging_process, ^car, %{latitude: 0.0, longitude: 0.0},
[lookup_address: true]}
assert_receive {:"$websockex_cast", :disconnect}
assert_receive {:insert_charge, %ChargingProcess{id: _cproc_id} = cproc,
%{date: _, charge_energy_added: 0.1}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :charging}}}
assert_receive {:insert_charge, ^cproc, %{date: _, charge_energy_added: 0.2}}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :charging}}}
assert_receive {:complete_charging_process, ^cproc}
assert_receive {:start_state, ^car, :asleep, []}
assert_receive {:pubsub, {:broadcast, _, _, %Summary{state: :asleep}}}
refute_receive _
end
end
| 40.851449 | 96 | 0.624922 |
9e3221494520c2f5c0ccf900ab5c4afbdd4b84c5 | 579 | ex | Elixir | lib/celery.ex | FarmBot/farmbot_os | 5ebdca3afd672eb6b0af5c71cfca02488b32569a | [
"MIT"
] | 843 | 2016-10-05T23:46:05.000Z | 2022-03-14T04:31:55.000Z | lib/celery.ex | FarmBot/farmbot_os | 5ebdca3afd672eb6b0af5c71cfca02488b32569a | [
"MIT"
] | 455 | 2016-10-15T08:49:16.000Z | 2022-03-15T12:23:04.000Z | lib/celery.ex | FarmBot/farmbot_os | 5ebdca3afd672eb6b0af5c71cfca02488b32569a | [
"MIT"
] | 261 | 2016-10-10T04:37:06.000Z | 2022-03-13T21:07:38.000Z | defmodule FarmbotCore.Celery do
@moduledoc """
Operations for Farmbot's internal scripting language.
"""
alias FarmbotCore.Celery.{AST, StepRunner, Scheduler}
require FarmbotCore.Logger
@doc "Schedule an AST to execute on a DateTime"
def schedule(%AST{} = ast, %DateTime{} = at, %{} = data) do
Scheduler.schedule(ast, at, data)
end
@entrypoints [:execute, :sequence, :rpc_request]
@doc "Execute an AST in place"
def execute(%AST{kind: k} = ast, tag, caller \\ self())
when k in @entrypoints do
StepRunner.begin(caller, tag, ast)
end
end
| 26.318182 | 61 | 0.682211 |
9e3248a50e6809d61e169214cd8088153909d26f | 979 | ex | Elixir | lib/situation_room/sites.ex | mojotech/situation-room | d43b0e35376ec6fe1c2d69810b4eee8cb752f286 | [
"MIT"
] | 9 | 2015-10-04T11:47:19.000Z | 2019-12-09T16:02:23.000Z | lib/situation_room/sites.ex | mojotech/situation-room | d43b0e35376ec6fe1c2d69810b4eee8cb752f286 | [
"MIT"
] | 14 | 2015-07-28T04:41:57.000Z | 2022-03-31T18:12:51.000Z | lib/situation_room/sites.ex | mojotech/situation-room | d43b0e35376ec6fe1c2d69810b4eee8cb752f286 | [
"MIT"
] | 1 | 2017-07-26T03:17:02.000Z | 2017-07-26T03:17:02.000Z | defmodule SituationRoom.Sites do
@moduledoc """
Common repository methods for Sites
"""
alias SituationRoom.Repo
alias SituationRoom.Site
# Get a site from the database by one specific field
# param ex: (name: "mojotech") or (endpoint: "http://mojo.com")
def get_site(param) do
Repo.get_by(Site, param)
end
def get_site!(id), do: Repo.get!(Site, id)
# Returns all sites in the database
def get_all_sites() do
Repo.all(Site)
end
# Creates a site in the database by taking two String.t() params
# param ex: ("mojo", "http://mojotech.com")
def create_site(params) do
%Site{}
|> Site.changeset(params)
|> Repo.insert()
end
# Delete a site from the database by specifying specific field
# param ex: (name: "mojotech") or (endpoint: "http://mojo.com")
def delete_site(site) do
Repo.delete(site)
end
def update_site(%Site{} = site, attrs) do
site
|> Site.changeset(attrs)
|> Repo.update()
end
end
| 23.309524 | 66 | 0.663943 |
9e324e63d8061d38adadf31cb383b3db0317fdf0 | 814 | exs | Elixir | test/ex_ftx/wallet/air_drops_test.exs | RaghavSood/ex_ftx | 257ba35221abe4957836eb6e8312ecae0d9d51aa | [
"MIT"
] | 3 | 2021-09-27T17:19:41.000Z | 2022-03-16T09:28:13.000Z | test/ex_ftx/wallet/air_drops_test.exs | RaghavSood/ex_ftx | 257ba35221abe4957836eb6e8312ecae0d9d51aa | [
"MIT"
] | 3 | 2021-07-28T19:53:02.000Z | 2021-09-27T15:48:38.000Z | test/ex_ftx/wallet/air_drops_test.exs | RaghavSood/ex_ftx | 257ba35221abe4957836eb6e8312ecae0d9d51aa | [
"MIT"
] | 4 | 2021-08-01T11:25:58.000Z | 2021-10-11T22:15:44.000Z | defmodule ExFtx.Wallet.AirDropsTest do
use ExUnit.Case, async: false
use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney
doctest ExFtx.Wallet.AirDrops
setup_all do
HTTPoison.start()
:ok
end
@valid_credentials %ExFtx.Credentials{
api_key: System.get_env("FTX_API_KEY"),
api_secret: System.get_env("FTX_API_SECRET")
}
@invalid_credentials %ExFtx.Credentials{
api_key: "invalid",
api_secret: "invalid"
}
test ".get/1 ok" do
use_cassette "wallet/air_drops/get_ok" do
assert {:ok, _air_drops} = ExFtx.Wallet.AirDrops.get(@valid_credentials)
end
end
test ".get/1 unauthorized" do
use_cassette "wallet/air_drops/get_unauthorized" do
assert ExFtx.Wallet.AirDrops.get(@invalid_credentials) ==
{:error, "Not logged in"}
end
end
end
| 24.666667 | 78 | 0.69656 |
9e327fa972b26aa0f4012a30f46a2aebe7678d96 | 6,187 | exs | Elixir | test/sparql/query/result/tsv_decoder_test.exs | pukkamustard/sparql-ex | 4f8907ddbd15215c9b6c40edac19362c92e34d91 | [
"MIT"
] | null | null | null | test/sparql/query/result/tsv_decoder_test.exs | pukkamustard/sparql-ex | 4f8907ddbd15215c9b6c40edac19362c92e34d91 | [
"MIT"
] | null | null | null | test/sparql/query/result/tsv_decoder_test.exs | pukkamustard/sparql-ex | 4f8907ddbd15215c9b6c40edac19362c92e34d91 | [
"MIT"
] | null | null | null | defmodule SPARQL.Query.Result.TSV.DecoderTest do
use ExUnit.Case
doctest SPARQL.Query.Result.TSV.Decoder
import RDF.Sigils
alias SPARQL.Query
describe "W3C tests" do
setup context do
{:ok,
result_string:
(context.test_case <> ".tsv")
|> SPARQL.W3C.TestSuite.file({"1.1", "csv-tsv-res"})
|> File.read!()
}
end
@tag test_case: "csvtsv01"
test "csvtsv01: SELECT * WHERE { ?S ?P ?O }", %{result_string: result_string} do
assert Query.Result.TSV.decode(result_string) == {:ok,
%Query.Result{
variables: ~w[s p o],
results: [
%{
"s" => ~I<http://example.org/s1>,
"p" => ~I<http://example.org/p1>,
"o" => ~I<http://example.org/s2>,
},
%{
"s" => ~I<http://example.org/s2>,
"p" => ~I<http://example.org/p2>,
"o" => ~L"foo"
},
%{
"s" => ~I<http://example.org/s3>,
"p" => ~I<http://example.org/p3>,
"o" => ~L"bar"
},
%{
"s" => ~I<http://example.org/s4>,
"p" => ~I<http://example.org/p4>,
"o" => RDF.Integer.new(4)
},
%{
"s" => ~I<http://example.org/s5>,
"p" => ~I<http://example.org/p5>,
"o" => RDF.Literal.new("5.5", datatype: "http://www.w3.org/2001/XMLSchema#decimal")
},
%{
"s" => ~I<http://example.org/s6>,
"p" => ~I<http://example.org/p6>,
"o" => ~B<b0>
}
]
}
}
end
@tag test_case: "csvtsv02"
test "csvtsv02: SELECT with OPTIONAL (i.e. not all vars bound in all results)",
%{result_string: result_string} do
assert Query.Result.TSV.decode(result_string) == {:ok,
%Query.Result{
variables: ~w[s p o p2 o2],
results: [
%{
"s" => ~I<http://example.org/s1>,
"p" => ~I<http://example.org/p1>,
"o" => ~I<http://example.org/s2>,
"p2" => ~I<http://example.org/p2>,
"o2" => ~L"foo"
},
%{
"s" => ~I<http://example.org/s2>,
"p" => ~I<http://example.org/p2>,
"o" => ~L"foo",
"p2" => nil,
"o2" => nil
},
%{
"s" => ~I<http://example.org/s3>,
"p" => ~I<http://example.org/p3>,
"o" => ~L"bar",
"p2" => nil,
"o2" => nil
},
%{
"s" => ~I<http://example.org/s4>,
"p" => ~I<http://example.org/p4>,
"o" => RDF.Integer.new(4),
"p2" => nil,
"o2" => nil
},
%{
"s" => ~I<http://example.org/s5>,
"p" => ~I<http://example.org/p5>,
"o" => RDF.Literal.new("5.5", datatype: "http://www.w3.org/2001/XMLSchema#decimal"),
"p2" => nil,
"o2" => nil
},
%{
"s" => ~I<http://example.org/s6>,
"p" => ~I<http://example.org/p6>,
"o" => ~B<b0>,
"p2" => nil,
"o2" => nil
},
]
}
}
end
@tag test_case: "csvtsv03"
test "csvtsv03: SELECT * WHERE { ?S ?P ?O } with some corner cases of typed literals",
%{result_string: result_string} do
assert Query.Result.TSV.decode(result_string) == {:ok,
%Query.Result{
variables: ~w[s p o],
results: [
%{
"s" => ~I<http://example.org/s1>,
"p" => ~I<http://example.org/p1>,
"o" => ~L"1"
},
%{
"s" => ~I<http://example.org/s2>,
"p" => ~I<http://example.org/p2>,
"o" => RDF.Literal.new("2.2", datatype: "http://www.w3.org/2001/XMLSchema#decimal")
},
%{
"s" => ~I<http://example.org/s3>,
"p" => ~I<http://example.org/p3>,
"o" => RDF.Literal.new("-3", datatype: "http://www.w3.org/2001/XMLSchema#negativeInteger")
},
%{
"s" => ~I<http://example.org/s4>,
"p" => ~I<http://example.org/p4>,
"o" => ~L"4,4"
},
%{
"s" => ~I<http://example.org/s5>,
"p" => ~I<http://example.org/p5>,
"o" => RDF.Literal.new("5,5", datatype: "http://example.org/myCustomDatatype")
},
%{
"s" => ~I<http://example.org/s6>,
"p" => ~I<http://example.org/p6>,
"o" => RDF.Double.new("1.0e6")
},
%{
"s" => ~I<http://example.org/s7>,
"p" => ~I<http://example.org/p7>,
"o" => RDF.Literal.new("a7", datatype: "http://www.w3.org/2001/XMLSchema#hexBinary")
},
]
}
}
end
end
test "values with escaped characters" do
assert Query.Result.TSV.decode("?a\n\"foo\\n\\tbar\"") == {:ok,
%Query.Result{
variables: ~w[a],
results: [%{"a" => ~L"foo\n\tbar"}]
}
}
end
test "with no header and no results" do
assert Query.Result.TSV.decode("") ==
{:ok, %Query.Result{variables: nil, results: []}}
end
test "with empty header values" do
error = {:error, "invalid header variable: ''"}
assert Query.Result.TSV.decode("?a\t\t?b") == error
assert Query.Result.TSV.decode("?a\t \t?b") == error
assert Query.Result.TSV.decode("?a\t") == error
assert Query.Result.TSV.decode("\t?a") == error
assert Query.Result.TSV.decode(" ") == error
end
test "with header variables without a leading question mark" do
assert Query.Result.TSV.decode("a") == {:error, "invalid header variable: 'a'"}
end
test "with syntax errors in the values" do
assert Query.Result.TSV.decode("?a\n\"foo") == {:error, "illegal \"foo"}
end
end
| 31.728205 | 104 | 0.419751 |
9e3285ac3db934f149990dac64517b19a148c851 | 564 | exs | Elixir | test/changelog_web/controllers/benefit_controller_test.exs | PsOverflow/changelog.com | 53f4ecfc39b021c6b8cfcc0fa11f29aff8038a7f | [
"MIT"
] | 1 | 2021-03-14T21:12:49.000Z | 2021-03-14T21:12:49.000Z | test/changelog_web/controllers/benefit_controller_test.exs | PsOverflow/changelog.com | 53f4ecfc39b021c6b8cfcc0fa11f29aff8038a7f | [
"MIT"
] | null | null | null | test/changelog_web/controllers/benefit_controller_test.exs | PsOverflow/changelog.com | 53f4ecfc39b021c6b8cfcc0fa11f29aff8038a7f | [
"MIT"
] | 1 | 2018-10-03T20:55:52.000Z | 2018-10-03T20:55:52.000Z | defmodule ChangelogWeb.BenefitControllerTest do
use ChangelogWeb.ConnCase
test "renders the benefits sans details", %{conn: conn} do
benefit = insert(:benefit, code: "ZOMG")
conn = get(conn, benefit_path(conn, :index))
assert conn.status == 200
refute conn.resp_body =~ benefit.code
end
@tag :as_user
test "renders the benefits with details", %{conn: conn} do
benefit = insert(:benefit, code: "ZOMG")
conn = get(conn, benefit_path(conn, :index))
assert conn.status == 200
assert conn.resp_body =~ benefit.code
end
end
| 29.684211 | 60 | 0.691489 |
9e32abe73f299a4721a755a3a0a8d069e8af35de | 383 | ex | Elixir | code examples/example-15-17.ex | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | [
"Apache-2.0"
] | 8 | 2016-08-14T12:35:16.000Z | 2021-01-26T04:05:31.000Z | code examples/example-15-17.ex | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | [
"Apache-2.0"
] | null | null | null | code examples/example-15-17.ex | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | [
"Apache-2.0"
] | 5 | 2016-08-18T22:12:19.000Z | 2020-02-17T18:52:41.000Z | iex(1)> s = %Stewtype{}
%Stewtype{ingredients: [], stewtype: :veg}
iex(2)> Stew.print(s)
"It's a vegetable stew"
iex(3)> sb = %{s|stewtype: :beef}
%Stewtype{ingredients: [], stewtype: :beef}
iex(4)> StewProtocol.what_is_it?(sb)
"It's a beef stew"
iex(5)> su = %{s|stewtype: :chicken}
%Stewtype{ingredients: [], stewtype: :chicken}
iex(6)> StewProtocol.what_is_it?(su)
"Unknown stew"
| 29.461538 | 46 | 0.67624 |
9e32c0ae2655f49078a9b4c3b9cea0e9397227f9 | 1,829 | exs | Elixir | clients/recaptcha_enterprise/mix.exs | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/recaptcha_enterprise/mix.exs | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/recaptcha_enterprise/mix.exs | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.RecaptchaEnterprise.Mixfile do
use Mix.Project
@version "0.3.0"
def project() do
[
app: :google_api_recaptcha_enterprise,
version: @version,
elixir: "~> 1.6",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/recaptcha_enterprise"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:google_gax, "~> 0.4"},
{:ex_doc, "~> 0.16", only: :dev}
]
end
defp description() do
"""
reCAPTCHA Enterprise API client library.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching", "Daniel Azuma"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/recaptcha_enterprise",
"Homepage" => "https://cloud.google.com/recaptcha-enterprise/"
}
]
end
end
| 27.298507 | 111 | 0.65883 |
9e32d58c565e92208d074a575da15f90264dfc61 | 1,978 | exs | Elixir | test/hexpm_web/controllers/version_controller_test.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 691 | 2017-03-08T09:15:45.000Z | 2022-03-23T22:04:47.000Z | test/hexpm_web/controllers/version_controller_test.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 491 | 2017-03-07T12:58:42.000Z | 2022-03-29T23:32:54.000Z | test/hexpm_web/controllers/version_controller_test.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 200 | 2017-03-12T23:03:39.000Z | 2022-03-05T17:55:52.000Z | defmodule HexpmWeb.VersionControllerTest do
use HexpmWeb.ConnCase, async: true
setup do
user1 = insert(:user)
repository1 = insert(:repository)
package1 = insert(:package)
package2 = insert(:package, repository_id: repository1.id)
insert(
:release,
package: package1,
version: "0.0.1",
meta: build(:release_metadata, app: package1.name)
)
insert(
:release,
package: package1,
version: "0.0.2",
meta: build(:release_metadata, app: package1.name)
)
insert(
:release,
package: package1,
version: "0.0.3-dev",
meta: build(:release_metadata, app: package1.name)
)
insert(
:release,
package: package2,
version: "1.0.0",
meta: build(:release_metadata, app: package2.name)
)
insert(
:release,
package: package2,
version: "0.1.0",
meta: build(:release_metadata, app: package2.name)
)
insert(:organization_user, user: user1, organization: repository1.organization)
%{
package1: package1,
package2: package2,
repository1: repository1,
user1: user1
}
end
describe "GET /packages/:package_name/versions" do
test "list all versions for public package", %{package1: package1} do
conn = get(build_conn(), "/packages/#{package1.name}/versions")
result = response(conn, 200)
assert result =~ ~r/0.0.1/
assert result =~ ~r/0.0.2/
assert result =~ ~r/0.0.3-dev/
assert result =~ package1.name
end
test "list private package versions", %{
user1: user1,
package2: package2,
repository1: repository1
} do
conn =
build_conn()
|> test_login(user1)
|> get("/packages/#{repository1.name}/#{package2.name}/versions")
result = response(conn, 200)
assert result =~ ~r/0.1.0/
assert result =~ ~r/1.0.0/
assert result =~ package2.name
end
end
end
| 23.547619 | 83 | 0.602123 |
9e334545ffeda9183da7d025162ca42d702f92d8 | 1,559 | ex | Elixir | clients/deployment_manager/lib/google_api/deployment_manager/v2/model/authorization_logging_options.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/deployment_manager/lib/google_api/deployment_manager/v2/model/authorization_logging_options.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/deployment_manager/lib/google_api/deployment_manager/v2/model/authorization_logging_options.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.DeploymentManager.V2.Model.AuthorizationLoggingOptions do
@moduledoc """
Authorization-related information used by Cloud Audit Logging.
## Attributes
* `permissionType` (*type:* `String.t`, *default:* `nil`) - The type of the permission that was checked.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:permissionType => String.t()
}
field(:permissionType)
end
defimpl Poison.Decoder, for: GoogleApi.DeploymentManager.V2.Model.AuthorizationLoggingOptions do
def decode(value, options) do
GoogleApi.DeploymentManager.V2.Model.AuthorizationLoggingOptions.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DeploymentManager.V2.Model.AuthorizationLoggingOptions do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.170213 | 108 | 0.756895 |
9e33611b527bad74b6955008814f4ce139dc5742 | 69 | exs | Elixir | apps/artemis_notify/test/test_helper.exs | artemis-platform/artemis_dashboard | 5ab3f5ac4c5255478bbebf76f0e43b44992e3cab | [
"MIT"
] | 9 | 2019-08-19T19:56:34.000Z | 2022-03-22T17:56:38.000Z | apps/artemis_notify/test/test_helper.exs | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | 7 | 2019-07-12T21:41:01.000Z | 2020-08-17T21:29:22.000Z | apps/artemis_notify/test/test_helper.exs | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | 2 | 2019-07-05T22:51:47.000Z | 2019-08-19T19:56:37.000Z | ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(Artemis.Repo, :manual)
| 23 | 53 | 0.782609 |
9e3376ae41d9316a5dca49ab605fcffa837898e2 | 916 | exs | Elixir | test/shippo/configuration_test.exs | christopherlai/shippo | 33d62242a5c3ad1d935888150d5cd630404d91f3 | [
"Unlicense",
"MIT"
] | null | null | null | test/shippo/configuration_test.exs | christopherlai/shippo | 33d62242a5c3ad1d935888150d5cd630404d91f3 | [
"Unlicense",
"MIT"
] | null | null | null | test/shippo/configuration_test.exs | christopherlai/shippo | 33d62242a5c3ad1d935888150d5cd630404d91f3 | [
"Unlicense",
"MIT"
] | null | null | null | defmodule Shippo.ConfigurationTest do
use ExUnit.Case, async: true
alias Shippo.ConfigHelpers
alias Shippo.Configuration
setup do
ConfigHelpers.put_envs(token: "token")
end
describe "new/0" do
test "returns a Configuration struct" do
assert %Configuration{
url: "https://api.goshippo.com",
token: "token",
client: Shippo.Client.Hackney,
client_opts: [],
json_library: Jason
} = Configuration.new()
end
end
describe "new/1" do
test "returns a Configuration struct with overrides applied" do
assert %Configuration{
url: "https://example.com",
token: "token",
client: Shippo.Client.Hackney,
client_opts: [],
json_library: Jason
} = Configuration.new(url: "https://example.com")
end
end
end
| 26.941176 | 67 | 0.576419 |
9e33773e1a3c64e61a8aea75d3d0f13d8172f2eb | 1,791 | exs | Elixir | test/json_schema_test_suite_test.exs | imax-iva/nex_json_schema | 00838cd891937f938d731ddd733c64a29fb31c18 | [
"MIT"
] | null | null | null | test/json_schema_test_suite_test.exs | imax-iva/nex_json_schema | 00838cd891937f938d731ddd733c64a29fb31c18 | [
"MIT"
] | null | null | null | test/json_schema_test_suite_test.exs | imax-iva/nex_json_schema | 00838cd891937f938d731ddd733c64a29fb31c18 | [
"MIT"
] | null | null | null | defmodule NExJsonSchema.JsonSchemaTestSuiteTest.Helpers do
use ExUnit.Case, async: true
@schema_tests_path "test/JSON-Schema-Test-Suite/tests/draft4/"
def schema_tests_path do
@schema_tests_path
end
def schema_test_path(filename) do
Path.join(schema_tests_path(), filename)
end
def load_schema_test(name) do
(name <> ".json")
|> schema_test_path
|> File.read!()
|> Poison.Parser.parse!()
end
end
defmodule NExJsonSchema.JsonSchemaTestSuiteTest do
use ExUnit.Case, async: true
import NExJsonSchema.JsonSchemaTestSuiteTest.Helpers
import NExJsonSchema.Validator, only: [valid?: 2]
@tests Path.wildcard("#{schema_tests_path()}**/*.json")
|> Enum.map(fn path ->
path |> String.replace(schema_tests_path(), "") |> String.replace(".json", "")
end)
@ignored_tests %{
"optional/format" => %{
"validation of URIs" => true
}
}
Enum.each(@tests, fn feature ->
fixture = load_schema_test(feature)
Enum.each(fixture, fn fixture ->
%{"description" => description, "schema" => schema, "tests" => tests} = fixture
@schema schema
Enum.each(tests, fn t ->
@test t
case @ignored_tests[feature] do
true ->
nil
ignored_group ->
case ignored_group[description] do
true ->
nil
ignored_tests ->
unless ignored_tests && Enum.member?(ignored_tests, @test["description"]) do
test "[#{feature}] #{description}: #{@test["description"]}" do
assert valid?(NExJsonSchema.Schema.resolve(@schema), @test["data"]) == @test["valid"]
end
end
end
end
end)
end)
end)
end
| 26.338235 | 105 | 0.591848 |
9e33acde122578e4dfb30ec0a641b12ade9f351e | 25,320 | ex | Elixir | lib/iex/lib/iex.ex | jfornoff/elixir | 4ed5e8e66973ae7b0e52ead00f65117ab0d600e0 | [
"Apache-2.0"
] | null | null | null | lib/iex/lib/iex.ex | jfornoff/elixir | 4ed5e8e66973ae7b0e52ead00f65117ab0d600e0 | [
"Apache-2.0"
] | null | null | null | lib/iex/lib/iex.ex | jfornoff/elixir | 4ed5e8e66973ae7b0e52ead00f65117ab0d600e0 | [
"Apache-2.0"
] | 1 | 2021-09-30T01:21:02.000Z | 2021-09-30T01:21:02.000Z | defmodule IEx do
@moduledoc ~S"""
Elixir's interactive shell.
Some of the functionalities described here will not be available
depending on your terminal. In particular, if you get a message
saying that the smart terminal could not be run, some of the
features described here won't work.
## Helpers
IEx provides a bunch of helpers. They can be accessed by typing
`h()` into the shell or as a documentation for the `IEx.Helpers` module.
## Autocomplete
To discover a module's public functions or other modules, type the module name
followed by a dot, then press tab to trigger autocomplete. For example:
Enum.
A module may export functions that are not meant to be used directly: these
functions won't be autocompleted by IEx. IEx will not autocomplete functions
annotated with `@doc false`, `@impl true`, or functions that aren't explicitly
documented and where the function name is in the form of `__foo__`.
Autocomplete may not be available on some Windows shells. You may need
to pass the `--werl` flag when starting IEx, as in `iex --werl` for it
to work. `--werl` may be permanently enabled by setting the `IEX_WITH_WERL`
environment variable.
## Shell history
From Erlang/OTP 20, it is possible to get shell history by passing some
flags that enable it in the VM. This can be done on a per-need basis
when starting IEx:
iex --erl "-kernel shell_history enabled"
If you would rather enable it on your system as a whole, you can use
the `ERL_AFLAGS` environment variable and make sure that it is set
accordingly on your terminal/shell configuration.
On Linux:
export ERL_AFLAGS="-kernel shell_history enabled"
On Windows:
set ERL_AFLAGS "-kernel shell_history enabled"
## Expressions in IEx
As an interactive shell, IEx evaluates expressions. This has some
interesting consequences that are worth discussing.
The first one is that the code is truly evaluated and not compiled.
This means that any benchmarking done in the shell is going to have
skewed results. So never run any profiling nor benchmarks in the shell.
Second, IEx allows you to break an expression into many lines,
since this is common in Elixir. For example:
iex(1)> "ab
...(1)> c"
"ab\nc"
In the example above, the shell will be expecting more input until it
finds the closing quote. Sometimes it is not obvious which character
the shell is expecting, and the user may find themselves trapped in
  the state of an incomplete expression with no way to terminate it other
than by exiting the shell.
  For such cases, there is a special break-trigger (`#iex:break`) that, when
  encountered on a line by itself, will force the shell to break out of any
pending expression and return to its normal state:
iex(1)> ["ab
...(1)> c"
...(1)> "
...(1)> ]
...(1)> #iex:break
** (TokenMissingError) iex:1: incomplete expression
## The Break command
Inside IEx, hitting `Ctrl+C` will open up the `BREAK` menu. In this
menu you can quit the shell, see process and ets tables information
and much more.
## Exiting the shell
There are a few ways to quit the IEx shell:
* via the `BREAK` menu (available via `Ctrl+C`) by typing `q`, pressing enter
* by hitting `Ctrl+C`, `Ctrl+C`
* by hitting `Ctrl+\ `
  If you are connected to a remote shell, it remains alive after disconnection.
## Prying and breakpoints
IEx also has the ability to set breakpoints on Elixir code and
"pry" into running processes. This allows the developer to have
an IEx session run inside a given function.
`IEx.pry/0` can be used when you are able to modify the source
code directly and recompile it:
def my_fun(arg1, arg2) do
require IEx; IEx.pry
... implementation ...
end
When the code is executed, it will ask you for permission to be
introspected.
  Alternatively, you can use `IEx.break!/4` to set up a breakpoint
on a given module, function and arity you have no control of.
While `IEx.break!/4` is more flexible, it requires Erlang/OTP 20+ and
it does not contain information about imports and aliases from
the source code.
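  For example, to stop execution the next time `URI.decode_query/2` is called
  (a minimal sketch, assuming Erlang/OTP 20+):
      IEx.break!(URI, :decode_query, 2)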
## The User Switch command
Besides the break command, one can type `Ctrl+G` to get to the
user switch command menu. When reached, you can type `h` to
get more information.
In this menu, developers are able to start new shells and
alternate between them. Let's give it a try:
User switch command
--> s 'Elixir.IEx'
--> c
The command above will start a new shell and connect to it.
Create a new variable called `hello` and assign some value to it:
hello = :world
Now, let's roll back to the first shell:
User switch command
--> c 1
Now, try to access the `hello` variable again:
hello
** (UndefinedFunctionError) undefined function hello/0
The command above fails because we have switched shells.
Since shells are isolated from each other, you can't access the
variables defined in one shell from the other one.
The User Switch command can also be used to terminate an existing
session, for example when the evaluator gets stuck in an infinite
loop or when you are stuck typing an expression:
User switch command
--> i
--> c
The user switch command menu also allows developers to connect to
remote shells using the `r` command. A topic which we will discuss next.
## Remote shells
IEx allows you to connect to another node in two fashions.
First of all, we can only connect to a shell if we give names
both to the current shell and the shell we want to connect to.
Let's give it a try. First start a new shell:
$ iex --sname foo
iex(foo@HOST)1>
The string between the parentheses in the prompt is the name
of your node. We can retrieve it by calling the `node/0`
function:
iex(foo@HOST)1> node()
:"foo@HOST"
iex(foo@HOST)2> Node.alive?()
true
For fun, let's define a simple module in this shell too:
iex(foo@HOST)3> defmodule Hello do
...(foo@HOST)3> def world, do: "it works!"
...(foo@HOST)3> end
Now, let's start another shell, giving it a name as well:
$ iex --sname bar
iex(bar@HOST)1>
If we try to dispatch to `Hello.world`, it won't be available
as it was defined only in the other shell:
iex(bar@HOST)1> Hello.world
** (UndefinedFunctionError) undefined function Hello.world/0
However, we can connect to the other shell remotely. Open up
the User Switch prompt (Ctrl+G) and type:
User switch command
--> r 'foo@HOST' 'Elixir.IEx'
--> c
Now we are connected into the remote node, as the prompt shows us,
and we can access the information and modules defined over there:
      rem(foo@HOST)1> Hello.world
      "it works!"
In fact, connecting to remote shells is so common that we provide
a shortcut via the command line as well:
$ iex --sname baz --remsh foo@HOST
Where "remsh" means "remote shell". In general, Elixir supports:
* remsh from an Elixir node to an Elixir node
* remsh from a plain Erlang node to an Elixir node (through the ^G menu)
* remsh from an Elixir node to a plain Erlang node (and get an `erl` shell there)
Connecting an Elixir shell to a remote node without Elixir is
**not** supported.
## The .iex.exs file
When starting, IEx looks for a local `.iex.exs` file (located in the current
working directory), then a global one (located at `~/.iex.exs`) and loads the
first one it finds (if any). The code in the loaded `.iex.exs` file is
evaluated in the shell's context. So, for instance, any modules that are
loaded or variables that are bound in the `.iex.exs` file will be available in the
shell after it has booted.
For example, take the following `.iex.exs` file:
# Load another ".iex.exs" file
import_file "~/.iex.exs"
# Import some module from lib that may not yet have been defined
import_if_available MyApp.Mod
# Print something before the shell starts
IO.puts "hello world"
# Bind a variable that'll be accessible in the shell
value = 13
Running IEx in the directory where the above `.iex.exs` file is located
results in:
$ iex
Erlang 19 [...]
hello world
Interactive Elixir - press Ctrl+C to exit (type h() ENTER for help)
iex(1)> value
13
It is possible to load another file by supplying the `--dot-iex`
option to IEx. See `iex --help`.
## Configuring the shell
There are a number of customization options provided by IEx. Take a look
at the docs for the `IEx.configure/1` function by typing `h IEx.configure/1`.
Those options can be configured in your project configuration file or globally
by calling `IEx.configure/1` from your `~/.iex.exs` file. For example:
# .iex.exs
IEx.configure(inspect: [limit: 3])
Now run the shell:
$ iex
Erlang 19 [...]
Interactive Elixir - press Ctrl+C to exit (type h() ENTER for help)
iex(1)> [1, 2, 3, 4, 5]
[1, 2, 3, ...]
"""
@doc """
Configures IEx.
The supported options are:
* `:colors`
* `:inspect`
* `:width`
* `:history_size`
* `:default_prompt`
* `:alive_prompt`
They are discussed individually in the sections below.
## Colors
A keyword list that encapsulates all color settings used by the
shell. See documentation for the `IO.ANSI` module for the list of
supported colors and attributes.
List of supported keys in the keyword list:
* `:enabled` - boolean value that allows for switching the coloring on and off
* `:eval_result` - color for an expression's resulting value
* `:eval_info` - ... various informational messages
* `:eval_error` - ... error messages
* `:eval_interrupt` - ... interrupt messages
* `:stack_info` - ... the stacktrace color
* `:blame_diff` - ... when blaming source with no match
* `:ls_directory` - ... for directory entries (ls helper)
* `:ls_device` - ... device entries (ls helper)
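  For example, to print evaluation results in bright green (one possible choice
  of ANSI attributes):
      IEx.configure(colors: [eval_result: [:green, :bright]])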
When printing documentation, IEx will convert the Markdown
documentation to ANSI as well. Colors for this can be configured
via:
* `:doc_code` - the attributes for code blocks (cyan, bright)
* `:doc_inline_code` - inline code (cyan)
* `:doc_headings` - h1 and h2 (yellow, bright)
* `:doc_title` - the overall heading for the output (reverse, yellow, bright)
* `:doc_bold` - (bright)
* `:doc_underline` - (underline)
IEx will also color inspected expressions using the `:syntax_colors`
option. Such can be disabled with:
IEx.configure [colors: [syntax_colors: false]]
You can also configure the syntax colors, however, as desired:
IEx.configure [colors: [syntax_colors: [atom: :red]]]
Configuration for most built-in data types are supported: `:atom`,
`:string`, `:binary`, `:list`, `:number`, `:boolean`, `:nil`, etc.
The default is:
[number: :magenta, atom: :cyan, string: :green,
boolean: :magenta, nil: :magenta]
## Inspect
A keyword list containing inspect options used by the shell
when printing results of expression evaluation. Default to
pretty formatting with a limit of 50 entries.
To show all entries, configure the limit to `:infinity`:
IEx.configure [inspect: [limit: :infinity]]
See `Inspect.Opts` for the full list of options.
## Width
An integer indicating the maximum number of columns to use in output.
The default value is 80 columns. The actual output width is the minimum
  of this number and the result of `:io.columns`. This way you can configure IEx
to be your largest screen size and it should always take up the full width
of your current terminal screen.
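  For example, to let IEx use up to 100 columns when the terminal allows it:
      IEx.configure(width: 100)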
## History size
Number of expressions and their results to keep in the history.
The value is an integer. When it is negative, the history is unlimited.
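  For example, to keep the last 100 expressions and their results:
      IEx.configure(history_size: 100)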
## Prompt
This is an option determining the prompt displayed to the user
when awaiting input.
The value is a keyword list with two possible keys representing prompt types:
* `:default_prompt` - used when `Node.alive?/0` returns `false`
* `:alive_prompt` - used when `Node.alive?/0` returns `true`
The following values in the prompt string will be replaced appropriately:
* `%counter` - the index of the history
* `%prefix` - a prefix given by `IEx.Server`
* `%node` - the name of the local node
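  For example, to include the node name in the default prompt (a minimal sketch):
      IEx.configure(default_prompt: "%prefix(%node)%counter>")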
"""
def configure(options) do
IEx.Config.configure(options)
end
@doc """
Returns IEx configuration.
"""
def configuration do
IEx.Config.configuration()
end
@doc """
Registers a function to be invoked after the IEx process is spawned.
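  ## Examples
  A minimal sketch, assuming the registered callback takes no arguments:
      IEx.after_spawn(fn -> IO.puts("IEx has started") end)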
"""
def after_spawn(fun) when is_function(fun) do
IEx.Config.after_spawn(fun)
end
@doc """
Returns registered `after_spawn` callbacks.
"""
def after_spawn do
IEx.Config.after_spawn()
end
@doc """
Returns `true` if IEx was started.
"""
def started? do
IEx.Config.started?()
end
@doc """
Returns `string` escaped using the specified `color`.
ANSI escapes in `string` are not processed in any way.
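  ## Examples
  A minimal illustration; the exact escape codes depend on the configured
  colors and on whether ANSI output is enabled:
      IEx.color(:eval_result, "ok")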
"""
def color(color, string) do
case IEx.Config.color(color) do
nil ->
string
ansi ->
[ansi | string] |> IO.ANSI.format(true) |> IO.iodata_to_binary()
end
end
@doc """
Gets the IEx width for printing.
  Used by helpers, with a default maximum cap of 80 characters.
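  ## Examples
  Assuming the default configuration and a terminal that is at least 80 columns
  wide:
      IEx.width()
      #=> 80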
"""
def width do
IEx.Config.width()
end
@doc """
Gets the options used for inspecting.
"""
def inspect_opts do
IEx.Config.inspect_opts()
end
@doc """
Pries into the process environment.
This is useful for debugging a particular chunk of code
when executed by a particular process. The process becomes
the evaluator of IEx commands and is temporarily changed to
have a custom group leader. Those values are reverted by
calling `IEx.Helpers.respawn/0`, which starts a new IEx shell,
freeing up the pried one.
When a process is pried, all code runs inside IEx and has
access to all imports and aliases from the original code.
However, the code is evaluated and therefore cannot access
private functions of the module being pried. Module functions
still need to be accessed via `Mod.fun(args)`.
  Alternatively, you can use `IEx.break!/4` to set up a breakpoint
on a given module, function and arity you have no control of.
While `IEx.break!/4` is more flexible, it requires Erlang/OTP 20+ and
it does not contain information about imports and aliases from
the source code.
## Examples
Let's suppose you want to investigate what is happening
  with some particular function. By invoking `IEx.pry/0` from
the function, IEx will allow you to access its binding
(variables), verify its lexical information and access
the process information. Let's see an example:
import Enum, only: [map: 2]
defmodule Adder do
def add(a, b) do
c = a + b
require IEx; IEx.pry
end
end
When invoking `Adder.add(1, 2)`, you will receive a message in
your shell to pry the given environment. By allowing it,
the shell will be reset and you gain access to all variables
and the lexical scope from above:
pry(1)> map([a, b, c], &IO.inspect(&1))
1
2
3
Keep in mind that `IEx.pry/1` runs in the caller process,
blocking the caller during the evaluation cycle. The caller
process can be freed by calling `respawn/0`, which starts a
new IEx evaluation cycle, letting this one go:
pry(2)> respawn()
true
Interactive Elixir - press Ctrl+C to exit (type h() ENTER for help)
Setting variables or importing modules in IEx does not
affect the caller's environment. However, sending and
receiving messages will change the process state.
## Pry and macros
When setting up Pry inside a code defined by macros, such as:
defmacro __using__(_) do
quote do
def add(a, b) do
c = a + b
require IEx; IEx.pry
end
end
end
The variables defined inside `quote` won't be available during
prying due to the hygiene mechanism in quoted expressions. The
hygiene mechanism changes the variable names in quoted expressions
so they don't collide with variables defined by the users of the
macros. Therefore the original names are not available.
## Pry and mix test
To use `IEx.pry/0` during tests, you need to run Mix inside
  `iex` and pass the `--trace` option to `mix test` to avoid running
into timeouts:
iex -S mix test --trace
iex -S mix test path/to/file:line --trace
"""
defmacro pry() do
quote do
IEx.Pry.pry(binding(), __ENV__)
end
end
@doc """
Macro-based shortcut for `IEx.break!/4`.
"""
@since "1.5.0"
defmacro break!(ast, stops \\ 1) do
quote do
IEx.__break__!(unquote(Macro.escape(ast)), unquote(Macro.escape(stops)), __ENV__)
end
end
def __break__!({:/, _, [call, arity]} = ast, stops, env) when is_integer(arity) do
with {module, fun, []} <- Macro.decompose_call(call),
module when is_atom(module) <- Macro.expand(module, env) do
IEx.Pry.break!(module, fun, arity, quote(do: _), stops)
else
_ ->
raise_unknown_break_ast!(ast)
end
end
def __break__!({{:., _, [module, fun]}, _, args} = ast, stops, env) do
__break__!(ast, module, fun, args, true, stops, env)
end
def __break__!({:when, _, [{{:., _, [module, fun]}, _, args}, guards]} = ast, stops, env) do
__break__!(ast, module, fun, args, guards, stops, env)
end
  def __break__!(ast, _stops, _env) do
raise_unknown_break_ast!(ast)
end
defp __break__!(ast, module, fun, args, guards, stops, env) do
module = Macro.expand(module, env)
unless is_atom(module) do
raise_unknown_break_ast!(ast)
end
pattern = {:when, [], [{:{}, [], args}, guards]}
to_expand =
quote do
case Unknown.module() do
unquote(pattern) -> :ok
end
end
{{:case, _, [_, [do: [{:->, [], [[expanded], _]}]]]}, _} =
:elixir_expand.expand(to_expand, env)
IEx.Pry.break!(module, fun, length(args), expanded, stops)
end
defp raise_unknown_break_ast!(ast) do
raise ArgumentError, """
unknown expression to break on, expected one of:
* Mod.fun/arity, such as: URI.parse/1
* Mod.fun(arg1, arg2, ...), such as: URI.parse(_)
* Mod.fun(arg1, arg2, ...) when guard, such as: URI.parse(var) when is_binary(var)
Got #{Macro.to_string(ast)}
"""
end
@doc """
Sets up a breakpoint in `module`, `function` and `arity` with
the given number of `stops`.
This function will instrument the given module and load a new
version in memory with breakpoints at the given function and
arity. If the module is recompiled, all breakpoints are lost.
When a breakpoint is reached, IEx will ask if you want to `pry`
  the given function and arity. In other words, this works similarly
to `IEx.pry/0` as the running process becomes the evaluator of
IEx commands and is temporarily changed to have a custom group
leader. However, differently from `IEx.pry/0`, aliases and imports
from the source code won't be available in the shell.
  The IEx helpers module includes many conveniences related to breakpoints.
Below they are listed with the full module, such as `IEx.Helpers.breaks/0`,
but remember it can be called directly as `breaks()` inside IEx.
They are:
* `IEx.Helpers.break!/2` - sets up a breakpoint for a given `Mod.fun/arity`
* `IEx.Helpers.break!/4` - sets up a breakpoint for the given module, function, arity
* `IEx.Helpers.breaks/0` - prints all breakpoints and their ids
* `IEx.Helpers.continue/0` - continues until the next breakpoint in the same shell
* `IEx.Helpers.open/0` - opens editor on the current breakpoint
* `IEx.Helpers.remove_breaks/0` - removes all breakpoints in all modules
* `IEx.Helpers.remove_breaks/1` - removes all breakpoints in a given module
* `IEx.Helpers.reset_break/1` - sets the number of stops on the given id to zero
* `IEx.Helpers.reset_break/3` - sets the number of stops on the given module, function, arity to zero
* `IEx.Helpers.respawn/0` - starts a new shell (breakpoints will ask for permission once more)
* `IEx.Helpers.whereami/1` - shows the current location
By default, the number of stops in a breakpoint is 1. Any follow-up
call won't stop the code execution unless another breakpoint is set.
Alternatively, the number of stops can be increased by passing the `stops`
argument. `IEx.Helpers.reset_break/1` and `IEx.Helpers.reset_break/3`
can be used to reset the number back to zero. Note the module remains
"instrumented" even after all stops on all breakpoints are consumed.
You can remove the instrumentation in a given module by calling
`IEx.Helpers.remove_breaks/1` and on all modules by calling
`IEx.Helpers.remove_breaks/0`.
To exit a breakpoint, the developer can either invoke `continue()`,
which will block the shell until the next breakpoint is found or
the process terminates, or invoke `respawn()`, which starts a new IEx
shell, freeing up the pried one.
This functionality only works on Elixir code and requires Erlang/OTP 20+.
## Examples
The examples below will use `break!`, assuming that you are setting
a breakpoint directly from your IEx shell. But you can set up a break
from anywhere by using the fully qualified name `IEx.break!`.
The following sets up a breakpoint on `URI.decode_query/2`:
break! URI, :decode_query, 2
  This call will set up a breakpoint that stops once.
To set a breakpoint that will stop 10 times:
break! URI, :decode_query, 2, 10
`IEx.break!/2` is a convenience macro that allows breakpoints
to be given in the `Mod.fun/arity` format:
break! URI.decode_query/2
Or to set a breakpoint that will stop 10 times:
break! URI.decode_query/2, 10
This function returns the breakpoint ID and will raise if there
is an error setting up the breakpoint.
## Patterns and guards
`IEx.break!/2` allows patterns to be given, triggering the
breakpoint only in some occasions. For example, to trigger
the breakpoint only when the first argument is the "foo=bar"
string:
break! URI.decode_query("foo=bar", _)
Or to trigger it whenever the second argument is a map with
more than one element:
break! URI.decode_query(_, map) when map_size(map) > 0
  Only a single breakpoint can be set per function. So if you call
`IEx.break!` multiple times with different patterns, only the last
pattern is kept.
  Note that, while patterns may be given to macros, macros receive
  ASTs as arguments, not values. For example, if
you try to break on a macro with the following pattern:
break! MyModule.some_macro(pid) when pid == self()
This breakpoint will never be reached, because a macro never receives
a PID. Even if you call the macro as `MyModule.some_macro(self())`,
the macro will receive the AST representing the `self()` call, and not
the PID itself.
## Breaks and mix test
To use `IEx.break!/4` during tests, you need to run Mix inside
  `iex` and pass the `--trace` option to `mix test` to avoid running
into timeouts:
iex -S mix test --trace
iex -S mix test path/to/file:line --trace
"""
@since "1.5.0"
def break!(module, function, arity, stops \\ 1) when is_integer(arity) do
IEx.Pry.break!(module, function, arity, quote(do: _), stops)
end
## Callbacks
# This is a callback invoked by Erlang shell utilities
  # when someone presses Ctrl+G and adds 's Elixir.IEx'.
@doc false
def start(opts \\ [], mfa \\ {IEx, :dont_display_result, []}) do
spawn(fn ->
case :init.notify_when_started(self()) do
:started -> :ok
_ -> :init.wait_until_started()
end
:ok = start_iex()
:ok = set_expand_fun()
:ok = run_after_spawn()
IEx.Server.start(opts, mfa)
end)
end
@doc false
def dont_display_result, do: :"do not show this result in output"
## Helpers
defp start_iex() do
{:ok, _} = Application.ensure_all_started(:iex)
:ok
end
defp set_expand_fun do
gl = Process.group_leader()
glnode = node(gl)
expand_fun =
if glnode != node() do
_ = ensure_module_exists(glnode, IEx.Remsh)
IEx.Remsh.expand(node())
else
&IEx.Autocomplete.expand(&1)
end
# expand_fun is not supported by a shell variant
# on Windows, so we do two IO calls, not caring
# about the result of the expand_fun one.
_ = :io.setopts(gl, expand_fun: expand_fun)
:io.setopts(gl, binary: true, encoding: :unicode)
end
defp ensure_module_exists(node, mod) do
unless :rpc.call(node, :code, :is_loaded, [mod]) do
{m, b, f} = :code.get_object_code(mod)
{:module, _} = :rpc.call(node, :code, :load_binary, [m, f, b])
end
end
defp run_after_spawn do
_ = for fun <- Enum.reverse(after_spawn()), do: fun.()
:ok
end
end
| 31.689612 | 105 | 0.684123 |
9e33ddb245324a3f38751d242cdb2ffe63f3cc88 | 1,599 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_conditions.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_conditions.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_conditions.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Conditions do
@moduledoc """
A collection of conditions.
## Attributes
- conditions ([GooglePrivacyDlpV2Condition]): Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:conditions => list(GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Condition.t())
}
field(:conditions, as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Condition, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Conditions do
def decode(value, options) do
GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Conditions.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Conditions do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.3125 | 89 | 0.757348 |
9e33efcab864c5822f8e806b290afc67dea6f872 | 476 | ex | Elixir | lib/docusign/model/brand_request.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 4 | 2020-12-21T12:50:13.000Z | 2022-01-12T16:50:43.000Z | lib/docusign/model/brand_request.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 12 | 2018-09-18T15:26:34.000Z | 2019-09-28T15:29:39.000Z | lib/docusign/model/brand_request.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 15 | 2020-04-29T21:50:16.000Z | 2022-02-11T18:01:51.000Z | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule DocuSign.Model.BrandRequest do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:brandId
]
@type t :: %__MODULE__{
:brandId => String.t()
}
end
defimpl Poison.Decoder, for: DocuSign.Model.BrandRequest do
def decode(value, _options) do
value
end
end
| 19.04 | 75 | 0.678571 |
9e33f5d1fd10f5426f5d3e659157b62e7e3dbbef | 2,734 | ex | Elixir | lib/ex_slp/server.ex | 4pcbr/ex_slp_tk | 91a9f435cdc62224219409b9560a031d5ff4bb39 | [
"MIT"
] | 2 | 2016-03-01T22:16:18.000Z | 2016-04-14T23:10:01.000Z | lib/ex_slp/server.ex | 4pcbr/ex_slp_tk | 91a9f435cdc62224219409b9560a031d5ff4bb39 | [
"MIT"
] | null | null | null | lib/ex_slp/server.ex | 4pcbr/ex_slp_tk | 91a9f435cdc62224219409b9560a031d5ff4bb39 | [
"MIT"
] | null | null | null | defmodule ExSlp.Server do
alias ExSlp.Tool
import ExSlp.Util, only: [ format_servise_url: 1, format_args: 1, format_opts: 1 ]
@slpd "slpd"
@doc """
Checks the status of slpd daemon.
Returns:
{ :ok, pid } # in case of success,
{ :not_running, message } # otherwise.
"""
def status do
case :os.cmd( :"ps cax | grep #{@slpd} | awk '{print $1}'" ) do
[] -> { :not_running, "The server doesn't seem to be running." }
output ->
pid = output |> List.to_string |> String.strip |> String.to_integer
{ :ok, pid }
end
end
@doc """
Registers the service in the local network.
Takes the `service` specification as a mandatory argument,
`args` and `opts` as a standard keyword lists (see ExSlp.Client.findsrvs for
more info on `args`.
This is the place you should specify the service attributes.
Check the original documentation for more service internals:
http://www.openslp.org/doc/html/ProgrammersGuide/SLPReg.html
Please keep in mind the slpd instance won't track the initiator
status and moreover it knows nothing about it.
It's initiator's responsibility to deregister the service
it registers.
Returns:
{ :ok, resp } # in case of success,
{ :error, message } # otherwise.
Example:
register( "myservice://192.168.0.10" )
register( "service:myservice.xyz://192.168.0.10", [ attr1: val1, att2: val2 ] )
register( "service:myservice.xyz://192.168.0.10", [ l: "en", t: 60 * 60], [] )
"""
def register( service ), do: register( service, [], [] )
def register( service, opts ), do: register( service, [], opts )
def register( service, args, opts ) do
args = format_args( args )
opts = format_opts( opts )
case res = Tool.exec_cmd( args, :register, [ format_servise_url( service ), opts ] ) do
{ :ok, "" } -> res
{ :ok, silent_err } ->
{ :error, silent_err }
_ -> res
end
end
@doc """
Deregisters the service which has been registered earlier.
You should call this method every time the application
is about to be terminated. The service won't be automatically
deregistered.
  Takes `service` as a mandatory argument, the same one you used
  to register the service.
  `args` is a standard OpenSLP keyword list.
Returns:
{ :ok, resp } # in case of success
{ :error, reason } # otherwise
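  Example (illustrative, mirroring the `register` calls above):

      deregister( "myservice://192.168.0.10" )
      deregister( "service:myservice.xyz://192.168.0.10", [ l: "en" ] )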
"""
def deregister( service ), do: deregister( service, [] )
def deregister( service, args ) do
args = format_args( args )
case res = Tool.exec_cmd( args, :deregister, [ format_servise_url( service ) ] ) do
{ :ok, "" } -> res
{ :ok, silent_err } ->
{ :error, silent_err }
_ -> res
end
end
end
| 34.175 | 91 | 0.631675 |
9e340f81aa9fd304a07feba22e6eacde32c1c8da | 642 | exs | Elixir | test/brando/i18n_test.exs | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | 4 | 2020-10-30T08:40:38.000Z | 2022-01-07T22:21:37.000Z | test/brando/i18n_test.exs | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | 1,162 | 2020-07-05T11:20:15.000Z | 2022-03-31T06:01:49.000Z | test/brando/i18n_test.exs | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | null | null | null | defmodule Brando.I18nTest do
use ExUnit.Case, async: true
alias Brando.I18n
test "get_language" do
mock_conn = %{assigns: %{}}
assert I18n.get_language(mock_conn) == Brando.config(:default_admin_language)
mock_conn = %{assigns: %{language: "en"}}
assert I18n.get_language(mock_conn) == "en"
end
test "extract" do
assert I18n.parse_path([]) == {"en", ["index"]}
assert I18n.parse_path(["en"]) == {"en", ["index"]}
assert I18n.parse_path(["test"]) == {"en", ["test"]}
assert I18n.parse_path(["no", "test"]) == {"no", ["test"]}
assert I18n.parse_path(["en", "test"]) == {"en", ["test"]}
end
end
| 32.1 | 81 | 0.610592 |
9e3417e9154aea5468b1c4f62f4ad4896c0a6202 | 1,115 | exs | Elixir | mix.exs | IanLuites/tidy | 8427986dd0261139a8e25a6fb6f9a938608500c6 | [
"MIT"
] | null | null | null | mix.exs | IanLuites/tidy | 8427986dd0261139a8e25a6fb6f9a938608500c6 | [
"MIT"
] | 1 | 2020-09-02T10:03:53.000Z | 2020-09-02T10:03:53.000Z | mix.exs | IanLuites/tidy | 8427986dd0261139a8e25a6fb6f9a938608500c6 | [
"MIT"
] | 1 | 2020-09-01T16:20:38.000Z | 2020-09-01T16:20:38.000Z | defmodule Tidy.MixProject do
use Mix.Project
@version "0.1.4"
def project do
[
app: :tidy,
version: @version,
elixir: "~> 1.7",
start_permanent: Mix.env() == :prod,
deps: deps(),
package: package(),
description: "Checks documentation and specs of Elixir modules.",
docs: docs()
]
end
defp docs do
[
main: "Tidy",
canonical: "http://hexdocs.pm/tidy",
extras: ["README.md"],
source_ref: "v#{@version}",
source_url: "https://github.com/IanLuites/tidy",
groups_for_modules: [
Adapters: [
Tidy.Checks.ModuleDoc,
Tidy.Checks.FunctionDoc,
Tidy.Checks.FunctionSpec
]
]
]
end
defp package do
[
name: :tidy,
maintainers: ["Ian Luites"],
licenses: ["MIT"],
files: ~w(.formatter.exs mix.exs README.md LICENSE lib),
links: %{github: "https://github.com/IanLuites/tidy"}
]
end
def application do
[
extra_applications: [:logger]
]
end
defp deps do
[{:ex_doc, ">= 0.0.0", only: :dev}]
end
end
| 19.910714 | 71 | 0.550673 |
9e3421a2c5c03b2d31367b2d48d95243d91b4c6c | 2,451 | ex | Elixir | lib/nerves_rt_perf/base/all.ex | takasehideki/nerves_rt_perf | 2d189b24d02432632c5e4ec65fbb7421ac0b629e | [
"Apache-2.0"
] | 1 | 2020-10-18T03:21:47.000Z | 2020-10-18T03:21:47.000Z | lib/nerves_rt_perf/base/all.ex | takasehideki/nerves_rt_perf | 2d189b24d02432632c5e4ec65fbb7421ac0b629e | [
"Apache-2.0"
] | 1 | 2022-02-09T07:36:27.000Z | 2022-02-09T07:36:27.000Z | lib/nerves_rt_perf/base/all.ex | takasehideki/nerves_rt_perf | 2d189b24d02432632c5e4ec65fbb7421ac0b629e | [
"Apache-2.0"
] | null | null | null | defmodule NervesRtPerf.Base.All do
# macro setting for const value (defined by NervesRtPerf)
require NervesRtPerf
@eval_loop_num NervesRtPerf.eval_loop_num()
@sum_num NervesRtPerf.sum_num()
@fib_num NervesRtPerf.fib_num()
@sleep_interval NervesRtPerf.sleep_interval()
# obtain target name
@target System.get_env("MIX_TARGET")
def eval(param) do
# prepare log file
filename =
(@target <> to_string(__MODULE__) <> "_" <> param <> "-" <> Time.to_string(Time.utc_now()))
|> String.replace("Elixir.NervesRtPerf.", "-")
|> String.replace(".", "-")
|> String.replace(":", "")
# eliminate under second
|> String.slice(0..-8)
filepath = "/tmp/" <> filename <> ".csv"
IO.puts("result log file: " <> filepath)
File.write(filepath, "count,time,heap_size,minor_gcs\r\n")
# generate process for output of measurement logs
pid = spawn(NervesRtPerf, :output, [filepath, ""])
case param do
"normal" ->
Process.spawn(__MODULE__, :eval_loop, [1, pid], [])
_ ->
IO.puts("Argument error")
end
end
# loop for evaluation
def eval_loop(count, pid) do
# sleep on each iteration
:timer.sleep(5)
case count do
# write results to the log file
n when n > @eval_loop_num ->
send(pid, {:ok})
IO.puts("Evaluation end:" <> Time.to_string(Time.utc_now()))
:ok
0 ->
IO.puts("Evaluation start:" <> Time.to_string(Time.utc_now()))
# ignore evaluation for the first time to avoid cache influence
NervesRtPerf.sum(@sum_num)
NervesRtPerf.fib(@fib_num)
:timer.sleep(@sleep_interval)
:timer.sleep(5)
eval_loop(count + 1, pid)
_ ->
# measurement point
# {eval, _} = :timer.tc(NervesRtPerf, :fib, [])
t1 = :erlang.monotonic_time()
NervesRtPerf.sum(@sum_num)
NervesRtPerf.fib(@fib_num)
:timer.sleep(@sleep_interval)
t2 = :erlang.monotonic_time()
time = :erlang.convert_time_unit(t2 - t1, :native, :microsecond)
result =
"#{count},#{time},#{Process.info(self())[:heap_size]},#{
Process.info(self())[:garbage_collection][:minor_gcs]
}\r\n"
# send measurement result to output process
send(pid, {:ok, result})
# sleep to wait output process
:timer.sleep(5)
eval_loop(count + 1, pid)
end
end
end
| 29.178571 | 97 | 0.600571 |
9e3423b353320d747e92aabb100f0ad024bd1770 | 759 | ex | Elixir | apps/elixir_ls_debugger/test/fixtures/mix_project/lib/mix_project.ex | bottlenecked/elixir-ls | 99ab6e98ff181aae01ca3d119dee0ea9c49c727a | [
"Apache-2.0"
] | null | null | null | apps/elixir_ls_debugger/test/fixtures/mix_project/lib/mix_project.ex | bottlenecked/elixir-ls | 99ab6e98ff181aae01ca3d119dee0ea9c49c727a | [
"Apache-2.0"
] | null | null | null | apps/elixir_ls_debugger/test/fixtures/mix_project/lib/mix_project.ex | bottlenecked/elixir-ls | 99ab6e98ff181aae01ca3d119dee0ea9c49c727a | [
"Apache-2.0"
] | null | null | null | defmodule MixProject do
def quadruple(x) do
double(double(x))
end
def double(y) do
2 * y
end
def exit do
Task.start(fn ->
Task.start_link(fn ->
Process.sleep(1000)
raise "Fixture MixProject expected error"
end)
Process.sleep(:infinity)
end)
Process.sleep(:infinity)
end
def exit_self do
Task.start_link(fn ->
Process.sleep(1000)
raise "Fixture MixProject raise for exit_self/0"
end)
Process.sleep(:infinity)
end
end
defmodule MixProject.Some do
def double(y) do
2 * y
end
def quadruple(x) do
double(double(x))
end
def sleep do
Supervisor.start_link([], strategy: :one_for_one, name: __MODULE__)
Process.sleep(:infinity)
end
end
| 16.148936 | 71 | 0.635046 |
9e34335c9a976b8a7b41a5418a757dae5748a69e | 812 | ex | Elixir | lib/verk/stats.ex | coop/verk | cae9bea1426d5b340b3041b29453f17c709d70a8 | [
"MIT"
] | null | null | null | lib/verk/stats.ex | coop/verk | cae9bea1426d5b340b3041b29453f17c709d70a8 | [
"MIT"
] | null | null | null | lib/verk/stats.ex | coop/verk | cae9bea1426d5b340b3041b29453f17c709d70a8 | [
"MIT"
] | 1 | 2018-12-04T03:50:36.000Z | 2018-12-04T03:50:36.000Z | defmodule Verk.Stats do
@moduledoc """
Basic stats for Verk
"""
@doc """
Total amount of processed and failed jobs
"""
@spec total(GenServer.server) :: Map.t
def total(redis \\ Verk.Redis) do
[processed, failed] = Redix.command!(redis, ~w(MGET stat:processed stat:failed))
%{processed: to_int(processed), failed: to_int(failed)}
end
@doc """
Total amount of processed and failed jobs for a single queue
"""
@spec queue_total(String.t, GenServer.server) :: Map.t
def queue_total(queue, redis \\ Verk.Redis) do
[processed, failed] = Redix.command!(redis, ~w(MGET stat:processed:#{queue} stat:failed:#{queue}))
%{total_processed: to_int(processed), total_failed: to_int(failed)}
end
defp to_int(nil), do: 0
defp to_int(string), do: String.to_integer(string)
end
| 30.074074 | 102 | 0.684729 |
9e34410cc1423814d4dabb43fd191ec712d61e89 | 1,235 | ex | Elixir | web/views/error_helpers.ex | whenther/chopsticks | ec8d9f90cb4e9afc9e80322c734d9c6bfe5e14e1 | [
"MIT"
] | 2 | 2016-11-08T18:17:41.000Z | 2017-02-23T06:51:56.000Z | web/views/error_helpers.ex | will-wow/chopsticks | ec8d9f90cb4e9afc9e80322c734d9c6bfe5e14e1 | [
"MIT"
] | null | null | null | web/views/error_helpers.ex | will-wow/chopsticks | ec8d9f90cb4e9afc9e80322c734d9c6bfe5e14e1 | [
"MIT"
] | null | null | null | defmodule Chopsticks.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
if error = form.errors[field] do
content_tag :span, translate_error(error), class: "help-block"
end
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# Because error messages were defined within Ecto, we must
# call the Gettext module passing our Gettext backend. We
# also use the "errors" domain as translations are placed
# in the errors.po file.
# Ecto will pass the :count keyword if the error message is
# meant to be pluralized.
    # In your own code and templates, depending on whether you
# need the message to be pluralized or not, this could be
# written simply as:
#
# dngettext "errors", "1 file", "%{count} files", count
# dgettext "errors", "is invalid"
#
if count = opts[:count] do
Gettext.dngettext(Chopsticks.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(Chopsticks.Gettext, "errors", msg, opts)
end
end
end
| 30.121951 | 76 | 0.669636 |
9e347b3e1d006dc48cc3d9af569b21f22262779a | 1,863 | ex | Elixir | lib/grizzly/command_class/network_management_basic/default_set.ex | pragdave/grizzly | bcd7b46ab2cff1797dac04bc3cd12a36209dd579 | [
"Apache-2.0"
] | null | null | null | lib/grizzly/command_class/network_management_basic/default_set.ex | pragdave/grizzly | bcd7b46ab2cff1797dac04bc3cd12a36209dd579 | [
"Apache-2.0"
] | null | null | null | lib/grizzly/command_class/network_management_basic/default_set.ex | pragdave/grizzly | bcd7b46ab2cff1797dac04bc3cd12a36209dd579 | [
"Apache-2.0"
] | null | null | null | defmodule Grizzly.CommandClass.NetworkManagementBasic.DefaultSet do
@moduledoc """
Command module for working with the NetworkManagementBasic command class DEFAULT_SET command
Command Options:
* `:seq_number` - the sequence number used for the Z/IP packet
* `:retries` - the number of attempts to send the command (default 2)
"""
@behaviour Grizzly.Command
alias Grizzly.Packet
@type t :: %__MODULE__{
seq_number: Grizzly.seq_number(),
retries: non_neg_integer()
}
@type opt :: {:seq_number, Grizzly.seq_number()} | {:retries, non_neg_integer()}
defstruct seq_number: nil, retries: 2
@spec init([opt]) :: {:ok, t}
def init(opts) do
{:ok, struct(__MODULE__, opts)}
end
def encode(%__MODULE__{seq_number: seq_number}) do
binary = Packet.header(seq_number) <> <<0x4D, 0x06, seq_number>>
{:ok, binary}
end
@spec handle_response(t, Packet.t()) ::
{:continue, t}
| {:done, {:error, :nack_response}}
| {:done, :done | :busy}
| {:retry, t}
def handle_response(%__MODULE__{seq_number: seq_number} = command, %Packet{
seq_number: seq_number,
types: [:ack_response]
}) do
{:continue, command}
end
def handle_response(%__MODULE__{seq_number: seq_number, retries: 0}, %Packet{
seq_number: seq_number,
types: [:nack_response]
}) do
{:done, {:error, :nack_response}}
end
def handle_response(%__MODULE__{seq_number: seq_number, retries: n} = command, %Packet{
seq_number: seq_number,
types: [:nack_response]
}) do
{:retry, %{command | retries: n - 1}}
end
def handle_response(_command, %Packet{body: %{command: :default_set_complete, status: status}}) do
{:done, {:ok, status}}
end
def handle_response(command, _), do: {:continue, command}
end
| 28.661538 | 100 | 0.641975 |
9e34c3592c3e33c9fd23cbd362e6bb776bfbb6bf | 1,296 | exs | Elixir | mix.exs | axelson/live_attribute | 5935e58c922bbbc2961f2e574bd43668174685ef | [
"MIT"
] | null | null | null | mix.exs | axelson/live_attribute | 5935e58c922bbbc2961f2e574bd43668174685ef | [
"MIT"
] | null | null | null | mix.exs | axelson/live_attribute | 5935e58c922bbbc2961f2e574bd43668174685ef | [
"MIT"
] | null | null | null | defmodule LiveAttribute.MixProject do
use Mix.Project
@version "1.0.0"
@name "LiveAttribute"
@url "https://github.com/dominicletz/live_attribute"
@maintainers ["Dominic Letz"]
def project do
[
app: :live_attribute,
version: @version,
name: @name,
elixir: "~> 1.11",
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps(),
docs: docs(),
package: package(),
homepage_url: @url,
description: """
LiveAttributes for Phoenix.LiveView to make subscription and updating
of changing variables easier.
"""
]
end
defp aliases() do
[
lint: [
"compile",
"format --check-formatted",
"credo"
]
]
end
defp docs do
[
main: @name,
source_ref: "v#{@version}",
source_url: @url,
authors: @maintainers
]
end
defp package do
[
maintainers: @maintainers,
licenses: ["MIT"],
links: %{github: @url},
files: ~w(lib LICENSE.md mix.exs README.md)
]
end
def application do
[extra_applications: [:global]]
end
defp deps do
[
{:credo, "~> 1.5", only: [:dev, :test], runtime: false},
{:ex_doc, "~> 0.22", only: :dev, runtime: false}
]
end
end
| 19.343284 | 77 | 0.552469 |
9e34c7f38bac79e2dfbd83de5e3dd7d0ecf5dc13 | 33,845 | ex | Elixir | lib/elixir/lib/inspect/algebra.ex | RyanBard/elixir | 3e0f3b47cf26aa121470141b9a1aa55a366c066e | [
"Apache-2.0"
] | 2 | 2018-11-15T06:38:14.000Z | 2018-11-17T18:03:14.000Z | lib/elixir/lib/inspect/algebra.ex | RyanBard/elixir | 3e0f3b47cf26aa121470141b9a1aa55a366c066e | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/inspect/algebra.ex | RyanBard/elixir | 3e0f3b47cf26aa121470141b9a1aa55a366c066e | [
"Apache-2.0"
] | null | null | null | defmodule Inspect.Opts do
@moduledoc """
Defines the options used by the `Inspect` protocol.
The following fields are available:
* `:structs` - when `false`, structs are not formatted by the inspect
protocol, they are instead printed as maps, defaults to `true`.
* `:binaries` - when `:as_strings` all binaries will be printed as strings,
non-printable bytes will be escaped.
When `:as_binaries` all binaries will be printed in bit syntax.
When the default `:infer`, the binary will be printed as a string if it
is printable, otherwise in bit syntax.
* `:charlists` - when `:as_charlists` all lists will be printed as charlists,
non-printable elements will be escaped.
When `:as_lists` all lists will be printed as lists.
When the default `:infer`, the list will be printed as a charlist if it
is printable, otherwise as list.
* `:limit` - limits the number of items that are printed for tuples,
bitstrings, maps, lists and any other collection of items. It does not
apply to strings nor charlists and defaults to 50. If you don't want to limit
the number of items to a particular number, use `:infinity`.
* `:printable_limit` - limits the number of bytes that are printed for strings
and charlists. Defaults to 4096. If you don't want to limit the number of items
to a particular number, use `:infinity`.
* `:pretty` - if set to `true` enables pretty printing, defaults to `false`.
* `:width` - defaults to 80 characters, used when pretty is `true` or when
printing to IO devices. Set to 0 to force each item to be printed on its
own line. If you don't want to limit the number of items to a particular
number, use `:infinity`.
* `:base` - prints integers as `:binary`, `:octal`, `:decimal`, or `:hex`,
defaults to `:decimal`. When inspecting binaries any `:base` other than
`:decimal` implies `binaries: :as_binaries`.
* `:safe` - when `false`, failures while inspecting structs will be raised
as errors instead of being wrapped in the `Inspect.Error` exception. This
is useful when debugging failures and crashes for custom inspect
implementations
* `:syntax_colors` - when set to a keyword list of colors the output will
be colorized. The keys are types and the values are the colors to use for
each type (for example, `[number: :red, atom: :blue]`). Types can include
`:number`, `:atom`, `regex`, `:tuple`, `:map`, `:list`, and `:reset`.
Colors can be any `t:IO.ANSI.ansidata/0` as accepted by `IO.ANSI.format/1`.
"""
# TODO: Remove :char_lists key by 2.0
defstruct structs: true,
binaries: :infer,
charlists: :infer,
char_lists: :infer,
limit: 50,
printable_limit: 4096,
width: 80,
base: :decimal,
pretty: false,
safe: true,
syntax_colors: []
@type color_key :: atom
# TODO: Remove :char_lists key and :as_char_lists value by 2.0
@type t :: %__MODULE__{
structs: boolean,
binaries: :infer | :as_binaries | :as_strings,
charlists: :infer | :as_lists | :as_charlists,
char_lists: :infer | :as_lists | :as_char_lists,
limit: pos_integer | :infinity,
printable_limit: pos_integer | :infinity,
width: pos_integer | :infinity,
base: :decimal | :binary | :hex | :octal,
pretty: boolean,
safe: boolean,
syntax_colors: [{color_key, IO.ANSI.ansidata()}]
}
end
defmodule Inspect.Error do
@moduledoc """
Raised when a struct cannot be inspected.
"""
defexception [:message]
end
defmodule Inspect.Algebra do
@moduledoc ~S"""
A set of functions for creating and manipulating algebra
documents.
This module implements the functionality described in
["Strictly Pretty" (2000) by Christian Lindig][0] with small
additions, like support for binary nodes and a break mode that
maximises use of horizontal space.
iex> Inspect.Algebra.empty()
:doc_nil
iex> "foo"
"foo"
With the functions in this module, we can concatenate different
elements together and render them:
iex> doc = Inspect.Algebra.concat(Inspect.Algebra.empty(), "foo")
iex> Inspect.Algebra.format(doc, 80)
["foo"]
The functions `nest/2`, `space/2` and `line/2` help you put the
document together into a rigid structure. However, the document
algebra gets interesting when using functions like `glue/3` and
`group/1`. A glue inserts a break between two documents. A group
indicates a document that must fit the current line, otherwise
breaks are rendered as new lines. Let's glue two docs together
with a break, group it and then render it:
iex> doc = Inspect.Algebra.glue("a", " ", "b")
iex> doc = Inspect.Algebra.group(doc)
iex> Inspect.Algebra.format(doc, 80)
["a", " ", "b"]
Notice the break was represented as is, because we haven't reached
a line limit. Once we do, it is replaced by a newline:
iex> doc = Inspect.Algebra.glue(String.duplicate("a", 20), " ", "b")
iex> doc = Inspect.Algebra.group(doc)
iex> Inspect.Algebra.format(doc, 10)
["aaaaaaaaaaaaaaaaaaaa", "\n", "b"]
This module uses the byte size to compute how much space there is
left. If your document contains strings, then those need to be
wrapped in `string/1`, which then relies on `String.length/1` to
precompute the document size.
Finally, this module also contains Elixir related functions, a bit
tied to Elixir formatting, such as `to_doc/2`.
## Implementation details
The implementation of `Inspect.Algebra` is based on the Strictly Pretty
paper by [Lindig][0] which builds on top of previous pretty printing
algorithms but is tailored to strict languages, such as Elixir.
The core idea in the paper is the use of explicit document groups which
are rendered as flat (breaks as spaces) or as break (breaks as newlines).
This implementation provides two types of breaks: `:strict` and `:flex`.
When a group does not fit, all strict breaks are treated as newlines.
Flex breaks however are re-evaluated on every occurrence and may still
be rendered flat. See `break/1` and `flex_break/1` for more information.
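  As a small illustrative sketch of the difference, formatting at a width of 4:

      strict =
        Inspect.Algebra.group(Inspect.Algebra.glue("a", Inspect.Algebra.glue("b", "c")))
      Inspect.Algebra.format(strict, 4)
      #=> ["a", "\n", "b", "\n", "c"]

      flex =
        Inspect.Algebra.group(Inspect.Algebra.flex_glue("a", Inspect.Algebra.flex_glue("b", "c")))
      Inspect.Algebra.format(flex, 4)
      #=> ["a", " ", "b", "\n", "c"]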
This implementation also adds `force_unfit/1` and `next_break_fits/2` which
give more control over the document fitting.
[0]: http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.34.2200
"""
@container_separator ","
@tail_separator " |"
@newline "\n"
@next_break_fits :enabled
# Functional interface to "doc" records
@type t ::
binary
| :doc_nil
| :doc_line
| doc_string
| doc_cons
| doc_nest
| doc_break
| doc_group
| doc_color
| doc_force
| doc_fits
| doc_collapse
@typep doc_string :: {:doc_string, t, non_neg_integer}
defmacrop doc_string(string, length) do
quote do: {:doc_string, unquote(string), unquote(length)}
end
@typep doc_cons :: {:doc_cons, t, t}
defmacrop doc_cons(left, right) do
quote do: {:doc_cons, unquote(left), unquote(right)}
end
@typep doc_nest :: {:doc_nest, t, :cursor | :reset | non_neg_integer, :always | :break}
defmacrop doc_nest(doc, indent, always_or_break) do
quote do: {:doc_nest, unquote(doc), unquote(indent), unquote(always_or_break)}
end
@typep doc_break :: {:doc_break, binary, :flex | :strict}
defmacrop doc_break(break, mode) do
quote do: {:doc_break, unquote(break), unquote(mode)}
end
@typep doc_group :: {:doc_group, t, :inherit | :self}
defmacrop doc_group(group, mode) do
quote do: {:doc_group, unquote(group), unquote(mode)}
end
@typep doc_fits :: {:doc_fits, t, :enabled | :disabled}
defmacrop doc_fits(group, mode) do
quote do: {:doc_fits, unquote(group), unquote(mode)}
end
@typep doc_force :: {:doc_force, t}
defmacrop doc_force(group) do
quote do: {:doc_force, unquote(group)}
end
@typep doc_collapse :: {:doc_collapse, pos_integer()}
defmacrop doc_collapse(count) do
quote do: {:doc_collapse, unquote(count)}
end
@typep doc_color :: {:doc_color, t, IO.ANSI.ansidata()}
defmacrop doc_color(doc, color) do
quote do: {:doc_color, unquote(doc), unquote(color)}
end
@docs [
:doc_string,
:doc_cons,
:doc_nest,
:doc_break,
:doc_group,
:doc_color,
:doc_force,
:doc_fits,
:doc_collapse
]
defguard is_doc(doc)
when is_binary(doc) or doc in [:doc_nil, :doc_line] or
(is_tuple(doc) and elem(doc, 0) in @docs)
# Elixir + Inspect.Opts conveniences
@doc """
Converts an Elixir term to an algebra document
according to the `Inspect` protocol.
"""
@spec to_doc(any, Inspect.Opts.t()) :: t
def to_doc(term, opts)
def to_doc(%_{} = struct, %Inspect.Opts{} = opts) do
if opts.structs do
try do
Inspect.inspect(struct, opts)
rescue
caught_exception ->
# Because we try to raise a nice error message in case
# we can't inspect a struct, there is a chance the error
# message itself relies on the struct being printed, so
# we need to trap the inspected messages to guarantee
          # we won't try to render any failing struct when building
# the error message.
if Process.get(:inspect_trap) do
Inspect.Map.inspect(struct, opts)
else
try do
Process.put(:inspect_trap, true)
res = Inspect.Map.inspect(struct, %{opts | syntax_colors: []})
res = IO.iodata_to_binary(format(res, :infinity))
message =
"got #{inspect(caught_exception.__struct__)} with message " <>
"#{inspect(Exception.message(caught_exception))} while inspecting #{res}"
exception = Inspect.Error.exception(message: message)
if opts.safe do
Inspect.inspect(exception, opts)
else
reraise(exception, __STACKTRACE__)
end
after
Process.delete(:inspect_trap)
end
end
end
else
Inspect.Map.inspect(struct, opts)
end
end
def to_doc(arg, %Inspect.Opts{} = opts) do
Inspect.inspect(arg, opts)
end
@doc ~S"""
Wraps `collection` in `left` and `right` according to limit and contents.
It uses the given `left` and `right` documents as surrounding and the
separator document `separator` to separate items in `docs`. If all entries
in the collection are simple documents (texts or strings), then this function
attempts to put as much as possible on the same line. If they are not simple,
only one entry is shown per line if they do not fit.
The limit in the given `inspect_opts` is respected and when reached this
function stops processing and outputs `"..."` instead.
## Options
* `:separator` - the separator used between each doc
* `:break` - If `:strict`, always break between each element. If `:flex`,
breaks only when necessary. If `:maybe`, chooses `:flex` only if all
elements are text-based, otherwise is `:strict`
## Examples
iex> inspect_opts = %Inspect.Opts{limit: :infinity}
iex> fun = fn i, _opts -> to_string(i) end
iex> doc = Inspect.Algebra.container_doc("[", Enum.to_list(1..5), "]", inspect_opts, fun)
iex> Inspect.Algebra.format(doc, 5) |> IO.iodata_to_binary()
"[1,\n 2,\n 3,\n 4,\n 5]"
iex> inspect_opts = %Inspect.Opts{limit: 3}
iex> fun = fn i, _opts -> to_string(i) end
iex> doc = Inspect.Algebra.container_doc("[", Enum.to_list(1..5), "]", inspect_opts, fun)
iex> Inspect.Algebra.format(doc, 20) |> IO.iodata_to_binary()
"[1, 2, 3, ...]"
iex> inspect_opts = %Inspect.Opts{limit: 3}
iex> fun = fn i, _opts -> to_string(i) end
iex> opts = [separator: "!"]
iex> doc = Inspect.Algebra.container_doc("[", Enum.to_list(1..5), "]", inspect_opts, fun, opts)
iex> Inspect.Algebra.format(doc, 20) |> IO.iodata_to_binary()
"[1! 2! 3! ...]"
"""
@doc since: "1.6.0"
@spec container_doc(t, [any], t, Inspect.Opts.t(), (term, Inspect.Opts.t() -> t), keyword()) ::
t
def container_doc(left, collection, right, inspect_opts, fun, opts \\ [])
when is_doc(left) and is_list(collection) and is_doc(right) and is_function(fun, 2) and
is_list(opts) do
case collection do
[] ->
concat(left, right)
_ ->
break = Keyword.get(opts, :break, :maybe)
separator = Keyword.get(opts, :separator, @container_separator)
{docs, simple?} =
container_each(collection, inspect_opts.limit, inspect_opts, fun, [], break == :maybe)
flex? = simple? or break == :flex
docs = fold_doc(docs, &join(&1, &2, flex?, separator))
case flex? do
true -> group(concat(concat(left, nest(docs, 1)), right))
false -> group(glue(nest(glue(left, "", docs), 2), "", right))
end
end
end
defp container_each([], _limit, _opts, _fun, acc, simple?) do
{:lists.reverse(acc), simple?}
end
defp container_each(_, 0, _opts, _fun, acc, simple?) do
{:lists.reverse(["..." | acc]), simple?}
end
defp container_each([term | terms], limit, opts, fun, acc, simple?) when is_list(terms) do
limit = decrement(limit)
doc = fun.(term, %{opts | limit: limit})
container_each(terms, limit, opts, fun, [doc | acc], simple? and simple?(doc))
end
defp container_each([left | right], limit, opts, fun, acc, simple?) do
limit = decrement(limit)
left = fun.(left, %{opts | limit: limit})
right = fun.(right, %{opts | limit: limit})
simple? = simple? and simple?(left) and simple?(right)
doc = join(left, right, simple?, @tail_separator)
{:lists.reverse([doc | acc]), simple?}
end
defp decrement(:infinity), do: :infinity
defp decrement(counter), do: counter - 1
defp join(:doc_nil, :doc_nil, _, _), do: :doc_nil
defp join(left, :doc_nil, _, _), do: left
defp join(:doc_nil, right, _, _), do: right
defp join(left, right, true, sep), do: flex_glue(concat(left, sep), right)
defp join(left, right, false, sep), do: glue(concat(left, sep), right)
defp simple?(doc_cons(left, right)), do: simple?(left) and simple?(right)
defp simple?(doc_color(doc, _)), do: simple?(doc)
defp simple?(doc_string(_, _)), do: true
defp simple?(:doc_nil), do: true
defp simple?(other), do: is_binary(other)
# TODO: Remove on 2.0
@doc false
@deprecated "Use a combination of concat/2 and nest/2 instead"
def surround(left, doc, right) when is_doc(left) and is_doc(doc) and is_doc(right) do
concat(concat(left, nest(doc, 1)), right)
end
# TODO: Remove on 2.0
@doc false
@deprecated "Use Inspect.Algebra.container_doc/6 instead"
def surround_many(
left,
docs,
right,
%Inspect.Opts{} = inspect,
fun,
separator \\ @container_separator
)
when is_doc(left) and is_list(docs) and is_doc(right) and is_function(fun, 2) do
container_doc(left, docs, right, inspect, fun, separator: separator)
end
# Algebra API
@doc """
Returns a document entity used to represent nothingness.
## Examples
iex> Inspect.Algebra.empty()
:doc_nil
"""
@spec empty() :: :doc_nil
def empty, do: :doc_nil
@doc ~S"""
Creates a document represented by string.
While `Inspect.Algebra` accepts binaries as documents,
those are counted by binary size. On the other hand,
`string` documents are measured in terms of graphemes
towards the document size.
## Examples
The following document has 10 bytes and therefore it
does not format to width 9 without breaks:
iex> doc = Inspect.Algebra.glue("olá", " ", "mundo")
iex> doc = Inspect.Algebra.group(doc)
iex> Inspect.Algebra.format(doc, 9)
["olá", "\n", "mundo"]
However, if we use `string`, then the string length is
used, instead of byte size, correctly fitting:
iex> string = Inspect.Algebra.string("olá")
iex> doc = Inspect.Algebra.glue(string, " ", "mundo")
iex> doc = Inspect.Algebra.group(doc)
iex> Inspect.Algebra.format(doc, 9)
["olá", " ", "mundo"]
"""
@doc since: "1.6.0"
@spec string(String.t()) :: doc_string
def string(string) when is_binary(string) do
doc_string(string, String.length(string))
end
@doc ~S"""
Concatenates two document entities returning a new document.
## Examples
iex> doc = Inspect.Algebra.concat("hello", "world")
iex> Inspect.Algebra.format(doc, 80)
["hello", "world"]
"""
@spec concat(t, t) :: t
def concat(doc1, doc2) when is_doc(doc1) and is_doc(doc2) do
doc_cons(doc1, doc2)
end
@doc ~S"""
Concatenates a list of documents returning a new document.
## Examples
iex> doc = Inspect.Algebra.concat(["a", "b", "c"])
iex> Inspect.Algebra.format(doc, 80)
["a", "b", "c"]
"""
@spec concat([t]) :: t
def concat(docs) when is_list(docs) do
fold_doc(docs, &concat(&1, &2))
end
@doc ~S"""
Colors a document if the `color_key` has a color in the options.
"""
@doc since: "1.4.0"
@spec color(t, Inspect.Opts.color_key(), Inspect.Opts.t()) :: doc_color
def color(doc, color_key, %Inspect.Opts{syntax_colors: syntax_colors}) when is_doc(doc) do
if precolor = Keyword.get(syntax_colors, color_key) do
postcolor = Keyword.get(syntax_colors, :reset, :reset)
concat(doc_color(doc, precolor), doc_color(empty(), postcolor))
else
doc
end
end
@doc ~S"""
Nests the given document at the given `level`.
If `level` is an integer, that's the indentation appended
to line breaks whenever they occur. If the level is `:cursor`,
the current position of the "cursor" in the document becomes
the nesting. If the level is `:reset`, it is set back to 0.
`mode` can be `:always`, which means nesting always happen,
or `:break`, which means nesting only happens inside a group
that has been broken.
## Examples
iex> doc = Inspect.Algebra.nest(Inspect.Algebra.glue("hello", "world"), 5)
iex> doc = Inspect.Algebra.group(doc)
iex> Inspect.Algebra.format(doc, 5)
["hello", "\n ", "world"]
"""
@spec nest(t, non_neg_integer | :cursor | :reset, :always | :break) :: doc_nest
def nest(doc, level, mode \\ :always)
def nest(doc, :cursor, mode) when is_doc(doc) and mode in [:always, :break] do
doc_nest(doc, :cursor, mode)
end
def nest(doc, :reset, mode) when is_doc(doc) and mode in [:always, :break] do
doc_nest(doc, :reset, mode)
end
def nest(doc, 0, _mode) when is_doc(doc) do
doc
end
def nest(doc, level, mode)
when is_doc(doc) and is_integer(level) and level > 0 and mode in [:always, :break] do
doc_nest(doc, level, mode)
end
@doc ~S"""
Returns a break document based on the given `string`.
This break can be rendered as a linebreak or as the given `string`,
depending on the `mode` of the chosen layout.
## Examples
Let's create a document by concatenating two strings with a break between
them:
iex> doc = Inspect.Algebra.concat(["a", Inspect.Algebra.break("\t"), "b"])
iex> Inspect.Algebra.format(doc, 80)
["a", "\t", "b"]
Notice the break was represented with the given string, because we didn't
reach a line limit. Once we do, it is replaced by a newline:
iex> break = Inspect.Algebra.break("\t")
iex> doc = Inspect.Algebra.concat([String.duplicate("a", 20), break, "b"])
iex> doc = Inspect.Algebra.group(doc)
iex> Inspect.Algebra.format(doc, 10)
["aaaaaaaaaaaaaaaaaaaa", "\n", "b"]
"""
@spec break(binary) :: doc_break
def break(string \\ " ") when is_binary(string) do
doc_break(string, :strict)
end
@doc """
  Collapses any new lines and whitespace following this
node, emitting up to `max` new lines.
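  A rough sketch, assembling the document by hand:

      doc =
        Inspect.Algebra.concat([
          "a",
          Inspect.Algebra.collapse_lines(2),
          Inspect.Algebra.line(),
          Inspect.Algebra.line(),
          Inspect.Algebra.line(),
          "b"
        ])
      Inspect.Algebra.format(doc, 80)
      # the three line breaks between "a" and "b" collapse into two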
"""
@doc since: "1.6.0"
@spec collapse_lines(pos_integer) :: doc_collapse
def collapse_lines(max) when is_integer(max) and max > 0 do
doc_collapse(max)
end
@doc """
Considers the next break as fit.
`mode` can be `:enabled` or `:disabled`. When `:enabled`,
it will consider the document as fit as soon as it finds
the next break, effectively cancelling the break. It will
also ignore any `force_unfit/1` in search of the next break.
When disabled, it behaves as usual and it will ignore
any further `next_break_fits/2` instruction.
## Examples
This is used by Elixir's code formatter to avoid breaking
code at some specific locations. For example, consider this
code:
some_function_call(%{..., key: value, ...})
Now imagine that this code does not fit its line. The code
formatter introduces breaks inside `(` and `)` and inside
`%{` and `}`. Therefore the document would break as:
some_function_call(
%{
...,
key: value,
...
}
)
The formatter wraps the algebra document representing the
map in `next_break_fits/1` so the code is formatted as:
some_function_call(%{
...,
key: value,
...
})
"""
@doc since: "1.6.0"
@spec next_break_fits(t, :enabled | :disabled) :: doc_fits
def next_break_fits(doc, mode \\ @next_break_fits)
when is_doc(doc) and mode in [:enabled, :disabled] do
doc_fits(doc, mode)
end
@doc """
Forces the current group to be unfit.
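  A small sketch (without `force_unfit/1` the group below would fit on one line):

      doc = Inspect.Algebra.force_unfit(Inspect.Algebra.glue("a", "b"))
      Inspect.Algebra.format(Inspect.Algebra.group(doc), 80)
      # renders "a" and "b" on separate lines even though they would fit in 80 columns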
"""
@doc since: "1.6.0"
@spec force_unfit(t) :: doc_force
def force_unfit(doc) when is_doc(doc) do
doc_force(doc)
end
@doc """
Returns a flex break document based on the given `string`.
A flex break still causes a group to break, like `break/1`,
  but it is re-evaluated when the document is rendered.
For example, take a group document represented as `[1, 2, 3]`
where the space after every comma is a break. When the document
above does not fit a single line, all breaks are enabled,
causing the document to be rendered as:
[1,
2,
3]
However, if flex breaks are used, then each break is re-evaluated
  when rendered, so the document could possibly be rendered as:
[1, 2,
3]
  Hence the name "flex": they are more flexible when it comes
  to fitting the document. On the other hand, they are more expensive
since each break needs to be re-evaluated.
  This function is used by `container_doc/6` and friends to fit the
  maximum number of entries on the same line.
"""
@doc since: "1.6.0"
@spec flex_break(binary) :: doc_break
def flex_break(string \\ " ") when is_binary(string) do
doc_break(string, :flex)
end
@doc """
Glues two documents (`doc1` and `doc2`) inserting a
`flex_break/1` given by `break_string` between them.
This function is used by `container_doc/6` and friends
  to fit the maximum number of entries on the same line.
"""
@doc since: "1.6.0"
@spec flex_glue(t, binary, t) :: t
def flex_glue(doc1, break_string \\ " ", doc2) when is_binary(break_string) do
concat(doc1, concat(flex_break(break_string), doc2))
end
@doc ~S"""
Glues two documents (`doc1` and `doc2`) inserting the given
break `break_string` between them.
For more information on how the break is inserted, see `break/1`.
## Examples
iex> doc = Inspect.Algebra.glue("hello", "world")
iex> Inspect.Algebra.format(doc, 80)
["hello", " ", "world"]
iex> doc = Inspect.Algebra.glue("hello", "\t", "world")
iex> Inspect.Algebra.format(doc, 80)
["hello", "\t", "world"]
"""
@spec glue(t, binary, t) :: t
def glue(doc1, break_string \\ " ", doc2) when is_binary(break_string) do
concat(doc1, concat(break(break_string), doc2))
end
@doc ~S"""
Returns a group containing the specified document `doc`.
Documents in a group are attempted to be rendered together
  to the best of the renderer's ability.
The group mode can also be set to `:inherit`, which means it
automatically breaks if the parent group has broken too.
## Examples
iex> doc =
...> Inspect.Algebra.group(
...> Inspect.Algebra.concat(
...> Inspect.Algebra.group(
...> Inspect.Algebra.concat(
...> "Hello,",
...> Inspect.Algebra.concat(
...> Inspect.Algebra.break(),
...> "A"
...> )
...> )
...> ),
...> Inspect.Algebra.concat(
...> Inspect.Algebra.break(),
...> "B"
...> )
...> )
...> )
iex> Inspect.Algebra.format(doc, 80)
["Hello,", " ", "A", " ", "B"]
iex> Inspect.Algebra.format(doc, 6)
["Hello,", "\n", "A", "\n", "B"]
"""
@spec group(t, :self | :inherit) :: doc_group
def group(doc, mode \\ :self) when is_doc(doc) do
doc_group(doc, mode)
end
@doc ~S"""
Inserts a mandatory single space between two documents.
## Examples
iex> doc = Inspect.Algebra.space("Hughes", "Wadler")
iex> Inspect.Algebra.format(doc, 5)
["Hughes", " ", "Wadler"]
"""
@spec space(t, t) :: t
def space(doc1, doc2), do: concat(doc1, concat(" ", doc2))
@doc ~S"""
A mandatory linebreak.
A group with linebreaks will fit if all lines in the group fit.
## Examples
iex> doc =
...> Inspect.Algebra.concat(
...> Inspect.Algebra.concat(
...> "Hughes",
...> Inspect.Algebra.line()
...> ),
...> "Wadler"
...> )
iex> Inspect.Algebra.format(doc, 80)
["Hughes", "\n", "Wadler"]
"""
@doc since: "1.6.0"
@spec line() :: t
def line(), do: :doc_line
@doc ~S"""
Inserts a mandatory linebreak between two documents.
See `line/1`.
## Examples
iex> doc = Inspect.Algebra.line("Hughes", "Wadler")
iex> Inspect.Algebra.format(doc, 80)
["Hughes", "\n", "Wadler"]
"""
@spec line(t, t) :: t
def line(doc1, doc2), do: concat(doc1, concat(line(), doc2))
@doc ~S"""
Folds a list of documents into a document using the given folder function.
  The list of documents is folded "from the right"; in that sense, this function is
similar to `List.foldr/3`, except that it doesn't expect an initial
accumulator and uses the last element of `docs` as the initial accumulator.
## Examples
iex> docs = ["A", "B", "C"]
iex> docs =
...> Inspect.Algebra.fold_doc(docs, fn doc, acc ->
...> Inspect.Algebra.concat([doc, "!", acc])
...> end)
iex> Inspect.Algebra.format(docs, 80)
["A", "!", "B", "!", "C"]
"""
@spec fold_doc([t], (t, t -> t)) :: t
def fold_doc(docs, folder_fun)
def fold_doc([], _folder_fun), do: empty()
def fold_doc([doc], _folder_fun), do: doc
def fold_doc([doc | docs], folder_fun) when is_function(folder_fun, 2),
do: folder_fun.(doc, fold_doc(docs, folder_fun))
@doc ~S"""
Formats a given document for a given width.
Takes the maximum width and a document to print as its arguments
and returns an IO data representation of the best layout for the
document to fit in the given width.
The document starts flat (without breaks) until a group is found.
## Examples
iex> doc = Inspect.Algebra.glue("hello", " ", "world")
iex> doc = Inspect.Algebra.group(doc)
iex> doc |> Inspect.Algebra.format(30) |> IO.iodata_to_binary()
"hello world"
iex> doc |> Inspect.Algebra.format(10) |> IO.iodata_to_binary()
"hello\nworld"
"""
@spec format(t, non_neg_integer | :infinity) :: iodata
def format(doc, width) when is_doc(doc) and (width == :infinity or width >= 0) do
format(width, 0, [{0, :flat, doc}])
end
# Type representing the document mode to be rendered
#
# * flat - represents a document with breaks as flats (a break may fit, as it may break)
# * break - represents a document with breaks as breaks (a break always fits, since it breaks)
#
# The following modes are exclusive to fitting
#
# * flat_no_break - represents a document with breaks as flat not allowed to enter in break mode
# * break_no_flat - represents a document with breaks as breaks not allowed to enter in flat mode
#
@typep mode :: :flat | :flat_no_break | :break
@spec fits?(width :: integer(), column :: integer(), break? :: boolean(), entries) :: boolean()
when entries: [{integer(), mode(), t()}] | {:tail, boolean(), entries}
# We need at least a break to consider the document does not fit since a
# large document without breaks has no option but fitting its current line.
#
# In case we have groups and the group fits, we need to consider the group
# parent without the child breaks, hence {:tail, b?, t} below.
defp fits?(w, k, b?, _) when k > w and b?, do: false
defp fits?(_, _, _, []), do: true
defp fits?(w, k, _, {:tail, b?, t}), do: fits?(w, k, b?, t)
## Flat no break
defp fits?(w, k, b?, [{i, _, doc_fits(x, :disabled)} | t]),
do: fits?(w, k, b?, [{i, :flat_no_break, x} | t])
defp fits?(w, k, b?, [{i, :flat_no_break, doc_fits(x, _)} | t]),
do: fits?(w, k, b?, [{i, :flat_no_break, x} | t])
## Breaks no flat
defp fits?(w, k, b?, [{i, _, doc_fits(x, :enabled)} | t]),
do: fits?(w, k, b?, [{i, :break_no_flat, x} | t])
defp fits?(w, k, b?, [{i, :break_no_flat, doc_force(x)} | t]),
do: fits?(w, k, b?, [{i, :break_no_flat, x} | t])
defp fits?(_, _, _, [{_, :break_no_flat, doc_break(_, _)} | _]), do: true
defp fits?(_, _, _, [{_, :break_no_flat, :doc_line} | _]), do: true
## Breaks
defp fits?(_, _, _, [{_, :break, doc_break(_, _)} | _]), do: true
defp fits?(_, _, _, [{_, :break, :doc_line} | _]), do: true
defp fits?(w, k, b?, [{i, :break, doc_group(x, _)} | t]),
do: fits?(w, k, b?, [{i, :flat, x} | {:tail, b?, t}])
## Catch all
defp fits?(w, _, _, [{i, _, :doc_line} | t]), do: fits?(w, i, false, t)
defp fits?(w, k, b?, [{_, _, :doc_nil} | t]), do: fits?(w, k, b?, t)
defp fits?(w, _, b?, [{i, _, doc_collapse(_)} | t]), do: fits?(w, i, b?, t)
defp fits?(w, k, b?, [{i, m, doc_color(x, _)} | t]), do: fits?(w, k, b?, [{i, m, x} | t])
defp fits?(w, k, b?, [{_, _, doc_string(_, l)} | t]), do: fits?(w, k + l, b?, t)
defp fits?(w, k, b?, [{_, _, s} | t]) when is_binary(s), do: fits?(w, k + byte_size(s), b?, t)
defp fits?(_, _, _, [{_, _, doc_force(_)} | _]), do: false
defp fits?(w, k, _, [{_, _, doc_break(s, _)} | t]), do: fits?(w, k + byte_size(s), true, t)
defp fits?(w, k, b?, [{i, m, doc_nest(x, _, :break)} | t]), do: fits?(w, k, b?, [{i, m, x} | t])
defp fits?(w, k, b?, [{i, m, doc_nest(x, j, _)} | t]),
do: fits?(w, k, b?, [{apply_nesting(i, k, j), m, x} | t])
defp fits?(w, k, b?, [{i, m, doc_cons(x, y)} | t]),
do: fits?(w, k, b?, [{i, m, x}, {i, m, y} | t])
defp fits?(w, k, b?, [{i, m, doc_group(x, _)} | t]),
do: fits?(w, k, b?, [{i, m, x} | {:tail, b?, t}])
@spec format(integer | :infinity, integer, [{integer, mode, t}]) :: [binary]
defp format(_, _, []), do: []
defp format(w, k, [{_, _, :doc_nil} | t]), do: format(w, k, t)
defp format(w, _, [{i, _, :doc_line} | t]), do: [indent(i) | format(w, i, t)]
defp format(w, k, [{i, m, doc_cons(x, y)} | t]), do: format(w, k, [{i, m, x}, {i, m, y} | t])
defp format(w, k, [{i, m, doc_color(x, c)} | t]), do: [ansi(c) | format(w, k, [{i, m, x} | t])]
defp format(w, k, [{_, _, doc_string(s, l)} | t]), do: [s | format(w, k + l, t)]
defp format(w, k, [{_, _, s} | t]) when is_binary(s), do: [s | format(w, k + byte_size(s), t)]
defp format(w, k, [{i, m, doc_force(x)} | t]), do: format(w, k, [{i, m, x} | t])
defp format(w, k, [{i, m, doc_fits(x, _)} | t]), do: format(w, k, [{i, m, x} | t])
defp format(w, _, [{i, _, doc_collapse(max)} | t]), do: collapse(format(w, i, t), max, 0, i)
# Flex breaks are not conditional to the mode
defp format(w, k, [{i, m, doc_break(s, :flex)} | t]) do
k = k + byte_size(s)
if w == :infinity or m == :flat or fits?(w, k, true, t) do
[s | format(w, k, t)]
else
[indent(i) | format(w, i, t)]
end
end
# Strict breaks are conditional to the mode
defp format(w, k, [{i, mode, doc_break(s, :strict)} | t]) do
if mode == :break do
[indent(i) | format(w, i, t)]
else
[s | format(w, k + byte_size(s), t)]
end
end
# Nesting is conditional to the mode.
defp format(w, k, [{i, mode, doc_nest(x, j, nest)} | t]) do
if nest == :always or (nest == :break and mode == :break) do
format(w, k, [{apply_nesting(i, k, j), mode, x} | t])
else
format(w, k, [{i, mode, x} | t])
end
end
# Groups must do the fitting decision.
defp format(w, k, [{i, :break, doc_group(x, :inherit)} | t]) do
format(w, k, [{i, :break, x} | t])
end
defp format(w, k, [{i, _, doc_group(x, _)} | t]) do
if w == :infinity or fits?(w, k, false, [{i, :flat, x}]) do
format(w, k, [{i, :flat, x} | t])
else
format(w, k, [{i, :break, x} | t])
end
end
defp collapse(["\n" <> _ | t], max, count, i) do
collapse(t, max, count + 1, i)
end
defp collapse(["" | t], max, count, i) do
collapse(t, max, count, i)
end
defp collapse(t, max, count, i) do
[:binary.copy("\n", min(max, count)) <> :binary.copy(" ", i) | t]
end
defp apply_nesting(_, k, :cursor), do: k
defp apply_nesting(_, _, :reset), do: 0
defp apply_nesting(i, _, j), do: i + j
defp ansi(color) do
IO.ANSI.format_fragment(color, true)
end
defp indent(0), do: @newline
defp indent(i), do: @newline <> :binary.copy(" ", i)
end
| 33.051758 | 101 | 0.617373 |
9e34dad25322747f6b1f257784d4a96f4cff79b8 | 2,170 | exs | Elixir | apps/alchemist/mix.exs | smartcitiesdata/smartcitiesdata | c926c25003a8ee2d09b933c521c49f674841c0b6 | [
"Apache-2.0"
] | 26 | 2019-09-20T23:54:45.000Z | 2020-08-20T14:23:32.000Z | apps/alchemist/mix.exs | smartcitiesdata/smartcitiesdata | c926c25003a8ee2d09b933c521c49f674841c0b6 | [
"Apache-2.0"
] | 757 | 2019-08-15T18:15:07.000Z | 2020-09-18T20:55:31.000Z | apps/alchemist/mix.exs | smartcitiesdata/smartcitiesdata | c926c25003a8ee2d09b933c521c49f674841c0b6 | [
"Apache-2.0"
] | 9 | 2019-11-12T16:43:46.000Z | 2020-03-25T16:23:16.000Z |
defmodule Alchemist.MixProject do
use Mix.Project
def project do
[
app: :alchemist,
version: "0.2.9",
elixir: "~> 1.10",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
start_permanent: Mix.env() == :prod,
deps: deps(),
aliases: aliases(),
test_paths: test_paths(Mix.env()),
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
coveralls: :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test
]
]
end
def application do
[
extra_applications: [:logger],
mod: {Alchemist.Application, []}
]
end
defp deps do
[
{:brook, "~> 0.4"},
{:cachex, "~> 3.1"},
{:checkov, "~> 1.0", only: [:test]},
{:credo, "~> 1.0", only: [:dev, :test], runtime: false},
{:dead_letter, in_umbrella: true},
{:distillery, "~> 2.1"},
{:divo, "~> 1.3", only: [:dev, :test, :integration]},
{:divo_kafka, "~> 0.1", only: [:integration]},
{:divo_redis, "~> 0.1", only: [:integration]},
{:excoveralls, "~> 0.11.1", only: :test},
{:tasks, in_umbrella: true, only: :dev},
{:jason, "~> 1.2", override: true},
{:httpoison, "~> 1.6"},
{:libcluster, "~> 3.1"},
{:mix_test_watch, "~> 1.0", only: :dev, runtime: false},
{:observer_cli, "~> 1.4"},
{:off_broadway_kafka, "~> 1.0.1"},
{:placebo, "~> 2.0.0-rc2", only: [:dev, :test, :integration]},
{:properties, in_umbrella: true},
{:retry, "~> 0.13"},
{:smart_city, "~> 5.2.1", override: true},
{:smart_city_test, "~> 2.2.1", only: [:test, :integration]},
{:telemetry_event, in_umbrella: true},
{:timex, "~> 3.6"},
{:transformers, in_umbrella: true},
{:performance, in_umbrella: true, only: :integration}
]
end
defp test_paths(:integration), do: ["test/integration"]
defp test_paths(_), do: ["test/unit"]
defp aliases do
[
lint: ["format", "credo"],
verify: ["format --check-formatted", "credo"]
]
end
end
| 28.933333 | 68 | 0.52212 |
9e350146324ef63114d1e85c8c387c1d1ddb0cbb | 1,854 | ex | Elixir | tests/path-test/keast.ex | mahyar-osn/scaffold-pathway-maker | b9249103c58f3d1e7d8ad6d36054e60e4904b2e9 | [
"Apache-2.0"
] | null | null | null | tests/path-test/keast.ex | mahyar-osn/scaffold-pathway-maker | b9249103c58f3d1e7d8ad6d36054e60e4904b2e9 | [
"Apache-2.0"
] | null | null | null | tests/path-test/keast.ex | mahyar-osn/scaffold-pathway-maker | b9249103c58f3d1e7d8ad6d36054e60e4904b2e9 | [
"Apache-2.0"
] | 1 | 2021-12-01T02:03:55.000Z | 2021-12-01T02:03:55.000Z |
EX Version: 2
Region: /
!#nodeset nodes
Shape. Dimension=0
#Fields=1
1) coordinates, coordinate, rectangular cartesian, real, #Components=3
x. #Values=2 (value,d/ds1)
y. #Values=2 (value,d/ds1)
z. #Values=2 (value,d/ds1)
Node: 1
-1.479162494069713e+00 0.000000000000000e+00
-5.688841365188414e+00 0.000000000000000e+00
8.316505803184116e+02 0.000000000000000e+00
Node: 2
-6.633272951118450e-01 0.000000000000000e+00
-1.088684182666644e+01 0.000000000000000e+00
8.313722720681664e+02 0.000000000000000e+00
Node: 3
-1.985135291068078e+00 0.000000000000000e+00
-1.746282118900751e+01 0.000000000000000e+00
8.308973484455781e+02 0.000000000000000e+00
Node: 4
-2.011647113917304e+00 0.000000000000000e+00
-2.338444896505831e+01 0.000000000000000e+00
8.305453828914985e+02 0.000000000000000e+00
Node: 5
-1.233912891216252e+00 0.000000000000000e+00
-2.921598540984682e+01 0.000000000000000e+00
8.285286603000318e+02 0.000000000000000e+00
Node: 6
9.659000000000035e-01 0.000000000000000e+00
-5.817000000000003e+01 0.000000000000000e+00
8.079999999999998e+02 0.000000000000000e+00
!#mesh mesh1d, dimension=1, nodeset=nodes
Shape. Dimension=1, line
#Scale factor sets=0
#Nodes=2
#Fields=1
1) coordinates, coordinate, rectangular cartesian, real, #Components=3
x. c.Hermite, no modify, standard node based.
#Nodes=2
1. #Values=2
Value labels: value d/ds1
2. #Values=2
Value labels: value d/ds1
y. c.Hermite, no modify, standard node based.
#Nodes=2
1. #Values=2
Value labels: value d/ds1
2. #Values=2
Value labels: value d/ds1
z. c.Hermite, no modify, standard node based.
#Nodes=2
1. #Values=2
Value labels: value d/ds1
2. #Values=2
Value labels: value d/ds1
Element: 1
Nodes:
1 2
Element: 2
Nodes:
2 3
Element: 3
Nodes:
3 4
Element: 4
Nodes:
4 5
Element: 5
Nodes:
5 6
| 25.39726 | 70 | 0.740022 |
9e3506513cf6a4e805634e8e44c4bd954fa23e28 | 1,472 | ex | Elixir | lib/mail_slurp_api/model/page_bounced_recipients.ex | mailslurp/mailslurp-client-elixir | 5b98b91bb327de5216e873cd45b4fbb3c1b55c90 | [
"MIT"
] | 1 | 2021-06-17T18:07:49.000Z | 2021-06-17T18:07:49.000Z | lib/mail_slurp_api/model/page_bounced_recipients.ex | mailslurp/mailslurp-client-elixir | 5b98b91bb327de5216e873cd45b4fbb3c1b55c90 | [
"MIT"
] | null | null | null | lib/mail_slurp_api/model/page_bounced_recipients.ex | mailslurp/mailslurp-client-elixir | 5b98b91bb327de5216e873cd45b4fbb3c1b55c90 | [
"MIT"
] | 1 | 2021-03-16T18:55:56.000Z | 2021-03-16T18:55:56.000Z |
# NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule MailSlurpAPI.Model.PageBouncedRecipients do
@moduledoc """
Paginated bounced recipients. Page index starts at zero. Projection results may omit larger entity fields. For fetching a full entity use the projection ID with individual method calls.
"""
@derive [Poison.Encoder]
defstruct [
:"content",
:"empty",
:"first",
:"last",
:"number",
:"numberOfElements",
:"pageable",
:"size",
:"sort",
:"totalElements",
:"totalPages"
]
  @type t :: %__MODULE__{
    :"content" => [MailSlurpAPI.Model.BounceRecipient.t()] | nil,
    :"empty" => boolean() | nil,
    :"first" => boolean() | nil,
    :"last" => boolean() | nil,
    :"number" => integer() | nil,
    :"numberOfElements" => integer() | nil,
    :"pageable" => MailSlurpAPI.Model.Pageable.t() | nil,
    :"size" => integer() | nil,
    :"sort" => MailSlurpAPI.Model.Sort.t() | nil,
    :"totalElements" => integer() | nil,
    :"totalPages" => integer() | nil
  }
end
defimpl Poison.Decoder, for: MailSlurpAPI.Model.PageBouncedRecipients do
import MailSlurpAPI.Deserializer
def decode(value, options) do
value
|> deserialize(:"content", :list, MailSlurpAPI.Model.BounceRecipient, options)
|> deserialize(:"pageable", :struct, MailSlurpAPI.Model.Pageable, options)
|> deserialize(:"sort", :struct, MailSlurpAPI.Model.Sort, options)
end
end
| 29.44 | 187 | 0.647418 |
9e3525f6c17222b6de1799b232c8da1e93178dd6 | 1,811 | ex | Elixir | lib/web/plug/session.ex | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | lib/web/plug/session.ex | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | lib/web/plug/session.ex | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null |
# Copyright(c) 2015-2018 ACCESS CO., LTD. All rights reserved.
use Croma
defmodule Antikythera.Plug.Session do
@moduledoc """
Plug to automatically load/store session information using a specific session store.
Uses cookie store by default.
## Usage
Adding the following line in a controller module enables this plug:
plug Antikythera.Plug.Session, :load, [key: "12345678"]
Then,
- session values are loaded from cookie before controller action is executed, and
- session values are stored into cookie after controller action is executed.
"""
alias Antikythera.Conn
alias Antikythera.Session
defun load(conn :: v[Conn.t], opts :: Keyword.t(String.t | atom)) :: Conn.t do
key = opts[:key]
store_name = Keyword.get(opts, :store, :cookie) |> Atom.to_string() |> Macro.camelize()
store_module = Module.safe_concat("Antikythera.Session", store_name)
{session_id, data} = store_module.load(Conn.get_req_cookie(conn, key))
session = %Session{
state: :update,
id: session_id,
data: data,
}
conn
|> Conn.register_before_send(make_before_send(store_module, key))
|> Conn.assign(:session, session)
end
defunp make_before_send(store :: module, key :: String.t) :: (Conn.t -> Conn.t) do
fn %Conn{assigns: %{session: session}} = conn ->
%Session{state: state, id: id, data: data} = session
case state do
:update ->
new_id = store.save(id, data)
Conn.put_resp_cookie(conn, key, new_id)
:renew ->
store.delete(id)
new_id = store.save(nil, data)
Conn.put_resp_cookie(conn, key, new_id)
:destroy ->
store.delete(id)
Conn.put_resp_cookie_to_revoke(conn, key)
end
end
end
end
| 31.77193 | 99 | 0.643843 |
9e353a7344017778584972a5219b7701e102e968 | 2,256 | ex | Elixir | lib/crit/users/password_api.ex | brownt23/crit19 | c45c7b3ae580c193168d83144da0eeb9bc91c8a9 | [
"MIT"
] | 6 | 2019-07-16T19:31:23.000Z | 2021-06-05T19:01:05.000Z | lib/crit/users/password_api.ex | brownt23/crit19 | c45c7b3ae580c193168d83144da0eeb9bc91c8a9 | [
"MIT"
] | null | null | null | lib/crit/users/password_api.ex | brownt23/crit19 | c45c7b3ae580c193168d83144da0eeb9bc91c8a9 | [
"MIT"
] | 3 | 2020-02-24T23:38:27.000Z | 2020-08-01T23:50:17.000Z |
defmodule Crit.Users.PasswordApi do
import Ecto.Query, warn: false
alias Crit.Users.UniqueId
alias Crit.Users.Schemas.{User, PasswordToken, Password}
alias Crit.Sql
alias Crit.Repo
def fresh_password_changeset(), do: Password.default_changeset()
def set_password(auth_id, params, institution) do
conflict_behavior = [on_conflict: :replace_all, conflict_target: :auth_id]
result =
%Password{auth_id: auth_id}
|> Password.create_changeset(params)
|> Sql.insert(conflict_behavior, institution)
case result do
{:ok, _} -> :ok # Results should never be of interest
error -> error
end
end
def attempt_login(auth_id, proposed_password, institution) do
password =
Password.Query.by_auth_id(auth_id)
|> Password.Query.preloading_user
|> Sql.one(institution)
if password && Pbkdf2.verify_pass(proposed_password, password.hash) do
{:ok, UniqueId.new(password.user.id, institution)}
else
Pbkdf2.no_user_verify()
:error
end
end
# Password tokens
def one_token(token_text) do
PasswordToken.Query.expired_tokens |> Repo.delete_all()
case Repo.get_by(PasswordToken, text: token_text) do
nil ->
{:error, "missing token '#{token_text}'"}
token ->
PasswordToken.force_update(token, NaiveDateTime.utc_now)
{:ok, token}
end
end
def redeem_password_token(
%PasswordToken{user_id: user_id,
institution_short_name: institution,
text: text},
password_params) do
# Note: a transaction isn't useful here because the assumption that
# users are never deleted (just inactivated) is pervasive in this code,
# so the `get` "cannot" fail.
user = Sql.get(User, user_id, institution)
retval = set_password(user.auth_id, password_params, institution)
case retval do
:ok ->
delete_password_token(text)
{:ok, UniqueId.new(user_id, institution)}
{:error, changeset} ->
{:error, changeset}
end
end
def delete_password_token(token_text) do
PasswordToken.Query.by(text: token_text) |> Repo.delete_all
# There is no need for deletion information to leak out
:ok
end
end
| 30.08 | 78 | 0.672872 |
9e3554aa28b16b804b8a0e7660edab8218c24a76 | 13,518 | exs | Elixir | test/parser_test.exs | bwittenbrook3/surface | 8697032e907a349e195ff7c495e69e5a6fec6c78 | [
"MIT"
] | null | null | null | test/parser_test.exs | bwittenbrook3/surface | 8697032e907a349e195ff7c495e69e5a6fec6c78 | [
"MIT"
] | null | null | null | test/parser_test.exs | bwittenbrook3/surface | 8697032e907a349e195ff7c495e69e5a6fec6c78 | [
"MIT"
] | null | null | null |
defmodule Surface.Compiler.ParserTest do
use ExUnit.Case, async: true
import Surface.Compiler.Parser
test "empty node" do
assert parse("") == {:ok, []}
end
test "only text" do
assert parse("Some text") == {:ok, ["Some text"]}
end
test "keep spaces before node" do
assert parse("\n<div></div>") ==
{:ok,
[
"\n",
{"div", [], [], %{line: 2, space: ""}}
]}
end
test "keep spaces after node" do
assert parse("<div></div>\n") ==
{:ok,
[
{"div", [], [], %{line: 1, space: ""}},
"\n"
]}
end
test "multiple nodes" do
code = """
<div>
Div 1
</div>
<div>
Div 2
</div>
"""
assert parse(code) ==
{:ok,
[
{"div", [], ["\n Div 1\n"], %{line: 1, space: ""}},
"\n",
{"div", [], ["\n Div 2\n"], %{line: 4, space: ""}},
"\n"
]}
end
test "text before and after" do
assert parse("hello<foo>bar</foo>world") ==
{:ok, ["hello", {"foo", [], ["bar"], %{line: 1, space: ""}}, "world"]}
end
test "component" do
code = ~S(<MyComponent label="My label"/>)
{:ok, [node]} = parse(code)
assert node ==
{"MyComponent",
[
{"label", "My label", %{line: 1, spaces: [" ", "", ""]}}
], [], %{line: 1, space: ""}}
end
test "spaces and line break between children" do
code = """
<div>
<span/> <span/>
<span/>
</div>
"""
{:ok, tree} = parse(code)
assert tree == [
{
"div",
'',
[
"\n ",
{"span", '', '', %{line: 2, space: ""}},
" ",
{"span", [], [], %{line: 2, space: ""}},
"\n ",
{"span", [], [], %{line: 3, space: ""}},
"\n"
],
%{line: 1, space: ""}
},
"\n"
]
end
test "ignore comments" do
code = """
<div>
<!-- This will be ignored -->
<span/>
</div>
"""
assert parse(code) ==
{:ok,
[
{
"div",
'',
[
"\n ",
"\n ",
{"span", [], [], %{line: 3, space: ""}},
"\n"
],
%{line: 1, space: ""}
},
"\n"
]}
end
describe "void elements" do
test "without attributes" do
code = """
<div>
<hr>
</div>
"""
{:ok, [{"div", [], ["\n ", node, "\n"], _}, "\n"]} = parse(code)
assert node == {"hr", [], [], %{line: 2, space: ""}}
end
test "with attributes" do
code = """
<div>
<img
src="file.gif"
alt="My image"
>
</div>
"""
{:ok, [{"div", [], ["\n ", node, "\n"], _}, "\n"]} = parse(code)
assert node ==
{"img",
[
{"src", "file.gif", %{line: 3, spaces: ["\n ", "", ""]}},
{"alt", "My image", %{line: 4, spaces: ["\n ", "", ""]}}
], [], %{line: 2, space: "\n "}}
end
end
describe "HTML only" do
test "single node" do
assert parse("<foo>bar</foo>") ==
{:ok, [{"foo", [], ["bar"], %{line: 1, space: ""}}]}
end
test "Elixir node" do
assert parse("<Foo.Bar>bar</Foo.Bar>") ==
{:ok, [{"Foo.Bar", [], ["bar"], %{line: 1, space: ""}}]}
end
test "mixed nodes" do
assert parse("<foo>one<bar>two</bar>three</foo>") ==
{:ok,
[
{"foo", [], ["one", {"bar", [], ["two"], %{line: 1, space: ""}}, "three"],
%{line: 1, space: ""}}
]}
end
test "self-closing nodes" do
assert parse("<foo>one<bar><bat/></bar>three</foo>") ==
{:ok,
[
{"foo", [],
[
"one",
{"bar", [], [{"bat", [], [], %{line: 1, space: ""}}], %{line: 1, space: ""}},
"three"
], %{line: 1, space: ""}}
]}
end
end
describe "interpolation" do
test "as root" do
assert parse("{{baz}}") ==
{:ok, [{:interpolation, "baz", %{line: 1}}]}
end
test "without root node but with text" do
assert parse("foo {{baz}} bar") ==
{:ok, ["foo ", {:interpolation, "baz", %{line: 1}}, " bar"]}
end
test "single curly bracket" do
assert parse("<foo>{bar}</foo>") ==
{:ok, [{"foo", [], ["{", "bar}"], %{line: 1, space: ""}}]}
end
test "double curly bracket" do
assert parse("<foo>{{baz}}</foo>") ==
{:ok, [{"foo", '', [{:interpolation, "baz", %{line: 1}}], %{line: 1, space: ""}}]}
end
test "mixed curly bracket" do
assert parse("<foo>bar{{baz}}bat</foo>") ==
{:ok,
[
{"foo", '', ["bar", {:interpolation, "baz", %{line: 1}}, "bat"],
%{line: 1, space: ""}}
]}
end
test "single-closing curly bracket" do
assert parse("<foo>bar{{ 'a}b' }}bat</foo>") ==
{:ok,
[
{"foo", [], ["bar", {:interpolation, " 'a}b' ", %{line: 1}}, "bat"],
%{line: 1, space: ""}}
]}
end
end
describe "with macros" do
test "single node" do
assert parse("<#foo>bar</#foo>") ==
{:ok, [{"#foo", [], ["bar"], %{line: 1, space: ""}}]}
end
test "mixed nodes" do
assert parse("<#foo>one<bar>two</baz>three</#foo>") ==
{:ok, [{"#foo", [], ["one<bar>two</baz>three"], %{line: 1, space: ""}}]}
assert parse("<#foo>one<#bar>two</#baz>three</#foo>") ==
{:ok, [{"#foo", [], ["one<#bar>two</#baz>three"], %{line: 1, space: ""}}]}
assert parse("<#foo>one<bar>two<baz>three</#foo>") ==
{:ok, [{"#foo", [], ["one<bar>two<baz>three"], %{line: 1, space: ""}}]}
assert parse("<#foo>one</bar>two</baz>three</#foo>") ==
{:ok, [{"#foo", [], ["one</bar>two</baz>three"], %{line: 1, space: ""}}]}
end
test "macro issue" do
assert parse("<#Macro/>") ==
{:ok, [{"#Macro", '', [], %{line: 1, space: ""}}]}
end
test "keep track of the line of the definition" do
code = """
<div>
one
<#Foo>
two
</#Foo>
</div>
"""
{:ok, [{_, _, children, _} | _]} = parse(code)
{_, _, _, meta} = Enum.at(children, 1)
assert meta.line == 3
end
test "do not perform interpolation for inner content" do
assert parse("<#Foo>one {{ @var }} two</#Foo>") ==
{:ok, [{"#Foo", [], ["one {{ @var }} two"], %{line: 1, space: ""}}]}
end
end
describe "errors on" do
test "invalid opening tag" do
assert parse("<>bar</>") ==
{:error, "expected opening HTML tag", 1}
end
test "invalid closing tag" do
assert parse("<foo>bar</></foo>") ==
{:error, "expected closing tag for \"foo\"", 1}
end
test "tag mismatch" do
assert parse("<foo>bar</baz>") ==
{:error, "closing tag \"baz\" did not match opening tag \"foo\"", 1}
end
test "incomplete tag content" do
assert parse("<foo>bar") ==
{:error, "expected closing tag for \"foo\"", 1}
end
test "incomplete macro content" do
assert parse("<#foo>bar</#bar>") ==
{:error, "expected closing tag for \"#foo\"", 1}
end
test "non-closing interpolation" do
assert parse("<foo>{{bar</foo>") ==
{:error, "expected closing for interpolation", 1}
end
end
describe "attributes" do
test "regular nodes" do
code = """
<foo
prop1="value1"
prop2="value2"
>
bar
<div>{{ var }}</div>
</foo>
"""
attributes = [
{"prop1", "value1", %{line: 2, spaces: ["\n ", "", ""]}},
{"prop2", "value2", %{line: 3, spaces: ["\n ", "", ""]}}
]
children = [
"\n bar\n ",
{"div", [], [{:interpolation, " var ", %{line: 6}}], %{line: 6, space: ""}},
"\n"
]
assert parse(code) == {:ok, [{"foo", attributes, children, %{line: 1, space: "\n"}}, "\n"]}
end
test "self-closing nodes" do
code = """
<foo
prop1="value1"
prop2="value2"
/>
"""
attributes = [
{"prop1", "value1", %{line: 2, spaces: ["\n ", "", ""]}},
{"prop2", "value2", %{line: 3, spaces: ["\n ", "", ""]}}
]
assert parse(code) == {:ok, [{"foo", attributes, [], %{line: 1, space: "\n"}}, "\n"]}
end
test "macro nodes" do
code = """
<#foo
prop1="value1"
prop2="value2"
>
bar
</#foo>
"""
attributes = [
{"prop1", "value1", %{line: 2, spaces: ["\n ", "", ""]}},
{"prop2", "value2", %{line: 3, spaces: ["\n ", "", ""]}}
]
assert parse(code) ==
{:ok, [{"#foo", attributes, ["\n bar\n"], %{line: 1, space: "\n"}}, "\n"]}
end
test "regular nodes with whitespaces" do
code = """
<foo
prop1
prop2 = "value 2"
prop3 =
{{ var3 }}
prop4
></foo>
"""
attributes = [
{"prop1", true, %{line: 2, spaces: ["\n ", "\n "]}},
{"prop2", "value 2", %{line: 3, spaces: ["", " ", " "]}},
{"prop3", {:attribute_expr, " var3 ", %{line: 5}},
%{line: 4, spaces: ["\n ", " ", "\n "]}},
{"prop4", true, %{line: 6, spaces: ["\n ", "\n"]}}
]
assert parse(code) == {:ok, [{"foo", attributes, [], %{line: 1, space: ""}}, "\n"]}
end
test "self-closing nodes with whitespaces" do
code = """
<foo
prop1
prop2 = "2"
prop3 =
{{ var3 }}
prop4
/>
"""
attributes = [
{"prop1", true, %{line: 2, spaces: ["\n ", "\n "]}},
{"prop2", "2", %{line: 3, spaces: ["", " ", " "]}},
{"prop3", {:attribute_expr, " var3 ", %{line: 5}},
%{line: 4, spaces: ["\n ", " ", "\n "]}},
{"prop4", true, %{line: 6, spaces: ["\n ", "\n"]}}
]
assert parse(code) == {:ok, [{"foo", attributes, [], %{line: 1, space: ""}}, "\n"]}
end
test "value as expression" do
code = """
<foo
prop1={{ var1 }}
prop2={{ var2 }}
/>
"""
attributes = [
{"prop1", {:attribute_expr, " var1 ", %{line: 2}}, %{line: 2, spaces: ["\n ", "", ""]}},
{"prop2", {:attribute_expr, " var2 ", %{line: 3}}, %{line: 3, spaces: ["\n ", "", ""]}}
]
assert parse(code) == {:ok, [{"foo", attributes, [], %{line: 1, space: "\n"}}, "\n"]}
end
test "integer values" do
code = """
<foo
prop1=1
prop2=2
/>
"""
attributes = [
{"prop1", 1, %{line: 2, spaces: ["\n ", "", ""]}},
{"prop2", 2, %{line: 3, spaces: ["\n ", "", ""]}}
]
assert parse(code) == {:ok, [{"foo", attributes, [], %{line: 1, space: "\n"}}, "\n"]}
end
test "boolean values" do
code = """
<foo
prop1
prop2=true
prop3=false
prop4
/>
"""
attributes = [
{"prop1", true, %{line: 2, spaces: ["\n ", "\n "]}},
{"prop2", true, %{line: 3, spaces: ["", "", ""]}},
{"prop3", false, %{line: 4, spaces: ["\n ", "", ""]}},
{"prop4", true, %{line: 5, spaces: ["\n ", "\n"]}}
]
assert parse(code) == {:ok, [{"foo", attributes, [], %{line: 1, space: ""}}, "\n"]}
end
test "string values" do
code = """
<foo prop="str"/>
"""
attr_value = "str"
attributes = [
{"prop", attr_value, %{line: 1, spaces: [" ", "", ""]}}
]
assert parse(code) == {:ok, [{"foo", attributes, [], %{line: 1, space: ""}}, "\n"]}
end
test "empty string" do
code = """
<foo prop=""/>
"""
attr_value = ""
attributes = [
{"prop", attr_value, %{line: 1, spaces: [" ", "", ""]}}
]
assert parse(code) == {:ok, [{"foo", attributes, [], %{line: 1, space: ""}}, "\n"]}
end
test "string with embedded interpolation" do
code = """
<foo prop="before {{ var }} after"/>
"""
attr_value = ["before ", {:attribute_expr, " var ", %{line: 1}}, " after"]
attributes = [
{"prop", attr_value, %{line: 1, spaces: [" ", "", ""]}}
]
assert parse(code) == {:ok, [{"foo", attributes, [], %{line: 1, space: ""}}, "\n"]}
end
test "string with only an embedded interpolation" do
code = """
<foo prop="{{ var }}"/>
"""
attr_value = [{:attribute_expr, " var ", %{line: 1}}]
attributes = [
{"prop", attr_value, %{line: 1, spaces: [" ", "", ""]}}
]
assert parse(code) == {:ok, [{"foo", attributes, [], %{line: 1, space: ""}}, "\n"]}
end
end
end
| 25.896552 | 98 | 0.383637 |
9e35c7f4072b6fa7da64750bc8a1d5cfa5770d43 | 1,280 | ex | Elixir | lib/rbmq/rpc_server.ex | Lean5/rbmq | c6e746c212e103fc4ada53969e88fc1f32a357a6 | [
"MIT"
] | null | null | null | lib/rbmq/rpc_server.ex | Lean5/rbmq | c6e746c212e103fc4ada53969e88fc1f32a357a6 | [
"MIT"
] | 1 | 2019-02-13T14:26:22.000Z | 2019-02-13T14:26:22.000Z | lib/rbmq/rpc_server.ex | Lean5/rbmq | c6e746c212e103fc4ada53969e88fc1f32a357a6 | [
"MIT"
] | null | null | null |
defmodule RBMQ.RpcServer do
@moduledoc """
  RPC server: consumes JSON-encoded requests from a queue, invokes `call/1`,
  and publishes the JSON-encoded reply to the caller's `reply_to` queue,
  preserving the `correlation_id`.
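
  A usage sketch (the options given to `use` are forwarded to `RBMQ.Consumer`,
  so the exact queue configuration shown here is an assumption about your setup):

      defmodule MyApp.EchoRpcServer do
        use RBMQ.RpcServer, queue: "echo_rpc"

        # Receives the decoded JSON payload; whatever is returned is encoded
        # back to JSON and sent to the caller.
        def call(payload) do
          payload
        end
      end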
"""
@doc false
defmacro __using__(opts) do
quote do
use RBMQ.Consumer, unquote(opts)
def handle_delivery(payload, meta, state) do
safe_run fn(channel) ->
AMQP.Basic.ack(channel, meta.delivery_tag)
Task.start(fn ->
{type, response} =
try do
response = payload
|> Jason.decode!
|> call(meta)
{"rpc-call-success", response}
rescue
e ->
msg = if Exception.exception?(e),
do: Exception.message(e),
else: "Unknown error"
{"rpc-call-error", %{message: msg, stacktrace: Exception.format_stacktrace()}}
end
response = response |> Jason.encode!
:ok = AMQP.Basic.publish(channel, "", meta.reply_to, response, type: type, correlation_id: meta.correlation_id)
end)
end
{:noreply, state}
end
def call(payload, _meta), do: call(payload)
      def call(_payload) do
raise "RPC callback is not implemented."
end
defoverridable [call: 1, call: 2]
end
end
end
| 26.666667 | 123 | 0.510938 |
9e35d0d796406def3097ec33f88627d602124420 | 9,225 | ex | Elixir | lib/keto_ex.ex | ControlGap/keto_ex | 84d1f3632cce6ad6322e52103b25a07468e0926a | [
"MIT"
] | 1 | 2021-06-22T04:43:16.000Z | 2021-06-22T04:43:16.000Z | lib/keto_ex.ex | ControlGap/keto_ex | 84d1f3632cce6ad6322e52103b25a07468e0926a | [
"MIT"
] | null | null | null | lib/keto_ex.ex | ControlGap/keto_ex | 84d1f3632cce6ad6322e52103b25a07468e0926a | [
"MIT"
] | null | null | null |
defmodule KetoEx do
@moduledoc """
Ory Keto REST API client
https://www.ory.sh/keto/docs/reference/api
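
  A minimal usage sketch (host, port, and the subject/action/resource values
  below are illustrative):

      client = KetoEx.client("localhost", port: 4466, scheme: "http")
      request = KetoEx.request("users:alice", "read", "blog:posts:1")
      KetoEx.allowed?(client, request)
      #=> true, false, or {:error, reason}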
"""
alias KetoEx.{Role, Policy}
@base "/engines/acp/ory/"
@flavors [:exact, :glob, :regex]
@default_flavor :glob
@type flavor :: :exact | :glob | :regex
@type check_allowed_input :: %{
subject: Strting.t(),
action: Strting.t(),
resource: Strting.t(),
context: map()
}
@doc """
Create a tesla client to be passed into all the other functions
"""
@spec client(any, nil | maybe_improper_list | map) :: Tesla.Client.t()
def client(host \\ "localhost", opts \\ [port: 4466, scheme: "http"]) do
port = opts[:port] || 4466
scheme = opts[:scheme] || "http"
middleware = [
{Tesla.Middleware.BaseUrl, "#{scheme}://#{host}:#{port}"},
{Tesla.Middleware.JSON, engine_opts: [keys: :atoms!]}
]
Tesla.client(middleware)
end
@doc """
Generate a request map (subject, action, resource) with an optional context
"""
@spec request(
subject :: String.t(),
action :: String.t(),
resource :: String.t(),
context :: map()
) :: check_allowed_input()
def request(subject, action, resource, context \\ %{}) do
%{subject: subject, action: action, resource: resource, context: context}
end
@doc """
Check if a request is allowed
https://www.ory.sh/keto/docs/reference/api#check-if-a-request-is-allowed
"""
@spec allowed?(client :: Tesla.Client.t(), policy :: check_allowed_input(), flavor :: flavor()) ::
{:error, any} | true | false
def allowed?(client, policy, flavor \\ @default_flavor) when flavor in @flavors do
client
|> Tesla.post(@base <> "#{flavor}/allowed", policy)
|> handle_response()
end
@doc """
List Access Control Policies (ACP)
Optional params: `limit`, `offset`, `subject`, `resource`, `action`
https://www.ory.sh/keto/docs/reference/api#listoryaccesscontrolpolicies
"""
@spec list_acp(client :: Tesla.Client.t(), flavor :: flavor(), params :: Keyword.t()) ::
{:error, any} | {:ok, [Policy.t()]}
def list_acp(client, flavor \\ @default_flavor, params \\ []) when flavor in @flavors do
client
|> Tesla.get(@base <> "#{flavor}/policies", query: params)
|> handle_response(Policy)
end
@doc """
Upsert an ACP
https://www.ory.sh/keto/docs/reference/api#upsertoryaccesscontrolpolicy
"""
@spec upsert_acp(client :: Tesla.Client.t(), policy :: map(), flavor :: flavor()) ::
{:error, any} | {:ok, Policy.t()}
def upsert_acp(client, policy, flavor \\ @default_flavor) when flavor in @flavors do
client
|> Tesla.put(@base <> "#{flavor}/policies", policy)
|> handle_response(Policy)
end
@doc """
Fetch ACP via ID
https://www.ory.sh/keto/docs/reference/api#getoryaccesscontrolpolicy
"""
@spec get_acp(client :: Tesla.Client.t(), policy_id :: String.t(), flavor :: flavor()) ::
{:error, any} | {:ok, Policy.t()}
def get_acp(client, policy_id, flavor \\ @default_flavor) when flavor in @flavors do
client
|> Tesla.get(@base <> "#{flavor}/policies/#{policy_id}")
|> handle_response(Policy)
end
@doc """
Delete ACP via ID
https://www.ory.sh/keto/docs/reference/api#deleteoryaccesscontrolpolicy
"""
@spec delete_acp(client :: Tesla.Client.t(), policy_id :: String.t(), flavor :: flavor()) ::
{:error, any} | {:ok, any}
def delete_acp(client, policy_id, flavor \\ @default_flavor) when flavor in @flavors do
client
|> Tesla.delete(@base <> "#{flavor}/policies/#{policy_id}")
|> handle_response()
end
@doc """
List Access Control Policy Roles
Optional params: `limit`, `offset`, `member`
https://www.ory.sh/keto/docs/reference/api#list-ory-access-control-policy-roles
"""
@spec list_acp_roles(client :: Tesla.Client.t(), flavor :: flavor(), params :: Keyword.t()) ::
{:error, any} | {:ok, [Role.t()]}
def list_acp_roles(client, flavor \\ @default_flavor, params \\ []) when flavor in @flavors do
client
|> Tesla.get(@base <> "#{flavor}/roles", query: params)
|> handle_response(Role)
end
@doc """
Upsert an ACP Role
https://www.ory.sh/keto/docs/reference/api#upsert-an-ory-access-control-policy-role
"""
@spec upsert_acp_role(
client :: Tesla.Client.t(),
role :: map(),
flavor :: flavor()
) :: {:error, any} | {:ok, Role.t()}
def upsert_acp_role(client, role, flavor \\ @default_flavor) when flavor in @flavors do
client
|> Tesla.put(@base <> "#{flavor}/roles", role)
|> handle_response(Role)
end
@doc """
Fetch ACP Role via ID
https://www.ory.sh/keto/docs/reference/api#get-an-ory-access-control-policy-role
"""
@spec get_acp_role(client :: Tesla.Client.t(), role_id :: String.t(), flavor :: flavor()) ::
{:error, any} | {:ok, Role.t()}
def get_acp_role(client, role_id, flavor \\ @default_flavor) when flavor in @flavors do
client
|> Tesla.get(@base <> "#{flavor}/roles/#{role_id}")
|> handle_response(Role)
end
@doc """
Delete ACP Role via ID
https://www.ory.sh/keto/docs/reference/api#delete-an-ory-access-control-policy-role
"""
@spec delete_acp_role(
client :: Tesla.Client.t(),
role_id :: String.t(),
flavor :: flavor()
) :: {:error, any} | {:ok, any}
def delete_acp_role(client, role_id, flavor \\ @default_flavor) when flavor in @flavors do
client
|> Tesla.delete(@base <> "#{flavor}/roles/#{role_id}")
|> handle_response()
end
@doc """
Add a member to an ACP Role
https://www.ory.sh/keto/docs/reference/api#add-a-member-to-an-ory-access-control-policy-role
"""
@spec add_member_to_acp_role(
client :: Tesla.Client.t(),
role_id :: String.t(),
body :: map(),
flavor :: flavor()
) :: {:error, any} | {:ok, any}
def add_member_to_acp_role(client, role_id, body, flavor \\ @default_flavor)
when flavor in @flavors do
client
|> Tesla.put(@base <> "#{flavor}/roles/#{role_id}/members", body)
|> handle_response(Role)
end
@doc """
Remove a member from an ACP Role
https://www.ory.sh/keto/docs/reference/api#remove-a-member-from-an-ory-access-control-policy-role
"""
@spec remove_member_from_acp_role(
client :: Tesla.Client.t(),
role_id :: String.t(),
member_id :: String.t(),
flavor :: flavor()
) :: {:error, any} | {:ok, any}
def remove_member_from_acp_role(client, role_id, member_id, flavor \\ @default_flavor)
when flavor in @flavors do
client
|> Tesla.delete(@base <> "#{flavor}/roles/#{role_id}/members/#{member_id}")
|> handle_response()
end
@doc """
Alive health check
https://www.ory.sh/keto/docs/reference/api#health
"""
@spec health_alive(client :: Tesla.Client.t()) :: {:error, any} | {:ok, map()}
def health_alive(client) do
client
|> Tesla.get("/health/alive")
|> handle_response()
end
@doc """
Ready health check
https://www.ory.sh/keto/docs/reference/api#health
"""
@spec health_ready(client :: Tesla.Client.t()) :: {:error, any} | {:ok, map()}
def health_ready(client) do
client
|> Tesla.get("/health/ready")
|> handle_response()
end
@doc """
Fetch version number
"""
@spec version(client :: Tesla.Client.t()) :: {:error, any} | {:ok, map()}
def version(client) do
client
|> Tesla.get("/version")
|> handle_response()
end
# if this is an allowed? response - just return the boolean.
defp handle_response({:ok, %Tesla.Env{status: status, body: %{allowed: allowed?}}})
when status in [200, 403],
do: allowed?
# all other cases return a error/success tuple.
defp handle_response({:ok, %Tesla.Env{status: 200, body: body}}), do: {:ok, body}
defp handle_response({:ok, %Tesla.Env{status: 204}}), do: :ok
defp handle_response({:ok, %Tesla.Env{status: 404, body: _body}}), do: {:error, "not found"}
defp handle_response({:ok, %Tesla.Env{status: _, body: body}}),
do: {:error, body}
defp handle_response({:error, :econnrefused}) do
{:error, "Connection to Keto Refused - ensure `client/2` is called with the correct hostname"}
end
defp handle_response(err), do: err
# when a struct is passed into this fn, returns a struct,
defp handle_response({:ok, %Tesla.Env{status: 200, body: body}}, a_struct) when is_list(body),
do: {:ok, Enum.map(body, &Kernel.struct(a_struct, &1))}
defp handle_response({:ok, %Tesla.Env{status: 200, body: body}}, a_struct),
do: {:ok, Kernel.struct(a_struct, body)}
defp handle_response({:ok, %Tesla.Env{status: 404, body: _body}}, _a_struct),
do: {:error, "not found"}
defp handle_response({:ok, %Tesla.Env{status: 500, body: _body}}, _a_struct) do
{:error, "Server error"}
end
defp handle_response({:ok, %Tesla.Env{status: _, body: body}}, _a_struct) do
{:error, body}
end
defp handle_response({:error, :econnrefused}, _a_struct) do
{:error, "Connection to Keto Refused - ensure `client/2` is called with the correct hostname"}
end
defp handle_response(err, _struct), do: err
end
| 31.701031 | 100 | 0.623957 |
9e35d8e5ee8439a5dfa7b60bfbc5a83173758857 | 1,347 | ex | Elixir | clients/tool_results/lib/google_api/tool_results/v1beta3/model/step_labels_entry.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/tool_results/lib/google_api/tool_results/v1beta3/model/step_labels_entry.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/tool_results/lib/google_api/tool_results/v1beta3/model/step_labels_entry.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.ToolResults.V1beta3.Model.StepLabelsEntry do
@moduledoc """
## Attributes
- key (String): Defaults to: `null`.
- value (String): Defaults to: `null`.
"""
defstruct [
:"key",
:"value"
]
end
defimpl Poison.Decoder, for: GoogleApi.ToolResults.V1beta3.Model.StepLabelsEntry do
def decode(value, _options) do
value
end
end
defimpl Poison.Encoder, for: GoogleApi.ToolResults.V1beta3.Model.StepLabelsEntry do
def encode(value, options) do
GoogleApi.ToolResults.V1beta3.Deserializer.serialize_non_nil(value, options)
end
end
| 28.0625 | 83 | 0.743133 |
9e35e004be07d7e5bd0092477663ecb18e19a9a8 | 1,040 | exs | Elixir | elixir/getting-started/8_recursion.exs | wesleyegberto/dojos-languages | 87170a722efac1247c713daa21cb3fcc39f5c5c1 | [
"MIT"
] | null | null | null | elixir/getting-started/8_recursion.exs | wesleyegberto/dojos-languages | 87170a722efac1247c713daa21cb3fcc39f5c5c1 | [
"MIT"
] | null | null | null | elixir/getting-started/8_recursion.exs | wesleyegberto/dojos-languages | 87170a722efac1247c713daa21cb3fcc39f5c5c1 | [
"MIT"
] | null | null | null |
# Recursion
# === Loop through recursion ===
defmodule Recursion do
def print_multiple_times(msg, n) when n <= 1 do
IO.puts msg
end
def print_multiple_times(msg, n) do
IO.puts msg
print_multiple_times(msg, n - 1)
end
end
Recursion.print_multiple_times("Hi", 3)
# === Reduce and Map ===
# Recursion and tail call optimization are an important part of Elixir
# and are commonly used to create loops.
defmodule ListMath do
def sum_list([head | tail], accumulator) do
sum_list(tail, head + accumulator)
end
def sum_list([], accumulator) do
accumulator
end
def double_each([head | tail]) do
[head * 2 | double_each(tail)]
end
def double_each([]) do
[]
end
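  # Tail-recursive variant (sketch): carry an accumulator so the recursive call
  # is the last operation, then reverse the accumulated list at the end.
  def double_each_tail(list), do: double_each_tail(list, [])
  defp double_each_tail([head | tail], acc), do: double_each_tail(tail, [head * 2 | acc])
  defp double_each_tail([], acc), do: Enum.reverse(acc)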
end
ListMath.sum_list([1, 2, 3], 0)
ListMath.double_each([1, 2, 3])
# To manipulate lists we will commonly use the Enum module
Enum.reduce([1, 2, 3], 0, fn(x, acc) -> x + acc end)
Enum.reduce([1, 2, 3], 0, &+/2)
Enum.map([1, 2, 3], fn(x) -> x * 2 end)
Enum.map([1, 2, 3], &(&1 * 2))
| 20.392157 | 70 | 0.6125 |
9e35e37eb8d08334a84de99c2f3f21d98ccb68b9 | 1,662 | ex | Elixir | clients/books/lib/google_api/books/v1/model/volume2.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/books/lib/google_api/books/v1/model/volume2.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/books/lib/google_api/books/v1/model/volume2.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Books.V1.Model.Volume2 do
@moduledoc """
## Attributes
* `items` (*type:* `list(GoogleApi.Books.V1.Model.Volume.t)`, *default:* `nil`) - A list of volumes.
* `kind` (*type:* `String.t`, *default:* `onboarding#volume`) - Resource type.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) -
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:items => list(GoogleApi.Books.V1.Model.Volume.t()),
:kind => String.t(),
:nextPageToken => String.t()
}
field(:items, as: GoogleApi.Books.V1.Model.Volume, type: :list)
field(:kind)
field(:nextPageToken)
end
defimpl Poison.Decoder, for: GoogleApi.Books.V1.Model.Volume2 do
def decode(value, options) do
GoogleApi.Books.V1.Model.Volume2.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Books.V1.Model.Volume2 do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 31.358491 | 104 | 0.698556 |
9e35f3c6307309302c6bc9aacc433509e0201ee0 | 929 | ex | Elixir | roman-numerals/lib/roman_numerals.ex | koladilip/elixir-examples | 49553fe39bbff6e35d1cb2c26fcb6d87fe4e28d3 | [
"MIT"
] | null | null | null | roman-numerals/lib/roman_numerals.ex | koladilip/elixir-examples | 49553fe39bbff6e35d1cb2c26fcb6d87fe4e28d3 | [
"MIT"
] | null | null | null | roman-numerals/lib/roman_numerals.ex | koladilip/elixir-examples | 49553fe39bbff6e35d1cb2c26fcb6d87fe4e28d3 | [
"MIT"
] | null | null | null |
defmodule RomanNumerals do
@doc """
  Convert the number to a Roman numeral.
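
  Examples:

      iex> RomanNumerals.numeral(1990)
      "MCMXC"
      iex> RomanNumerals.numeral(2008)
      "MMVIII"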
"""
@spec numeral(pos_integer) :: String.t()
def numeral(number) do
cond do
number >= 1000 -> String.duplicate("M", div(number, 1000)) <> numeral(rem(number, 1000))
number >= 900 -> "CM" <> numeral(number-900)
number >= 500 -> "D" <> numeral(number-500)
number >= 400 -> "CD" <> numeral(number-400)
number >= 100 -> String.duplicate("C", div(number, 100)) <> numeral(rem(number, 100))
number >= 90 -> "XC" <> numeral(number-90)
number >= 50 -> "L" <> numeral(number-50)
number >= 40 -> "XL" <> numeral(number-40)
number >= 10 -> String.duplicate("X", div(number, 10)) <> numeral(rem(number, 10))
number == 9 -> "IX"
number >= 5 -> "V" <> numeral(number-5)
number == 4 -> "IV"
number >= 1 -> String.duplicate("I", number)
number <= 0 -> ""
end
end
end
| 37.16 | 94 | 0.556512 |
9e36037c753f343cbdc19299ccf1b82611f9a289 | 201 | ex | Elixir | lib/synapse/registered_user.ex | Voronchuk/ex_matrix_api | fab0f162c84a7e72f3df257260487a977e4134d5 | [
"MIT"
] | 2 | 2020-09-02T23:10:09.000Z | 2021-03-29T09:19:15.000Z | lib/synapse/registered_user.ex | Voronchuk/ex_matrix_api | fab0f162c84a7e72f3df257260487a977e4134d5 | [
"MIT"
] | null | null | null | lib/synapse/registered_user.ex | Voronchuk/ex_matrix_api | fab0f162c84a7e72f3df257260487a977e4134d5 | [
"MIT"
] | null | null | null |
defmodule ExMatrixApi.Synapse.RegisteredUser do
@moduledoc """
Newly registered user with auth token.
"""
defstruct user_id: nil,
device_id: nil,
access_token: nil
end
| 20.1 | 47 | 0.661692 |
9e36127f98e708f324a89e37c2815f223d198540 | 45,023 | ex | Elixir | lib/phoenix/endpoint.ex | zoosky/phoenix | 8c90262009652390286dd6150bed513f6a3e6150 | [
"MIT"
] | null | null | null | lib/phoenix/endpoint.ex | zoosky/phoenix | 8c90262009652390286dd6150bed513f6a3e6150 | [
"MIT"
] | null | null | null | lib/phoenix/endpoint.ex | zoosky/phoenix | 8c90262009652390286dd6150bed513f6a3e6150 | [
"MIT"
] | null | null | null |
defmodule Phoenix.Endpoint do
@moduledoc ~S"""
Defines a Phoenix endpoint.
The endpoint is the boundary where all requests to your
web application start. It is also the interface your
application provides to the underlying web servers.
Overall, an endpoint has three responsibilities:
* to provide a wrapper for starting and stopping the
endpoint as part of a supervision tree
* to define an initial plug pipeline for requests
to pass through
* to host web specific configuration for your
application
## Endpoints
An endpoint is simply a module defined with the help
of `Phoenix.Endpoint`. If you have used the `mix phx.new`
generator, an endpoint was automatically generated as
part of your application:
defmodule YourApp.Endpoint do
use Phoenix.Endpoint, otp_app: :your_app
# plug ...
# plug ...
plug YourApp.Router
end
Endpoints must be explicitly started as part of your application
supervision tree. Endpoints are added by default
to the supervision tree in generated applications. Endpoints can be
added to the supervision tree as follows:
supervisor(YourApp.Endpoint, [])
### Endpoint configuration
All endpoints are configured in your application environment.
For example:
config :your_app, YourApp.Endpoint,
secret_key_base: "kjoy3o1zeidquwy1398juxzldjlksahdk3"
Endpoint configuration is split into two categories. Compile-time
configuration means the configuration is read during compilation
and changing it at runtime has no effect. The compile-time
configuration is mostly related to error handling and instrumentation.
Runtime configuration, instead, is accessed during or
after your application is started and can be read through the
`c:config/2` function:
YourApp.Endpoint.config(:port)
YourApp.Endpoint.config(:some_config, :default_value)
### Dynamic configuration
For dynamically configuring the endpoint, such as loading data
from environment variables or configuration files, Phoenix invokes
the `init/2` callback on the endpoint, passing a `:supervisor`
atom as first argument and the endpoint configuration as second.
  All of Phoenix's configuration, except the compile-time configuration listed
  below, can be set dynamically from the `c:init/2` callback.
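
  For example (a minimal sketch that reads the HTTP port from an environment
  variable when the endpoint boots):

      def init(_key, config) do
        port = String.to_integer(System.get_env("PORT") || "4000")
        {:ok, Keyword.put(config, :http, port: port)}
      end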
### Compile-time configuration
* `:code_reloader` - when `true`, enables code reloading functionality.
For code the list of code reloader configuration options see
`Phoenix.CodeReloader.reload!/1`
* `:debug_errors` - when `true`, uses `Plug.Debugger` functionality for
debugging failures in the application. Recommended to be set to `true`
only in development as it allows listing of the application source
code during debugging. Defaults to `false`
* `:render_errors` - responsible for rendering templates whenever there
is a failure in the application. For example, if the application crashes
with a 500 error during a HTML request, `render("500.html", assigns)`
will be called in the view given to `:render_errors`. Defaults to:
[view: MyApp.ErrorView, accepts: ~w(html), layout: false]
The default format is used when none is set in the connection
* `:instrumenters` - a list of instrumenter modules whose callbacks will
be fired on instrumentation events. Read more on instrumentation in the
"Instrumentation" section below
### Runtime configuration
* `:cache_static_manifest` - a path to a json manifest file that contains
static files and their digested version. This is typically set to
"priv/static/cache_manifest.json" which is the file automatically generated
by `mix phx.digest`
* `:check_origin` - configure transports to check `origin` header or not. May
be `false`, `true`, a list of hosts that are allowed, or a function provided as
MFA tuple. Hosts also support wildcards.
For example, using a list of hosts:
check_origin: ["//phoenixframework.org", "//*.example.com"]
or a custom MFA function:
check_origin: {MyAppWeb.Auth, :my_check_origin?, []}
The MFA is invoked with the request `%URI{}` as the first argument,
followed by arguments in the MFA list
Defaults to `true`.
* `:http` - the configuration for the HTTP server. Currently uses
Cowboy and accepts all options as defined by
[`Plug.Adapters.Cowboy`](https://hexdocs.pm/plug/Plug.Adapters.Cowboy.html).
Defaults to `false`
* `:https` - the configuration for the HTTPS server. Currently uses
Cowboy and accepts all options as defined by
[`Plug.Adapters.Cowboy`](https://hexdocs.pm/plug/Plug.Adapters.Cowboy.html).
Defaults to `false`
* `:force_ssl` - ensures no data is ever sent via HTTP, always redirecting
to HTTPS. It expects a list of options which are forwarded to `Plug.SSL`.
By default it sets the "strict-transport-security" header in HTTPS requests,
forcing browsers to always use HTTPS. If an unsafe request (HTTP) is sent,
it redirects to the HTTPS version using the `:host` specified in the `:url`
configuration. To dynamically redirect to the `host` of the current request,
set `:host` in the `:force_ssl` configuration to `nil`
* `:secret_key_base` - a secret key used as a base to generate secrets
for encrypting and signing data. For example, cookies and tokens
are signed by default, but they may also be encrypted if desired.
Defaults to `nil` as it must be set per application
* `:server` - when `true`, starts the web server when the endpoint
supervision tree starts. Defaults to `false`. The `mix phx.server`
task automatically sets this to `true`
* `:url` - configuration for generating URLs throughout the app.
Accepts the `:host`, `:scheme`, `:path` and `:port` options. All
keys except `:path` can be changed at runtime. Defaults to:
[host: "localhost", path: "/"]
The `:port` option requires either an integer, string, or
`{:system, "ENV_VAR"}`. When given a tuple like `{:system, "PORT"}`,
the port will be referenced from `System.get_env("PORT")` at runtime
as a workaround for releases where environment specific information
is loaded only at compile-time.
The `:host` option requires a string or `{:system, "ENV_VAR"}`. Similar
to `:port`, when given a tuple like `{:system, "HOST"}`, the host
will be referenced from `System.get_env("HOST")` at runtime.
    The `:scheme` option accepts `"http"` and `"https"` values. The default value
    is inferred from the top-level `:http` or `:https` option. It is useful
when hosting Phoenix behind a load balancer or reverse proxy and
terminating SSL there.
    The `:path` option can be used to override the root path. This is useful when
    hosting Phoenix behind a reverse proxy with URL rewrite rules.
* `:static_url` - configuration for generating URLs for static files.
It will fallback to `url` if no option is provided. Accepts the same
options as `url`
* `:watchers` - a set of watchers to run alongside your server. It
expects a list of tuples containing the executable and its arguments.
Watchers are guaranteed to run in the application directory, but only
when the server is enabled. For example, the watcher below will run
the "watch" mode of the webpack build tool when the server starts.
You can configure it to whatever build tool or command you want:
[node: ["node_modules/webpack/bin/webpack.js", "--mode", "development",
"--watch-stdin"]]
The `:cd` option can be used on a watcher to override the folder from
which the watcher will run. By default this will be the project's root:
`File.cwd!()`
[node: ["node_modules/webpack/bin/webpack.js", "--mode", "development",
"--watch-stdin"], cd: "my_frontend"]
* `:live_reload` - configuration for the live reload option.
Configuration requires a `:patterns` option which should be a list of
file patterns to watch. When these files change, it will trigger a reload.
If you are using a tool like [pow](http://pow.cx) in development,
you may need to set the `:url` option appropriately.
live_reload: [
url: "ws://localhost:4000",
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif)$},
~r{web/views/.*(ex)$},
~r{web/templates/.*(eex)$}
]
]
* `:pubsub` - configuration for this endpoint's pubsub adapter.
Configuration either requires a `:name` of the registered pubsub
server or a `:name` and `:adapter` pair. The pubsub name and adapter
are compile time configuration, while the remaining options are runtime.
The given adapter and name pair will be started as part of the supervision
tree. If no adapter is specified, the pubsub system will work by sending
events and subscribing to the given name. Defaults to:
[adapter: Phoenix.PubSub.PG2, name: MyApp.PubSub]
It also supports custom adapter configuration:
[name: :my_pubsub, adapter: Phoenix.PubSub.Redis,
host: "192.168.100.1"]
## Endpoint API
In the previous section, we have used the `c:config/2` function that is
automatically generated in your endpoint. Here's a list of all the functions
that are automatically defined in your endpoint:
* for handling paths and URLs: `c:struct_url/0`, `c:url/0`, `c:path/1`,
`c:static_url/0`, and `c:static_path/1`
* for handling channel subscriptions: `c:subscribe/2` and `c:unsubscribe/1`
* for broadcasting to channels: `c:broadcast/3`, `c:broadcast!/3`,
`c:broadcast_from/4`, and `c:broadcast_from!/4`
* for configuration: `c:start_link/0`, `c:config/2`, and `c:config_change/2`
* for instrumentation: `c:instrument/3`
* as required by the `Plug` behaviour: `c:Plug.init/1` and `c:Plug.call/2`
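
  For example (host, port, and topic below are illustrative):

      YourApp.Endpoint.url()
      #=> "http://localhost:4000"

      YourApp.Endpoint.static_path("/js/app.js")
      #=> "/js/app.js"

      YourApp.Endpoint.broadcast("room:lobby", "new_msg", %{body: "hi"})
      #=> :ok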
## Instrumentation
Phoenix supports instrumentation through an extensible API. Each endpoint
defines an `c:instrument/3` macro that both users and Phoenix internals can call
to instrument generic events. This macro is responsible for measuring the time
it takes for the event to be processed and for notifying a list of interested
instrumenter modules of this measurement.
You can configure this list of instrumenter modules in the compile-time
configuration of your endpoint. (see the `:instrumenters` option above). The
way these modules express their interest in events is by exporting public
functions where the name of each function is the name of an event. For
example, if someone instruments the `:render_view` event, then each
instrumenter module interested in that event will have to export
`render_view/3`.
### Callbacks cycle
The event callback sequence is:
1. The event callback is called *before* the event happens (in this case,
before the view is rendered) with the atom `:start` as the first
argument; see the "Before clause" section below
2. The event occurs (in this case, the view is rendered)
3. The same event callback is called again, this time with the atom `:stop`
as the first argument; see the "After clause" section below
The second and third argument that each event callback takes depends on the
callback being an "after" or a "before" callback i.e. it depends on the
value of the first argument, `:start` or `:stop`. For this reason, most of
the time you will want to define (at least) two separate clauses for each
event callback, one for the "before" and one for the "after" callbacks.
All event callbacks are run in the same process that calls the `c:instrument/3`
macro; hence, instrumenters should be careful to avoid performing blocking actions.
If an event callback fails in any way (exits, throws, or raises), it won't
affect anything as the error is caught, but the failure will be logged. Note
that "after" callbacks are not guaranteed to be called as, for example, a link
may break before they've been called.
#### "Before" clause
When the first argument to an event callback is `:start`, the signature of
that callback is:
event_callback(:start, compile_metadata, runtime_metadata)
where:
* `compile_metadata` is a map of compile-time metadata about the environment
where `instrument/3` has been called. It contains the module where the
instrumentation is happening (under the `:module` key), the file and line
(`:file` and `:line`), and the function inside which the instrumentation
is happening (under `:function`). This information can be used arbitrarily
by the callback
* `runtime_metadata` is a map of runtime data that the instrumentation
passes to the callbacks. This can be used for any purposes: for example,
when instrumenting the rendering of a view, the name of the view could be
passed in these runtime data so that instrumenters know which view is
being rendered (`instrument(:view_render, %{view: "index.html"}, fn
...)`)
#### "After" clause
When the first argument to an event callback is `:stop`, the signature of that
callback is:
event_callback(:stop, time_diff, result_of_before_callback)
where:
* `time_diff` is an integer representing the time it took to execute the
instrumented function **in native units**
* `result_of_before_callback` is the return value of the "before" clause of
the same `event_callback`. This is a means of passing data from the
"before" clause to the "after" clause when instrumenting
The return value of each "before" event callback will be stored and passed to
the corresponding "after" callback.
### Using instrumentation
Each Phoenix endpoint defines its own `instrument/3` macro. This macro is
called like this:
require MyApp.Endpoint
MyApp.Endpoint.instrument(:render_view, %{view: "index.html"}, fn ->
# actual view rendering
end)
All the instrumenter modules that export a `render_view/3` function will be
notified of the event so that they can perform their respective actions.
### Phoenix default events
By default, Phoenix instruments the following events:
* `:phoenix_controller_call` - the entire controller pipeline.
The `%Plug.Conn{}` is passed as runtime metadata
* `:phoenix_controller_render` - the rendering of a view from a
controller. The map of runtime metadata passed to instrumentation
callbacks has the `:view` key - for the name of the view, e.g. `HexWeb.ErrorView`,
the `:template` key - for the name of the template, e.g.,
`"index.html"`, the `:format` key - for the format of the template, and
the `:conn` key - containing the `%Plug.Conn{}`
* `:phoenix_error_render` - the rendering of an error view when an exception,
throw, or exit is caught. The map of runtime metadata contains the `:status`
      key of the error's HTTP status code, the `:conn` key containing the
`%Plug.Conn{}`, as well as the `:kind`, `:reason`, and `:stacktrace` of
the caught error
* `:phoenix_channel_join` - the joining of a channel. The `%Phoenix.Socket{}`
and join params are passed as runtime metadata via `:socket` and `:params`
* `:phoenix_channel_receive` - the receipt of an incoming message over a
channel. The `%Phoenix.Socket{}`, payload, event, and ref are passed as
runtime metadata via `:socket`, `:params`, `:event`, and `:ref`
* `:phoenix_socket_connect` - the connection of the user socket transport.
The map of runtime metadata contains the `:transport`, `:params`, a map of
`connect_info`, and the `:user_socket` module.
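
  As a sketch, an instrumenter for the `:phoenix_controller_render` event above
  could look like this (remember to list the module under the endpoint's
  `:instrumenters` compile-time configuration):

      defmodule MyApp.RenderInstrumenter do
        require Logger

        def phoenix_controller_render(:start, _compile_metadata, %{view: view, template: template}) do
          "#{inspect(view)}/#{template}"
        end

        def phoenix_controller_render(:stop, time_diff, name) do
          diff = System.convert_time_unit(time_diff, :native, :microsecond)
          Logger.debug("rendered #{name} in #{diff}us")
        end
      end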
### Dynamic instrumentation
If you want to instrument a piece of code, but the endpoint that should
instrument it (the one that contains the `c:instrument/3` macro you want to use)
is not known at compile time, only at runtime, you can use the
`Phoenix.Endpoint.instrument/4` macro. Refer to its documentation for more
information.
"""
@type topic :: String.t
@type event :: String.t
@type msg :: map
require Logger
# Configuration
@doc """
Starts the endpoint supervision tree.
Starts endpoint's configuration cache and possibly the servers for
handling requests.
"""
@callback start_link() :: Supervisor.on_start
@doc """
Access the endpoint configuration given by key.
"""
@callback config(key :: atom, default :: term) :: term
@doc """
Reload the endpoint configuration on application upgrades.
"""
@callback config_change(changed :: term, removed :: term) :: term
@doc """
Initialize the endpoint configuration.
Invoked when the endpoint supervisor starts, allows dynamically
configuring the endpoint from system environment or other runtime sources.
"""
@callback init(:supervisor, config :: Keyword.t) :: {:ok, Keyword.t}
# Paths and URLs
@doc """
Generates the endpoint base URL, but as a `URI` struct.
"""
@callback struct_url() :: URI.t
@doc """
Generates the endpoint base URL without any path information.
"""
@callback url() :: String.t
@doc """
Generates the path information when routing to this endpoint.
"""
@callback path(path :: String.t) :: String.t
@doc """
Generates the static URL without any path information.
"""
@callback static_url() :: String.t
@doc """
Generates a route to a static file in `priv/static`.
"""
@callback static_path(path :: String.t) :: String.t
# Channels
@doc """
Subscribes the caller to the given topic.
See `Phoenix.PubSub.subscribe/3` for options.
"""
@callback subscribe(topic, opts :: Keyword.t) :: :ok | {:error, term}
@doc """
Unsubscribes the caller from the given topic.
"""
@callback unsubscribe(topic) :: :ok | {:error, term}
@doc """
Broadcasts a `msg` as `event` in the given `topic`.
"""
@callback broadcast(topic, event, msg) :: :ok | {:error, term}
@doc """
Broadcasts a `msg` as `event` in the given `topic`.
Raises in case of failures.
"""
@callback broadcast!(topic, event, msg) :: :ok | no_return
@doc """
Broadcasts a `msg` from the given `from` as `event` in the given `topic`.
"""
@callback broadcast_from(from :: pid, topic, event, msg) :: :ok | {:error, term}
@doc """
Broadcasts a `msg` from the given `from` as `event` in the given `topic`.
Raises in case of failures.
"""
@callback broadcast_from!(from :: pid, topic, event, msg) :: :ok | no_return
# Instrumentation
@doc """
Allows instrumenting operation defined by `function`.
`runtime_metadata` may be omitted and defaults to `nil`.
Read more about instrumentation in the "Instrumentation" section.
"""
@macrocallback instrument(instrument_event :: Macro.t, runtime_metadata :: Macro.t, function :: Macro.t) :: Macro.t
@doc false
defmacro __using__(opts) do
quote do
@behaviour Phoenix.Endpoint
unquote(config(opts))
unquote(pubsub())
unquote(plug())
unquote(server())
end
end
defp config(opts) do
quote do
@otp_app unquote(opts)[:otp_app] || raise "endpoint expects :otp_app to be given"
var!(config) = Phoenix.Endpoint.Supervisor.config(@otp_app, __MODULE__)
var!(code_reloading?) = var!(config)[:code_reloader]
# Avoid unused variable warnings
_ = var!(code_reloading?)
@doc false
def init(_key, config) do
{:ok, config}
end
defoverridable init: 2
end
end
@doc false
def __pubsub_server__!(module) do
if server = module.__pubsub_server__() do
server
else
raise ArgumentError, """
no :pubsub server configured, please set up :pubsub in your config.
By default this looks like:
config :my_app, MyApp.Endpoint,
...,
pubsub: [name: MyApp.PubSub,
adapter: Phoenix.PubSub.PG2]
"""
end
end
defp pubsub() do
quote do
def __pubsub_server__ do
Phoenix.Config.cache(__MODULE__,
:__phoenix_pubsub_server__,
&Phoenix.Endpoint.Supervisor.pubsub_server/1)
end
# TODO v2: Remove pid version
@doc false
def subscribe(pid, topic) when is_pid(pid) and is_binary(topic) do
IO.warn "#{__MODULE__}.subscribe/2 is deprecated, please use subscribe/1"
Phoenix.PubSub.subscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), pid, topic, [])
end
def subscribe(pid, topic, opts) when is_pid(pid) and is_binary(topic) and is_list(opts) do
Phoenix.PubSub.subscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), pid, topic, opts)
end
def subscribe(topic) when is_binary(topic) do
Phoenix.PubSub.subscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic, [])
end
def subscribe(topic, opts) when is_binary(topic) and is_list(opts) do
Phoenix.PubSub.subscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic, opts)
end
# TODO v2: Remove pid version
@doc false
def unsubscribe(pid, topic) do
IO.warn "#{__MODULE__}.unsubscribe/2 is deprecated, please use unsubscribe/1"
Phoenix.PubSub.unsubscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic)
end
def unsubscribe(topic) do
Phoenix.PubSub.unsubscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic)
end
def broadcast_from(from, topic, event, msg) do
Phoenix.Channel.Server.broadcast_from(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), from, topic, event, msg)
end
def broadcast_from!(from, topic, event, msg) do
Phoenix.Channel.Server.broadcast_from!(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), from, topic, event, msg)
end
def broadcast(topic, event, msg) do
Phoenix.Channel.Server.broadcast(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic, event, msg)
end
def broadcast!(topic, event, msg) do
Phoenix.Channel.Server.broadcast!(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic, event, msg)
end
end
end
defp plug() do
quote location: :keep do
use Plug.Builder
import Phoenix.Endpoint
Module.register_attribute(__MODULE__, :phoenix_sockets, accumulate: true)
if force_ssl = Phoenix.Endpoint.__force_ssl__(__MODULE__, var!(config)) do
plug Plug.SSL, force_ssl
end
if var!(config)[:debug_errors] do
use Plug.Debugger,
otp_app: @otp_app,
banner: {Phoenix.Endpoint.RenderErrors, :__debugger_banner__, []},
style: [
primary: "#EB532D",
logo: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAJEAAABjCAYAAACbguIxAAAAAXNSR0IArs4c6QAAAAlwSFlzAAALEwAACxMBAJqcGAAAHThJREFUeAHtPWlgVOW197vbLNkTFoFQlixAwpIVQZ8ooE+tRaBWdoK4VF5tfe2r1tb2ta611r6n9b1Xd4GETRGxIuJSoKACAlkIkD0hsiRoIHtmues7J3LpOJ2Z3Jm5yUxi5s+991vOOd+5Z777fWf7CGXA79Ct46ZGmyPnshw9WaX5qTSlJBCKjqU51aoohKVUivaIRqUUmlactEK3iCp1gablTztsnZ9kbK16w2P7wcKw5AAJhKqiBWlzIyIjVrKsnKtQ7HiiqiaGZQOC5Qm/JAkiUekqSha2X7/x2JP1FOXw1G6wLDw4oPvFl94+ZVmkib9HJnQuy7MRfUW+qoqSLMtHWi60PzB9Z+2BvsI7iEc/B3wK0d8Wjk8dHRX7B5hjbqBZU6R+sMa3VBWFUiSxqLmhdc303XVHjMcwCDFQDngUosO3JF0VPzz2eSKRLJrjPLbxhVARYYXDUCKlKAJFMV00yw731d6fOlWVKadT/mjSxsIb/ek32Lb3OPANAdl/c3La8CExmziGnUYYz2thd1JwhpBk5RDDyBccTuWgKNpqWxzCsdk76iuwbdXiyd/nIqO2ufcL9lmVBZvgcP5k4pYTrwcLa7B/cBy4LESVeVlvsxS9wN+ZR1Jkioi2B5M3nPiTJ1LqVuXaCcuaPdUZUSbJjg9T1hXfZASsQRiBcYDULJ/2OM1zDxOa0zf1eMFDROmcQ5Jeam7peE+iKOfQ+IjFHM//gqF7T4A0UhD3dflHkusHd3EaS/r0SupWZO+lCHWFwislio2Kpi30cKKQZEKYGEL7L1e4ZqFkRSWs/2upYEauSpKjpblldvaOmkPBwBns6z8HLn/O3Lsenjs+N2pU7G94hr6JpjnevT4cn0GQ1HZb29JBZWXfvh2vQuRCBg2z1W5i4q9zKQvfW1mmOrrsy6duPb4pfIkcWJTp+V4p4zcUzrY72h9SJCX8R88wVGSEdWPZkskrw5/YgUGhnpno8khLbk9dHBMZu4Wimctl4XqjKCrV4ehcmbH5xAZXGsuWTLpFdSpylyC1t3RIjQfLv2h6pInqdG0zeO8fB/wSIgR9clnGw1aL5Un/0ISmtSorVJe97cYpb1R8pFFQtSzzBc5iXoPPMqyhCKOqlEycKqW2gHL0vCqRvR1S146srRX7tD6DV98c8FuIEFxlXnYxz/EZvkGHR60kSUrjVy1TZu2qKdMoqr4j8wOWMXvVeOMsJqlyB0vkfRdPtz42aGbROOf5GpAQIai61Tlgiw1Ot+SZJONLFUUU5q49GlPvokequStzM0OZl/SEDWczmLIq2mwdv8rcVvVOT+2/jfV6FtYe+SJQ9CseK8KwEFUUu1flNLqSlvxa8VKH0/msa5mnezT/EJ6fGBubsL1qdfahVxOj4z21+zaXBTwTIdNq7siVGIYN/1X2pTcsCY6alILiFNcXfmxR+qrICMsrIGica7m3e0WWRFWyP+zNzOOt30AuD3gmQqbAwnRPf2IOy5uTa1dlfuxK87Q3T64/V9o0RhLFBtdyb/c0w3KMKeqZyhVZu721+baVByVELS3tv+pvDANT3vUVt019xpXuWYVfNKbkHx0liM7tuKjW8+NNpjk1q6af/9vkcYa5uejBG45tgvqc4YCq83I6WY7rM09Ho5jY1n5xiSfzCOqRLBbrWormh+rBBYt20emw/yht88lX9bQfiG2CmomQIYqifN4fGRMZGb1p46QRY9xpT9tSvnPc2sJhotjxgiLLTvd692dcS1ms0a9U5uW85173bXkOWohssrSjPzKLAfXEjNzEclfa86cOH4aRK1iWmn/iR0nrDpslQdiqqKLo2s7TPc9xt1Tm5bafXDL1fk/1A7ks6M/Z7mmJo8ZmjDpLs0HLY0j4jAtqXA8hclzfjM+M/7ugCqUTNxxf7EIQe3LFlGdZYlrC89wQl3KPt7IoXJAVeqfU1b4lfXvlB66Ntt88OmnikJhFxEbH7zt+4el7qxouuNb3x/ughQgHXZU3vZPjmH63LtJemCRIx1IKjnRr4E8unHCTJTZ2l6jIdRPWH03S2mjX0vmp3zVbI+6jeeYqQjGxPf15upWVYFNBPytCE4jAU0WiKC2CxHz44aHa+++vaW7XYPfXqzFCtHz6Kc7MjO2vTEC6FcX5XtLaonl4j4JkjY/fJUO0UofofCBzc+lzWO7+++yWpMnDYyMXixQ7nefIBAjFjCZEtUA7FvTcDAM7PZUhqqLS4OyptqhELBEd4sa0LScK3GH152dDhKhmedZ+xmy6pj8zAmmXFfHl5LVH78X76vkTfsAOid+K9+h+2253/EKvj9IPR1LW5fEjEzY2N1x8uYGyIYxgfwe/m3JldBSXwUhsMmdhR6gmlVFE9UvJQVU7VMeJUBqMDRGiyhW563gTuypYRoVD/06b8NSUzYUPIy0YqcKazW9prr4oTJIsrE3eeOw/e5tWnOVi46z3WhjTXIUm42iKNnt1V4ZgCZjuHLIqldrt0p/1CrtRYzBEiMpXZDxiNll+ZxRRoYYjO2xPaIKCbsJxo4fsZxnGrNGFBl14bcVSl1yQ9mYJ2hAhvi74H35G+cjIOxWKzOYYZojesC13zIIk1rWdbV7SV94HhggR2p+io6LXuQ+mPz/bHfYn0zaW/AbH8MhQKnLZTbnlHM8muo+JyJIsqmoDuCaVU4rzI8Uhnjxc/OWh1fWtre5tXZ9xVzs0Ne5as4WZrlDMbI6iU2iOxfWUIT8VTHyCKP9u4qbixw0B6AOIIUKkLUR94OmXVXab49W0zcX3aMR3x+Yx/EKa9s02FCxYU4sQ8yIwtGSTZGJHGDRLWWSFtcLim4f9Gs+yva8XcQqdz00sOP4zbQy9cfXNDZ0YcdE3fHj8Ia/fbJ1wwrGZ6LTtSN1w7FaNtuOLJ/5rpDVig16ziNYvlFdvJh6jaOqfGkKjRq8DDmeyzqtbmX1Zs42utmgWcbZ2/QnSlTh0gAh5k8iImI29SYQhQoQ2SAr0aAP1h05paGg+sWhitx4JxzlxW+mDKesOW9DGJshSR6jHjv7i3mhAn6+qpZk7vdUHW27I5wxtTtdkjWkA9VrYOqih5lhQpFJVkbfbZaUyyuYUO62mRCvDzuNYMoMwvLUnZn6dvEJ6KzW/8Hb3tjUrJj8AMNaAFns85B4whK/uOLRnRQTHcVWqVwh3UHYIn6uivbZVkM7yFjbJyloywI63EN7EFML8Y82F4V7791XG9bTg13D4czVksOEuROiN2NLWNidne9Wn3phTtiLzVRPN3KknoQVkzGlz2OwPpb9R9pI7vP3ZY0YMGR/zM85ims8Q6jtGJbNAtQJYTqpE1bFpUsGJpwGvzyBAtAOOzorfBgEVV2s0uipTtTIjroYIUbcRNvuK0zQJP8d9zFrS0dl+nR6NLuqEYkYl7OY5NkoPc0X498s222OTtp1EXZ
HH3/GFk25gIyw3w7phGsXQYymVDCUU7MwYiqMU0s1/lIbudQUDzwqoDVFHrqgCTOunZUqusovC2+7xcx6ReSgsWzTlZ+ZIy39DbgUK0vE0jV9XOMxDs6CKDBGitWNjY6+ZlXKB4cLP3xomoYbk9V9b6fVyqvaOnHqa4cbobY8vxympG/YfPv97vVZ5nL2ThltGMhZyeUZRRIYRz9guXHui4Yxe3HradQedRidswU96/s7Po4wO1jREiHAgdXfmOAjhTHoG1Zdt0OV1Qn7R9/3FWbUyq4jjTZn+9MMYN0LJpwVZ3c112D5I+WvlW/707822WtCmvbP1vrQ3yv9iJC7DhKhq1ZVtHEtHG0mcEbCCUbZVrZy6jeMj/BZAjW70AiCM0qnI9JegYHTSKjFJolSTurl4IbQxxFSi4dJzxYRjsIcrSc0/MlNPe71tDNnidyNTlLD0i6EJ/0+mCr3MSS0ovc3W2bYGdkPdGme9/bR2+HmnaT6G5dhUCBKZAnvw0QorVUE9uIb0/U9S7WtZosYYjZk1CiCjyhAc+M+2JaPgBwqHZugZgfbFfpd2YC/V5GW9D9v3G8C+5RfPcDsuU9RRsaP9UXcvx2DoCqRvU2PnywmJVuMmjktEGPY5q1s1rYCw1hWBDK43+2Am250H6mKN8CAcS1HmD1ZOeYol3DzwaExUVdbkyY4GubedlKie6pKo7fM2Fz5W7xK+3Ztj1QkbhejyYl5nH5/NDBOiikVpa0xRMS/4xBaiStQqo+O90egP35oyK9JqGqPS7GgTeDR2KOpFkypWY8SI0bjCGZ5hQoRKtsSpVzSEoxEWbVxoogjnF9GfaTNMiJAJvb1DU2UJwtxAXQfmFU+fEV8vwuG0PzppQ8kjvtqEYx266UrRXApR2RRCkUTw9rfAuToyHMDDKERtpmS5pNPpKMp9q/KvoaLfUCGqzMvYx3OWWUYORpLEM6oqvS122D+4UN1xsq7T1pGenpAWHRN5K01Mi/UGCOACNyn/iK6kDUbS7y8sNPJyZutqnqZmKoRO0JtoApSqqDKoVFXnxpT842gW6bOfoUJkpIcjWqVFxf5rsBM95YsbR34wYX6cNfJVhuN7jAdzCo59EwuKr/MFLxR1Y2HB/uGK3BdZTlmAKoFgacBgS0mit0zIP5wXLCw9/Q0VIkRYuypXhLM8/NoGeyLU2dVxlz9HLmC2D0zW4AmWa1lHe2fYZJZFc9Gs2eMLCKFvAm2/XzzDODb4qAk0kbp1TiohrAofejjiC/LPX9rFC6Iqs9QrEMFyH/Cg13RThgtR9cqsz1jedJXri/P3Xpac9cnri8b52w8t8RaT+S5f/XBddfb4V4mYCcRXu96uQ1rNPLPKH+FR0K6iSkWdorwZ/mR7Zrx7qtSFThoScMWOHh8XMzLBmsxwplQ+klkNm/mhXTbHbzGFjktbQ28NFyI8oWjoFcM+C4ZKm93+6/RNJb8PBEb58mmPms3W3/rqK4pyV2r+4ZAcvYWpkU1m8/+AgVf3Z0sGn20wnr696+CpuwPRd2F2t7vPtjf74kkwdYYLERKDeXvAmW54oIS12ZvnZGyq3Btof83Y6Ks/+Oc0J609muCrjZF16N8zNjPufYY3ZfkDV1aFwvrDzbdcf+LUl/7068u2fn2H9RLW0tV275CY+ICTZEp2VdSLy1O71E3F/1a1Ytoo9I/2VI9lsOuJr12dc3H/3pqk3vD2c8VbtjTzFRPP3uHPWhHdSzpsjgf9+Qx1H6URa8kgVjqNU7mhAk1FgXdSE22XWxy8cszW6jh51a6aYlfajLjvlZkICTuVl9NAcdyIQIhsbb240IhMrTV5OccZjpvsiwZURDrs7fNdc137ao8OeFFjLEnT363e76sdfkKuuibpaTPPrvDHu1EW5Xan0/mX9DeO/coXfK2uaOnUpVaWuZejSTZk843sSdkrgj88ZJeoUJ32Fye+WfaiBieYa68J0Wc3jM0Y+Z0RAUm9e7xXMAOsyZvexnCMTxeV7qNBKflyHL4vfHiw4BVD416jCRmnggZQkZWzhBJr4R/vlAlrg8wfQ3mangauiqP1enriwTaCSmpkwfG/6VtKn/eFX6srvy39Hi4y4vFglg2YxEsUxCcgwPEJDW4g114TIiSmdnXWDpo2fc9fwsCH+XzS2sKAZjF3XC+ljhxy/b+M/FLPC0UvyPY2W17WO2U9JfVkIe/jU6yVW6TSdKK/QYiqgnGNik0SmQrZ4dxbfKLp/5aXN37hTrunZ5wJvzNtxB50L/FU76kM13+gbH2v1WF/W7VLTSxnspis/JUmhr5NUdh40tn2YDAOdL0qRDggzB6m12dZYwDODAcPnR6rl7FaP29X1AJHRMW9663etRxxy7JwuLGpY7VrFn7XNu73JcsmzDbRlmsZmeSqHD2SAidprQ3ogOw0JbfQRL5oF0m5U1VONR/v2BPIQrlsefoveM76e3/SPjud9rUTN5TcqdHj6YqCOffY2XOe6vSUXR6snsaBtMETrcdHJ1T4G0YD/9BPkjcWGWZCqcrLeA6yK/673jHIqKijSKHN1vakEeszvXi9tatcPmUTb45c6q3evRz/DA5H5z19kZC014UIB1e2NP1uTI7pPlCfz3Bu2UcHzg7V6/juE9alyupVmQfgONqZetq6tsHPgSyre5wdtpenbC//2LXOqHuczd75uPKIJyf6QOh2tLb/0FcUyt55YycOi7TOZNSvEwtA7s1aPRExnsbbJ0KEiDF3tCk24gFPRHgrc4py9cT8w7q//d7guJYHs2tEOKiohN1NOVGEUggCeOfcefuJG/d/ccoVh5573L3NzB0x3RJtXi6ppoWQ+OGLgp1FV7oLUc3KrEJ/dUvePBZQBRA7LOYRxkxfDUe0Rmt5l7rpxRxHRHGCD1+F0yH80Z8cR30mREho1fLM5zmz+Sd6mKy1sXd0/kfam8ef1Z6NuNbdkd2lJ+JVDy70nKSI0gX/505RZZqJIrdCfqEmVRWcsIPr1sMRlhcVSTXD+mg47OiGQXhZDFTEqpeOtMBt95Ej5ya4rwErV+Ye4Xk2Rw8dWhvB0bl5wsbjy7RnvKIVIT5h6HaGI7pjzmCTcRxCrVAx2qPNrU+FCAd0cknG73gL/wir8+A9zLNTfaopKZB/O+Lz9EMHulGTh532R/nnCY4RZbLorE3OL0p2hxWIW43qFP6Op2S6w8IASlOk5WmQdhqickeBX1KCnkhfUHjaGptar7x6Z+0Jd5iuz30uRIgc09hRJvMmjtMXp4YnTc9ZfySu3kBf5cJ5yTPihsR+FsrjtgSnc8+EDUVzXV8I3mNQABhQb3Yv9/UsCNLRCQVHcn210epwszM6KvYPNGHm96SewLCnpgutV898v/pzrb/7NSRChERgcsxfzs0uxIwb7kR5eobptXXD+0dHu68ZPLXVW4bTfNyQ+E96YqReeHrboSeB3SE+lr6l5FH3PoEEPHibgdxhuz/vuCExZdLIkZ/0pLBEA/AXxY1jvKkBQiZE2oDQ6s6x3C8hLovXyrxdMf6rtaVlTvaOmkPe2vhbjovN+MT4T
/Xg9xe2p/b4+Spv/OrmeR+frXavDySBqt3peC1tQ/Hd7rD8edZjHkLtdlNz03Q395NuNCEXokuDZcvzsraxhPleT7OCih41qvP51PySn/rDKF9tUdkGQQYlerLl+4Ljq04QpQ74LP/Rm4mhekXGetZk0e2JCCcBdHXZ2+/ydMiNLzq81ek5khXTCNrsnfe7h2GHRIhqV2RtQAvzpPyi+a6DwgNbcrOHga+N+UZIreNzZsKMHJJof9jIxOIVKzP/buLN17rSFOw9mNQ6HYK4Ln3Dca+7UvgD/dXMmS6n9POJE5SgDqLscOedax+c0RhemSyLlB08IKsdsrTHwvHfx5wExbdm326NoZZPKChc4NoH74GOg0BHj8GeuHMTnI5nzjR0fFp/XuwIiRBholBzbNwuyBvU0FDUMMNTFoyy5RlP8DSzElKRj2YgXb37gC8/y87zTkFef7a0/dlATAmX4Vy6wQwaUdaYP8POLWB/qG4HREWt7pKEF71l49fwYio/PetCXJfIinKoqvHL1Z4+hRo8vKJ2Hs4huZ+wNLG3dz3DmLlUnufnj3vtIKlZlXMOPt0j8d61j3ZftXzaa6CQXY19tTJvV/DlVhw26bEeG3oDEGw5OtijzxEkXgJ7q7gudeMxj26t3ZrVmKj7TLTpOkJIErg6WLy5O6AbBbgAnmJU54Zgj9fEvD6syXQv6HrA1dR3yhxcKKu0bANdUBmRlY++OHHxRW+LUI1v5Usn/5znLY+DsFq0MvcrWvchQqoRkhZt37u75rf+eCeiioBWuWw4sySyenXOFpbmFquCUAG+2BPgEHfq+oKj1novu11MxD4kPvYFjqZzwPHqG0nYUS8G1mMbZD+pFBTnG3/7vPHFkAkRMszVlRU1wZCt/jktd7Q7Q7Vn3JrTkdYZVsaUQdFyNOg8INQd5is4RoMGDZ9EMZLd2bbLqLUC5rBePCt9KYmOyIY1wTCwwIugFuBoRemQiFThlKgzpSebPsor/fIrjUYvVxr0NXMjovk8WeUWuh80iMm4OPj2SApzUaSEOiKp75e3XNi0cNeZWi/wfBZXrcypAKVmEoZJVa7M/oTlyFXdngzwOVRoqu1Ue/OV12+vw+QSPn/IbytvmiIR1gwa7YtfSV1H3fuFVIiQend3EVUWbaJEth74tPqnRnscfjhrzLjEkXF5LA/+PpSSAAkavoLPRNn59rbNs3fUV/jkZpCVOKOOiI170cTAQTLwg7nrNBw5dBoOFGnsghONlE7bodt21JTUe5kd/EWP6xueIZPApSYWTSegKQfNs/Q2CKmFZbkft7W1LfCVftAffCEXIiQW/imwM+Lhxf7jh2sAilZKhC7b6+67gX+06vkO/YnmZI/4JTHTi2mFHuXtW48KTYck/ldPM2HPGL22wI0CBhj2yQ/HnWyhTfhZ3Td55Ojq1s4u7XOIBwO+fvRUjVGH14SFECFXcfrleK77X+rOZZjjBULEGkhk+LkiObcVH2s94W5n0vog865Kj8lkIsyLzTR7DXgaJvnKagvCI6m0coHIdLtDFrf2ohBpJA64a9gIEXJW704FF3eEhu0roRzgCGbHvuA4bGJpxQzJNa16vBhReOwO4U96fZkRx+DPMwfCSoiQRNiClsIWdIpncg0qlWW5tu1CmvsC0SDo3zowl+Jtw2fc4H4wFQ2TvUmRCruTQQEyjsNhJ0Q4NLRsi6L9zzpcWQLiBCT9jUdvy4A6D3b6Jw6E3efMlcLi21IXREbFbnY9sM61Pph79EEWRNubX5W3/zTUcfnBjCMc+oa1EF1iEF+Tl1sEWuP03mAYqu7BqHsKZqdDHc7OHbZOpWrZrpryeoP0Nb1Bc7jB7A9C1M0z9Ig0W9iHIfzZp2E2WAbjDKVSYECRaYEBtbGsgm8Bo0CkDy3CQXcXVFUpkxSpvKK5OT9QbXKwNIZb/34jRJcYx4JNaDdP87NA9xNSXqJdC+wsLaD5PnDxq7anpu+sPRBSgkKIvL8JUTer0CMRDISvEZaZCKkLQ8i+r1Hj7KXIYm2LrevnocydGCpG9Esh0piFsVoRTMQTkAcUzivT0oNptaG5gvXkYMr64qCSfIWG8sCx9msh0oaNJ/bMmHLFU7BcgjPGSEJvzU5oaWcUOEtKwUOBARPtWUOCRuTGppYeoyQ0+vv7dUAIketLQNeFyLj4H0Es2NUwNyX6sxDH0GnI5iECU2yQ//AcIVKjSHO1YofzJMU4K+0XhJb2aKoN8VkddERUNDuUoUgyy/LZkBA9FRIjTwJfnTjNxbe1SViU+W7hVlf6BuL9gBMi95eEXpR8FD+NIfRkQaFHw0vvTkNM06pNoZmLquxophWqrl2mz3W22o7pTeLgjkd7xoxoIybHrDHxzI8hiDGq9VzzNdN31x3R6gfidcALkZEv7cDNyZmxUZbrBNXZ8Pmxzt095QlAAcazWXsK/jOSxlDAGhQiP7iOkaSWePOdRGZmghfBKAJZrWSacmBKOzgbsxFcaY/YHLZ39WZd8wN1WDcdFKIAX0/Zooz7OAv7EHgJjnYHAX5P7USRPty3t3qN5gjm3mYgPQ8KUZBvs2hB2tzouIh1kIE80R0UhiBDvNnatM3F97jXDaTnQSEy6G1WrMh43WSyrPYEDqMsxhcUTvJUNxDKBoXIwLdYsnTyimizeb2nJBGSIJxKKSgcbyC6sAE1KEQGvwp0gh86JOEouOh2qxJcwQuiUDIhvzDTtWwg3HtWuQ6EkYVoDJjw4PyZC9PRQOtOAs/xGRXLpv3Bvby/Pw8KUS+8was/ri+52NW+UJHAPuL2482mhzAixa24Xz8OClEvvT605jd3tS6ApKHfOGKCEIaaM3NkUS+hDQnYQSHqRbajIH1WeCZRFaVvhCujbqlmdc5LvYi6T0EPLqz7iN14Wjdtivg1C0eha9Z/OB/x0P49lbf0d4XkoBD1kRBpaNChLiYhYY2JUufIrDpCEkkR5FrE3No9ZmnVYITb9f8BhSZnYemqCy4AAAAASUVORK5CYII="
]
end
# Compile after the debugger so we properly wrap it.
@before_compile Phoenix.Endpoint
@phoenix_render_errors var!(config)[:render_errors]
end
end
defp server() do
quote location: :keep, unquote: false do
@doc """
Returns the child specification to start the endpoint
under a supervision tree.
"""
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :supervisor
}
end
@doc """
Starts the endpoint supervision tree.
"""
def start_link(_opts \\ []) do
Phoenix.Endpoint.Supervisor.start_link(@otp_app, __MODULE__)
end
@doc """
Returns the endpoint configuration for `key`
Returns `default` if the key does not exist.
"""
def config(key, default \\ nil) do
case :ets.lookup(__MODULE__, key) do
[{^key, val}] -> val
[] -> default
end
end
@doc """
Reloads the configuration given the application environment changes.
"""
def config_change(changed, removed) do
Phoenix.Endpoint.Supervisor.config_change(__MODULE__, changed, removed)
end
@doc """
Generates the endpoint base URL without any path information.
It uses the configuration under `:url` to generate such.
"""
def url do
Phoenix.Config.cache(__MODULE__,
:__phoenix_url__,
&Phoenix.Endpoint.Supervisor.url/1)
end
@doc """
Generates the static URL without any path information.
It uses the configuration under `:static_url` to generate
such. It falls back to `:url` if `:static_url` is not set.
"""
def static_url do
Phoenix.Config.cache(__MODULE__,
:__phoenix_static_url__,
&Phoenix.Endpoint.Supervisor.static_url/1)
end
@doc """
Generates the endpoint base URL but as a `URI` struct.
It uses the configuration under `:url` to generate such.
Useful for manipulating the URL data and passing it to
URL helpers.
"""
def struct_url do
Phoenix.Config.cache(__MODULE__,
:__phoenix_struct_url__,
&Phoenix.Endpoint.Supervisor.struct_url/1)
end
@doc """
Returns the host for the given endpoint.
"""
def host do
Phoenix.Config.cache(__MODULE__,
:__phoenix_host__,
&Phoenix.Endpoint.Supervisor.host/1)
end
@doc """
Generates the path information when routing to this endpoint.
"""
def path(path) do
Phoenix.Config.cache(__MODULE__,
:__phoenix_path__,
&Phoenix.Endpoint.Supervisor.path/1) <> path
end
@doc """
Generates the script name.
"""
def script_name do
Phoenix.Config.cache(__MODULE__,
:__phoenix_script_name__,
&Phoenix.Endpoint.Supervisor.script_name/1)
end
@doc """
Generates a route to a static file in `priv/static`.
"""
def static_path(path) do
Phoenix.Config.cache(__MODULE__, :__phoenix_static__,
&Phoenix.Endpoint.Supervisor.static_path/1) <>
Phoenix.Config.cache(__MODULE__, {:__phoenix_static__, path},
&Phoenix.Endpoint.Supervisor.static_path(&1, path))
end
end
end
@doc false
def __force_ssl__(module, config) do
if force_ssl = config[:force_ssl] do
Keyword.put_new(force_ssl, :host, {module, :host, []})
end
end
@doc false
defmacro __before_compile__(%{module: module}) do
sockets = Module.get_attribute(module, :phoenix_sockets)
otp_app = Module.get_attribute(module, :otp_app)
instrumentation = Phoenix.Endpoint.Instrument.definstrument(otp_app, module)
dispatches =
for {path, socket, socket_opts} <- sockets,
{path, return} <- socket_paths(module, path, socket, socket_opts) do
quote do
def __dispatch__(unquote(path), _opts), do: unquote(Macro.escape(return))
end
end
quote do
defoverridable [call: 2]
# Inline render errors so we set the endpoint before calling it.
def call(conn, opts) do
conn = put_in conn.secret_key_base, config(:secret_key_base)
conn = put_in conn.script_name, script_name()
conn = Plug.Conn.put_private(conn, :phoenix_endpoint, __MODULE__)
try do
super(conn, opts)
rescue
e in Plug.Conn.WrapperError ->
%{conn: conn, kind: kind, reason: reason, stack: stack} = e
Phoenix.Endpoint.RenderErrors.__catch__(conn, kind, reason, stack, @phoenix_render_errors)
catch
kind, reason ->
stack = System.stacktrace()
Phoenix.Endpoint.RenderErrors.__catch__(conn, kind, reason, stack, @phoenix_render_errors)
end
end
@doc false
def __sockets__, do: unquote(Macro.escape(sockets))
@doc false
def __dispatch__(path, opts)
unquote(dispatches)
def __dispatch__(_, opts), do: {:plug, __MODULE__, opts}
unquote(instrumentation)
end
end
@doc false
defp socket_paths(endpoint, path, socket, opts) do
paths = []
websocket = Keyword.get(opts, :websocket, true)
longpoll = Keyword.get(opts, :longpoll, false)
paths =
if websocket do
triplet = {:websocket, socket, socket_config(websocket, Phoenix.Transports.WebSocket)}
[{socket_path(path, :websocket), triplet} | paths]
else
paths
end
paths =
if longpoll do
plug_init = {endpoint, socket, socket_config(longpoll, Phoenix.Transports.LongPoll)}
[{socket_path(path, :longpoll), {:plug, Phoenix.Transports.LongPoll, plug_init}} | paths]
else
paths
end
paths
end
defp socket_path(path, key) do
String.split(path, "/", trim: true) ++ [Atom.to_string(key)]
end
defp socket_config(true, module), do: module.default_config()
defp socket_config(config, module), do: Keyword.merge(module.default_config(), config)
## API
@doc """
Defines a websocket/longpoll mount-point for a socket.
Note: for backwards compatibility purposes, the `:websocket`
and `:longpoll` options only have an effect if the socket
given as argument has no `transport` declarations in it.
## Options
* `:websocket` - controls the websocket configuration.
Defaults to `true`. May be false or a keyword list
of options. See "Shared configuration" and
"WebSocket configuration" for the whole list
* `:longpoll` - controls the longpoll configuration.
Defaults to `false`. May be true or a keyword list
of options. See "Shared configuration" and
"Longpoll configuration" for the whole list
* `:shutdown` - the maximum shutdown time of each channel
when the endpoint is shutting down. Applies only to
channel-based sockets
## Examples
socket "/ws", MyApp.UserSocket
socket "/ws/admin", MyApp.AdminUserSocket,
longpoll: true,
websocket: [compress: true]
## Shared configuration
The configuration below can be given to both `:websocket` and
`:longpoll` keys:
* `:serializer` - a list of serializers for messages. See
`Phoenix.Socket` for more information
* `:transport_log` - if the transport layer itself should log and,
if so, the level
* `:check_origin` - if we should check the origin of requests when the
origin header is present. It defaults to true and, in such cases,
it will check against the host value in `YourApp.Endpoint.config(:url)[:host]`.
It may be set to `false` (not recommended) or to a list of explicitly
allowed origins.
check_origin: ["https://example.com",
"//another.com:888", "//other.com"]
Note: To connect from a native app, be sure to either have the native app
set an origin or allow any origin via `check_origin: false`
* `:code_reloader` - enable or disable the code reloader. Defaults to your
endpoint configuration
* `:connect_info` - a list of keys that represent data to be copied from
the transport to be made available in the user socket `connect/3` callback
The valid keys are:
* `:peer_data` - the result of `Plug.Conn.get_peer_data/1`
* `:x_headers` - all request headers that have an "x-" prefix
* `:uri` - a `%URI{}` with information from the conn
For example:
socket "/socket", AppWeb.UserSocket,
websocket: [
connect_info: [:peer_data, :x_headers, :uri]
]
## Websocket configuration
The following configuration applies only to `:websocket`.
* `:timeout` - the timeout for keeping websocket connections
open after it last received data, defaults to 60_000ms
## Longpoll configuration
The following configuration applies only to `:longpoll`:
* `:window_ms` - how long the client can wait for new messages
in its poll request
* `:pubsub_timeout_ms` - how long a request can wait for the
pubsub layer to respond
* `:crypto` - options for verifying and signing the token, accepted
by `Phoenix.Token`. By default tokens are valid for 2 weeks
"""
defmacro socket(path, module, opts \\ []) do
# Tear the alias to simply store the root in the AST.
# This will make Elixir unable to track the dependency
# between endpoint <-> socket and avoid recompiling the
# endpoint (alongside the whole project) whenever the
# socket changes.
module = tear_alias(module)
quote do
@phoenix_sockets {unquote(path), unquote(module), unquote(opts)}
end
end
@doc """
Instruments the given function using the instrumentation provided by
the given endpoint.
To specify the endpoint that will provide instrumentation, the first argument
can be:
* a module name - the endpoint itself
* a `Plug.Conn` struct - this macro will look for the endpoint module in the
`:private` field of the connection; if it's not there, `fun` will be
executed with no instrumentation
* a `Phoenix.Socket` struct - this macro will look for the endpoint module in the
`:endpoint` field of the socket; if it's not there, `fun` will be
executed with no instrumentation
Usually, users should prefer to instrument events using the `c:instrument/3`
macro defined in every Phoenix endpoint. This macro should only be used for
cases when the endpoint is dynamic and not known at compile time.
## Examples
endpoint = MyApp.Endpoint
Phoenix.Endpoint.instrument endpoint, :render_view, fn -> ... end
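When only a connection or socket is at hand (a sketch reusing the event above), the endpoint is looked up from the struct:
Phoenix.Endpoint.instrument conn, :render_view, %{view: "index.html"}, fn -> ... end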
"""
defmacro instrument(endpoint_or_conn_or_socket, event, runtime \\ Macro.escape(%{}), fun) do
compile = Phoenix.Endpoint.Instrument.strip_caller(__CALLER__) |> Macro.escape()
quote do
case Phoenix.Endpoint.Instrument.extract_endpoint(unquote(endpoint_or_conn_or_socket)) do
nil -> unquote(fun).()
endpoint -> endpoint.instrument(unquote(event), unquote(compile), unquote(runtime), unquote(fun))
end
end
end
@doc """
Checks if Endpoint's web server has been configured to start.
* `otp_app` - The OTP app running the endpoint, for example `:my_app`
* `endpoint` - The endpoint module, for example `MyApp.Endpoint`
## Examples
iex> Phoenix.Endpoint.server?(:my_app, MyApp.Endpoint)
true
"""
def server?(otp_app, endpoint) when is_atom(otp_app) and is_atom(endpoint) do
Phoenix.Endpoint.Supervisor.server?(otp_app, endpoint)
end
defp tear_alias({:__aliases__, meta, [h|t]}) do
alias = {:__aliases__, meta, [h]}
quote do
Module.concat([unquote(alias)|unquote(t)])
end
end
defp tear_alias(other), do: other
end
| 46.130123 | 10,138 | 0.74191 |
9e36486ee4cff7214fbbbdabe8a75349cac2436e | 1,465 | ex | Elixir | clients/data_migration/lib/google_api/data_migration/v1beta1/model/vm_selection_config.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/data_migration/lib/google_api/data_migration/v1beta1/model/vm_selection_config.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/data_migration/lib/google_api/data_migration/v1beta1/model/vm_selection_config.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DataMigration.V1beta1.Model.VmSelectionConfig do
@moduledoc """
VM selection configuration message
## Attributes
* `vmZone` (*type:* `String.t`, *default:* `nil`) - Required. The Google Cloud Platform zone where the VM is located.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:vmZone => String.t()
}
field(:vmZone)
end
defimpl Poison.Decoder, for: GoogleApi.DataMigration.V1beta1.Model.VmSelectionConfig do
def decode(value, options) do
GoogleApi.DataMigration.V1beta1.Model.VmSelectionConfig.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DataMigration.V1beta1.Model.VmSelectionConfig do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 31.170213 | 115 | 0.745392 |
9e3679d9311c39b1312d6c312e4fb312c6d9db18 | 588 | exs | Elixir | demo/sitBRU_Demo/test/views/error_view_test.exs | ceedee666/sitbru_2016 | b23b4a81a1daec87801c6435ff04d32be043e0b9 | [
"MIT"
] | null | null | null | demo/sitBRU_Demo/test/views/error_view_test.exs | ceedee666/sitbru_2016 | b23b4a81a1daec87801c6435ff04d32be043e0b9 | [
"MIT"
] | null | null | null | demo/sitBRU_Demo/test/views/error_view_test.exs | ceedee666/sitbru_2016 | b23b4a81a1daec87801c6435ff04d32be043e0b9 | [
"MIT"
] | null | null | null | defmodule SitBRU_Demo.ErrorViewTest do
use SitBRU_Demo.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(SitBRU_Demo.ErrorView, "404.html", []) ==
"Page not found"
end
test "render 500.html" do
assert render_to_string(SitBRU_Demo.ErrorView, "500.html", []) ==
"Server internal error"
end
test "render any other" do
assert render_to_string(SitBRU_Demo.ErrorView, "505.html", []) ==
"Server internal error"
end
end
| 26.727273 | 69 | 0.687075 |
9e36856550579601f7e146184fd71ded3aa133ad | 390 | ex | Elixir | web/concerns/company_concern.ex | simwms/apiv4 | c3da7407eaf3580b759f49726028439b4b8ea9d0 | [
"MIT"
] | 2 | 2016-02-25T20:12:35.000Z | 2018-01-03T00:03:12.000Z | web/concerns/company_concern.ex | simwms/apiv4 | c3da7407eaf3580b759f49726028439b4b8ea9d0 | [
"MIT"
] | 1 | 2016-01-11T04:50:39.000Z | 2016-01-12T05:00:08.000Z | web/concerns/company_concern.ex | simwms/apiv4 | c3da7407eaf3580b759f49726028439b4b8ea9d0 | [
"MIT"
] | null | null | null | defmodule Apiv4.CompanyConcern do
import Ecto.Changeset
alias Apiv4.Repo
alias Apiv4.Company
def punch_company_name(changeset) do
changeset
|> get_field(:company_id)
|> case do
nil -> nil
id -> Repo.get(Company, id)
end
|> case do
nil -> changeset
%{name: name} ->
changeset |> put_change(:company_name, name)
end
end
end | 20.526316 | 52 | 0.623077 |
9e369c216368a9baac9efaf4169b41a9c7253655 | 4,049 | ex | Elixir | lib/ex_dimensions/math.ex | fast-radius/ex_dimensions | fe2c548d363cdce2dad9152e6ebc19fb0166efa3 | [
"BSD-3-Clause"
] | 5 | 2019-11-23T23:51:47.000Z | 2021-12-08T20:48:01.000Z | lib/ex_dimensions/math.ex | fast-radius/ex_dimensions | fe2c548d363cdce2dad9152e6ebc19fb0166efa3 | [
"BSD-3-Clause"
] | 2 | 2020-03-03T18:53:33.000Z | 2021-11-16T17:54:24.000Z | lib/ex_dimensions/math.ex | fast-radius/ex_dimensions | fe2c548d363cdce2dad9152e6ebc19fb0166efa3 | [
"BSD-3-Clause"
] | null | null | null | defmodule ExDimensions.Math do
@moduledoc """
This module contains the required macros to perform math on unit quantities.
It must be `use`d in the current scope for the math functions to work properly.
Math on standard values will still work as expected if this module is `use`d.
Using these math functions will ensure proper dimensional analysis is performed
on quantities. The following rules are enforced:
* Quantities with different units may not be added or subtracted
* Quantities may not be added or subtracted with plain scalar numbers
* Quantities may be multiplied and divided with scalars
* Quantities with different units that are multiplied and divided will have
their units changed as needed
Any math operations that violate these rules will result in an ArithmeticError
that will bubble up to the caller.
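For illustration only (the unit atoms are made up; the struct shape matches the function heads below), inside a module that calls `use ExDimensions.Math`:
a = %ExDimensions.Quantity{value: 2, units: [:meters], denom: []}
b = %ExDimensions.Quantity{value: 3, units: [:meters], denom: []}
a + b #=> %ExDimensions.Quantity{value: 5, units: [:meters], denom: []}
a + 1 #=> raises ArithmeticError, since scalars and quantities may not be added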
"""
@moduledoc since: "0.1.0"
defmacro __using__(_opts) do
quote do
import Kernel,
except: [
{:+, 2},
{:-, 2},
{:*, 2},
{:/, 2},
{:<, 2},
{:<=, 2},
{:>, 2},
{:>=, 2},
{:==, 2}
]
def %{value: v1, units: u, denom: d} + %{value: v2, units: u, denom: d} do
%ExDimensions.Quantity{value: v1 + v2, units: u, denom: d}
end
def %{value: v1, units: u, denom: d} - %{value: v2, units: u, denom: d} do
%ExDimensions.Quantity{value: v1 - v2, units: u, denom: d}
end
def %{value: v1, units: u, denom: d} * x when is_number(x) do
%ExDimensions.Quantity{value: v1 * x, units: u, denom: d}
end
def %{value: v1, units: u, denom: []} *
%{value: v2, units: u2, denom: []} do
%ExDimensions.Quantity{value: v1 * v2, units: u ++ u2, denom: []}
end
def %{value: v1, units: u, denom: d} *
%{value: v2, units: u2, denom: d2} do
%ExDimensions.Quantity{
value: v1 * v2,
units: u ++ u2,
denom: d ++ d2
}
end
def %{value: v1, units: u, denom: d} / x when is_number(x) do
%ExDimensions.Quantity{value: v1 / x, units: u, denom: d}
end
def %{value: v1, units: [u | u_rest], denom: []} /
%{value: v2, units: [u | u_rest], denom: []} do
v1 / v2
end
def %{value: v1, units: u, denom: []} / %{value: v2, units: u2, denom: []} do
{units, denom} = cancel_units(u, u2)
%ExDimensions.Quantity{value: v1 / v2, units: units, denom: denom}
end
def %{value: v1, units: u, denom: d} > %{value: v2, units: u, denom: d} do
v1 > v2
end
def %{value: v1, units: u, denom: d} >= %{value: v2, units: u, denom: d} do
v1 >= v2
end
def %{value: v1, units: u, denom: d} < %{value: v2, units: u, denom: d} do
v1 < v2
end
def %{value: v1, units: u, denom: d} <= %{value: v2, units: u, denom: d} do
v1 <= v2
end
def %{value: v1, units: u, denom: d} == %{value: v2, units: u, denom: d} do
v1 == v2
end
defp cancel_units(u1, u2) do
u2
|> Enum.reduce({u1, u2}, fn u, {num, denom} ->
if Enum.member?(num, u) do
{List.delete(num, u), List.delete(denom, u)}
else
{num, denom}
end
end)
end
def u ^^^ num do
List.duplicate(u, num)
end
def left + right do
Kernel.+(left, right)
end
def left - right do
Kernel.-(left, right)
end
def left * right do
Kernel.*(left, right)
end
def left / right do
Kernel./(left, right)
end
def left < right do
Kernel.<(left, right)
end
def left <= right do
Kernel.<=(left, right)
end
def left > right do
Kernel.>(left, right)
end
def left >= right do
Kernel.>=(left, right)
end
def left == right do
Kernel.==(left, right)
end
end
end
end
| 27.174497 | 83 | 0.527538 |
9e36a05254957c0e2aa285c4e8dc7fa5ea228515 | 10,209 | ex | Elixir | lib/farmbot/farm_event/manager.ex | defcon201/farmbot_os | acc22702afbb13be461c9d80591604958117ff75 | [
"MIT"
] | null | null | null | lib/farmbot/farm_event/manager.ex | defcon201/farmbot_os | acc22702afbb13be461c9d80591604958117ff75 | [
"MIT"
] | null | null | null | lib/farmbot/farm_event/manager.ex | defcon201/farmbot_os | acc22702afbb13be461c9d80591604958117ff75 | [
"MIT"
] | 1 | 2020-12-16T16:39:32.000Z | 2020-12-16T16:39:32.000Z | defmodule Farmbot.FarmEvent.Manager do
@moduledoc """
Manages execution of FarmEvents.
## Rules for FarmEvent execution.
* Regimen
* ignore `end_time`.
* ignore calendar.
* if `start_time` is more than 60 seconds past due, assume it already started, and don't start it again.
* Sequence
* if `start_time` is late, check the calendar.
* for each item in the calendar, check if its event is more than 60 seconds in the past; if not, execute it.
* if there is only one event in the calendar, ignore the `end_time`
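As a rough sketch of the 60 second cutoff used below (the timestamps are made up):
start_time = Timex.parse!("2018-01-01T10:00:00Z", "{ISO:Extended}")
now = Timex.parse!("2018-01-01T10:02:00Z", "{ISO:Extended}")
# 120 seconds past due, so the event counts as already started and is skipped
DateTime.to_unix(now, :second) - DateTime.to_unix(start_time, :second) > 60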
"""
# credo:disable-for-this-file Credo.Check.Refactor.FunctionArity
use GenServer
use Farmbot.Logger
alias Farmbot.FarmEvent.Execution
alias Farmbot.Asset
alias Farmbot.Asset.{FarmEvent, Sequence, Regimen}
# @checkup_time 100
@checkup_time 30_000
def register_events(event_list) do
GenServer.call(__MODULE__, {:register_events, event_list}, 10_000)
end
## GenServer
defmodule State do
@moduledoc false
defstruct [timer: nil, last_time_index: %{}, events: [], checkup: nil]
end
@doc false
def start_link do
GenServer.start_link(__MODULE__, [], [name: __MODULE__])
end
def init([]) do
send self(), :checkup
{:ok, struct(State)}
end
def terminate(reason, _state) do
Logger.error 1, "FarmEvent Manager terminated: #{inspect reason}"
end
def handle_call({:register_events, events}, _, state) do
maybe_farm_event_log "Reindexed FarmEvents"
if match?({_, _}, state.checkup) do
Process.exit(state.checkup |> elem(0), {:success, %{state | events: events}})
end
if state.timer do
Process.cancel_timer(state.timer)
timer = Process.send_after(self(), :checkup, @checkup_time)
{:reply, :ok, %{state | events: events, timer: timer}}
else
{:reply, :ok, %{state | events: events}}
end
end
def handle_info(:checkup, state) do
checkup = spawn_monitor __MODULE__, :async_checkup, [self(), state]
{:noreply, %{state | timer: nil, checkup: checkup}}
end
def handle_info({:DOWN, _, :process, _, {:success, new_state}}, _old_state) do
timer = Process.send_after(self(), :checkup, @checkup_time)
{:noreply, %{new_state | timer: timer, checkup: nil}}
end
def handle_info({:DOWN, _, :process, _, error}, state) do
Logger.error 1, "Farmevent checkup process died: #{inspect error}"
timer = Process.send_after(self(), :checkup, @checkup_time)
{:noreply, %{state | timer: timer, checkup: nil}}
end
def async_checkup(_manager, state) do
now = get_now()
# maybe_farm_event_log "Rebuilding calendar."
all_events = Enum.map(state.events, &FarmEvent.build_calendar(&1))
# maybe_farm_event_log "Rebuilding calendar complete."
# do checkup is the bulk of the work.
{late_events, new} = do_checkup(all_events, now, state)
#TODO(Connor) Conditionally start events based on some state info.
unless Enum.empty?(late_events) do
Logger.debug 3, "Time for events: #{inspect late_events} to run at: #{now.hour}:#{now.minute}"
start_events(late_events, now)
end
exit({:success, %{new | events: all_events}})
end
defp do_checkup(list, time, late_events \\ [], state)
defp do_checkup([], _now, late_events, state), do: {late_events, state}
defp do_checkup([farm_event | rest], now, late_events, state) do
# new_late will be a executable event (Regimen or Sequence.)
{new_late_event, last_time} = check_event(farm_event, now, state.last_time_index[farm_event.id])
# update state.
new_state = %{state | last_time_index: Map.put(state.last_time_index, farm_event.id, last_time)}
case new_late_event do
# if `new_late_event` is nil, don't accumulate it.
nil -> do_checkup(rest, now, late_events, new_state)
# if there is a new event, accumulate it.
event -> do_checkup(rest, now, [event | late_events], new_state)
end
end
defp check_event(%FarmEvent{} = f, now, last_time) do
# Get the executable out of the database this may fail.
mod = Module.safe_concat([f.executable_type])
event = lookup!(mod, f.executable_id)
# build a local start time and end time
start_time = Timex.parse! f.start_time, "{ISO:Extended}"
end_time = Timex.parse! f.end_time, "{ISO:Extended}"
# start_time = f.start_time
# end_time = f.end_time
# get local bool of if the event is started and finished.
started? = Timex.after? now, start_time
finished? = Timex.after? now, end_time
case mod do
Regimen -> maybe_start_regimen(started?, start_time, last_time, event, now)
Sequence -> maybe_start_sequence(started?, finished?, f, last_time, event, now)
end
end
defp maybe_start_regimen(started?, start_time, last_time, event, now)
defp maybe_start_regimen(true = _started?, start_time, last_time, event, now) do
case is_too_old?(now, start_time) do
true ->
maybe_farm_event_log "regimen #{event.name} (#{event.id}) is too old to start or already started."
{nil, last_time}
false ->
maybe_farm_event_log "regimen #{event.name} (#{event.id}) starting."
{event, now}
end
end
defp maybe_start_regimen(false = _started?, start_time, last_time, event, _) do
maybe_farm_event_log "regimen #{event.name} (#{event.id}) is not started yet. (#{inspect start_time}) (#{inspect Timex.now()})"
{nil, last_time}
end
defp lookup!(module, sr_id) when is_atom(module) and is_number(sr_id) do
case module do
Sequence -> Asset.get_sequence_by_id!(sr_id)
Regimen -> Asset.get_regimen_by_id!(sr_id)
_ -> raise "unknown executable type: #{module}"
end
end
# signals the start of a sequence based on the described logic.
defp maybe_start_sequence(started?, finished?, farm_event, last_time, event, now)
# We only want to check if the sequence is started, and not finished.
defp maybe_start_sequence(true = _started?, false = _finished?, farm_event, last_time, event, now) do
{run?, next_time} = should_run_sequence?(farm_event.calendar, last_time, now)
case run? do
true -> {event, next_time}
false -> {nil, last_time}
end
end
# if `farm_event.time_unit` is "never" we can't use the `end_time`.
# if we have no `last_time`, time to execute.
defp maybe_start_sequence(true = _started?, _, %{time_unit: "never"} = f, nil = _last_time, event, now) do
maybe_farm_event_log "Ignoring end_time."
case should_run_sequence?(f.calendar, nil, now) do
{true, next} -> {event, next}
{false, _} -> {nil, nil}
end
end
# if started is false, the event isn't ready to be executed.
defp maybe_start_sequence(false = _started?, _fin, _farm_event, last_time, event, _now) do
maybe_farm_event_log "sequence #{event.name} (#{event.id}) is not started yet."
{nil, last_time}
end
# if the event is finished (but not a "never" time_unit), we don't execute.
defp maybe_start_sequence(_started?, true = _finished?, _farm_event, last_time, event, _now) do
maybe_farm_event_log "sequence #{event.name} (#{event.id}) is finished."
{nil, last_time}
end
# Checks if we shoudl run a sequence or not. returns {event | nil, time | nil}
defp should_run_sequence?(calendar, last_time, now)
# if there is no last time, check if time is passed now within 60 seconds.
defp should_run_sequence?([first_time | _], nil, now) do
maybe_farm_event_log "Checking sequence event that hasn't run before #{first_time}"
# convert the first_time to a DateTime
dt = Timex.parse! first_time, "{ISO:Extended}"
# if now is after the time, we are in fact late
if Timex.after?(now, dt) do
{true, now}
else
# make sure to return nil as the last time because it stil hasnt executed yet.
maybe_farm_event_log "Sequence Event not ready yet."
{false, nil}
end
end
defp should_run_sequence?(nil, last_time, now) do
maybe_farm_event_log "Checking sequence with no calendar."
if is_nil(last_time) do
{true, now}
else
{false, last_time}
end
end
defp should_run_sequence?(calendar, last_time, now) do
# get rid of all the items that happened before last_time
filtered_calendar = Enum.filter(calendar, fn(iso_time) ->
dt = Timex.parse! iso_time, "{ISO:Extended}"
# we only want this time if it happened after the last_time
Timex.after?(dt, last_time)
end)
# if after filtering, there are events that need to be run
# check if they are older than a minute ago,
case filtered_calendar do
[iso_time | _] ->
dt = Timex.parse! iso_time, "{ISO:Extended}"
if Timex.after?(now, dt) do
{true, dt}
# too_old? = is_too_old?(now, dt)
# if too_old?, do: {false, last_time}, else: {true, dt}
else
maybe_farm_event_log "Sequence Event not ready yet."
{false, dt}
end
[] ->
maybe_farm_event_log "No items in calendar."
{false, last_time}
end
end
# Enumeration is complete.
defp start_events([], _now), do: :ok
# Enumerate the events to be started.
defp start_events([event | rest], now) do
# Spawn to be non blocking here. Maybe link to this process?
spawn fn() -> Execution.execute_event(event, now) end
# Continue enumeration.
start_events(rest, now)
end
# is then more than 1 minute in the past?
defp is_too_old?(now, then) do
time_str_fun = fn(dt) -> "#{dt.hour}:#{dt.minute}:#{dt.second}" end
seconds = DateTime.to_unix(now, :second) - DateTime.to_unix(then, :second)
c = seconds > 60 # not in MS here
maybe_farm_event_log "is checking #{time_str_fun.(now)} - #{time_str_fun.(then)} = #{seconds} seconds ago. is_too_old? => #{c}"
c
end
defp get_now(), do: Timex.now()
defp maybe_farm_event_log(message) do
if Application.get_env(:farmbot, :farm_event_debug_log) do
Logger.debug 3, message
else
:ok
end
end
@doc "Enable or disbale debug logs for farmevents."
def debug_logs(bool \\ true) when is_boolean(bool) do
Application.put_env(:farmbot, :farm_event_debug_log, bool)
end
end
| 35.447917 | 131 | 0.674209 |
9e36a2434555f26db8a7012aaf3f963fccb9fb19 | 459 | ex | Elixir | lib/subspace_product_api/model/v1_transport_type.ex | subspace-com/subspace_elixir_openapi_client | 9e52d31edb228de2d93f2f988c98207106241a32 | [
"MIT"
] | null | null | null | lib/subspace_product_api/model/v1_transport_type.ex | subspace-com/subspace_elixir_openapi_client | 9e52d31edb228de2d93f2f988c98207106241a32 | [
"MIT"
] | null | null | null | lib/subspace_product_api/model/v1_transport_type.ex | subspace-com/subspace_elixir_openapi_client | 9e52d31edb228de2d93f2f988c98207106241a32 | [
"MIT"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule SubspaceProductAPI.Model.V1TransportType do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
]
@type t :: %__MODULE__{
}
end
defimpl Poison.Decoder, for: SubspaceProductAPI.Model.V1TransportType do
def decode(value, _options) do
value
end
end
| 17.653846 | 91 | 0.703704 |
9e36a713f5c04bafbb2ad54757d70c1e938d4cf0 | 697 | ex | Elixir | apps/bbq_ui/web/gettext.ex | easco/ex_bbq | d736204bb124ea256907629f5025c3abaf08c0bb | [
"BSD-3-Clause"
] | 2 | 2016-09-22T13:32:35.000Z | 2017-02-17T20:26:50.000Z | apps/bbq_ui/web/gettext.ex | easco/ex_bbq | d736204bb124ea256907629f5025c3abaf08c0bb | [
"BSD-3-Clause"
] | null | null | null | apps/bbq_ui/web/gettext.ex | easco/ex_bbq | d736204bb124ea256907629f5025c3abaf08c0bb | [
"BSD-3-Clause"
] | null | null | null | defmodule BbqUi.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import BbqUi.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :bbq_ui
end
| 27.88 | 72 | 0.675753 |
9e36be1cccab28433ada802feef6749d43451de9 | 890 | ex | Elixir | lib/okr_app/analytics/okr_load_query.ex | sb8244/okr_app_pub | 933872107bd13390a0a5ea119d7997d4cb5ea7db | [
"MIT"
] | 12 | 2019-05-10T21:48:06.000Z | 2021-11-07T14:04:30.000Z | lib/okr_app/analytics/okr_load_query.ex | sb8244/okr_app_pub | 933872107bd13390a0a5ea119d7997d4cb5ea7db | [
"MIT"
] | 2 | 2019-05-14T19:07:10.000Z | 2019-05-20T21:06:27.000Z | lib/okr_app/analytics/okr_load_query.ex | sb8244/okr_app_pub | 933872107bd13390a0a5ea119d7997d4cb5ea7db | [
"MIT"
] | 3 | 2019-05-19T18:24:20.000Z | 2019-10-31T20:29:12.000Z | defmodule OkrApp.Analytics.OkrLoadQuery do
alias OkrApp.Repo
import Ecto.Query
alias OkrApp.Analytics.{AnalyticsEvent}
def distinct_okr_views(owner: %{id: owner_id}, days: days) when is_integer(days) do
date = Timex.now() |> Timex.shift(days: -days)
from(
a in AnalyticsEvent,
where: a.type == "okr_view",
where: a.inserted_at > ^date,
where: fragment("?->>'owner_id' = ?", a.metadata, ^owner_id),
select: count(a.user_id, :distinct)
)
|> Repo.one()
end
def total_okr_views(owner: %{id: owner_id}, days: days) when is_integer(days) do
date = Timex.now() |> Timex.shift(days: -days)
from(
a in AnalyticsEvent,
where: a.type == "okr_view",
where: a.inserted_at > ^date,
where: fragment("?->>'owner_id' = ?", a.metadata, ^owner_id),
select: count(a.user_id)
)
|> Repo.one()
end
end
| 26.969697 | 85 | 0.620225 |
9e36d5a837b9ff2cf446be87cf7a7c2c31830be8 | 4,966 | ex | Elixir | lib/elixir_script/compiler.ex | beadsland/elixirscript | cb9698ad96075fcbe87b3933009d7ab2a2c939de | [
"MIT"
] | 854 | 2017-02-19T01:50:45.000Z | 2022-03-14T18:55:38.000Z | lib/elixir_script/compiler.ex | beadsland/elixirscript | cb9698ad96075fcbe87b3933009d7ab2a2c939de | [
"MIT"
] | 210 | 2017-02-20T17:44:39.000Z | 2020-08-01T10:18:07.000Z | lib/elixir_script/compiler.ex | beadsland/elixirscript | cb9698ad96075fcbe87b3933009d7ab2a2c939de | [
"MIT"
] | 56 | 2017-02-19T14:50:05.000Z | 2022-02-25T17:25:30.000Z | defmodule ElixirScript.Compiler do
@moduledoc """
The entry point for the ElixirScript compilation process.
Takes the given module(s) and compiles them and all modules
and functions they use into JavaScript.
It will also take a path to Elixir files.
"""
@doc """
Takes either a module name, list of module names, or a path as
the entry point(s) of an application/library. From there
it will determine which modules and functions need
to be compiled.
Available options are:
* `output`: The path of the generated JavaScript file.
If output is `nil`, then generated code is sent to standard out
If output is a path, the generated code is placed in that path.
If path ends in `.js` then that will be the name of the file.
If a directory is given, file will be named `elixirscript.build.js`
* `root`: Optional root for imports of FFI JavaScript modules. Defaults to `.`.
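For example (a sketch; the module and paths are hypothetical):
ElixirScript.Compiler.compile(MyApp.Main, output: "priv/static/js")
ElixirScript.Compiler.compile("lib/my_app", output: "priv/static/js/app.js")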
"""
alias ElixirScript.{
State,
Translate,
FindUsedModules,
FindUsedFunctions,
Output
}
alias ElixirScript.ModuleSystems.ES
alias Kernel.ParallelCompiler
@type compiler_input ::
atom
| [atom]
| binary
@spec compile(compiler_input, []) :: map
def compile(path, opts \\ [])
def compile(path, opts) when is_binary(path) do
opts = build_compiler_options(opts)
{:ok, pid} = State.start_link(opts)
path =
if String.ends_with?(path, [".ex", ".exs"]) do
path
else
Path.join([path, "**", "*.{ex,exs}"])
end
files = Path.wildcard(path)
ParallelCompiler.compile(files, each_module: &on_module_compile(pid, &1, &2, &3))
entry_modules =
pid
|> State.get_in_memory_modules()
|> Keyword.keys()
do_compile(entry_modules, pid, opts)
end
def compile(entry_modules, opts) do
opts = build_compiler_options(opts)
{:ok, pid} = State.start_link(opts)
entry_modules = List.wrap(entry_modules)
do_compile(entry_modules, pid, opts)
end
defp do_compile(entry_modules, pid, opts) do
FindUsedModules.execute(entry_modules, pid)
if opts.remove_unused_functions do
FindUsedFunctions.execute(entry_modules, pid)
end
modules = State.list_modules(pid)
Translate.execute(modules, pid)
modules = State.list_modules(pid)
result = Output.execute(modules, pid, opts)
State.stop(pid)
transform_output(modules, result, opts)
end
defp build_compiler_options(opts) do
remove_used_functions? = Keyword.get(opts, :remove_unused_functions, true)
default_options =
Map.new()
|> Map.put(:output, Keyword.get(opts, :output))
|> Map.put(:format, :es)
|> Map.put(:root, Keyword.get(opts, :root, "."))
|> Map.put(:remove_unused_functions, remove_used_functions?)
options = default_options
Map.put(options, :module_formatter, ES)
end
defp on_module_compile(pid, _file, module, beam) do
State.put_in_memory_module(pid, module, beam)
end
defp transform_output(modules, compiled_js, opts) do
output_path =
cond do
opts.output == nil or opts.output == :stdout ->
""
File.dir?(opts.output) ->
opts.output
true ->
Path.dirname(opts.output)
end
data = %{
ElixirScript.Core => %{
references: [],
last_modified: nil,
beam_path: nil,
source: nil,
js_path: Path.join(output_path, "ElixirScript.Core.js"),
diagnostics: [],
js_code: nil,
type: :ffi
}
}
Enum.reduce(modules, data, fn {module, info}, current_data ->
diagnostics =
Map.get(info, :diagnostics, [])
|> Enum.map(fn x ->
Map.put(x, :file, Map.get(info, :file))
end)
info = %{
references: Map.get(info, :used_modules, []),
last_modified: Map.get(info, :last_modified, nil),
beam_path: Map.get(info, :beam_path),
source: Map.get(info, :file),
js_path: Path.join(output_path, "#{module}.js"),
diagnostics: diagnostics
}
info =
case Keyword.get(compiled_js, module) do
[js_input_path, js_output_path] ->
last_modified =
case File.stat(js_input_path, time: :posix) do
{:ok, file_info} ->
file_info.mtime
_ ->
nil
end
info
|> Map.put(:last_modified, last_modified)
|> Map.put(:beam_path, nil)
|> Map.put(:source, js_input_path)
|> Map.put(:js_path, js_output_path)
|> Map.put(:js_code, nil)
|> Map.put(:type, :ffi)
js_code ->
info
|> Map.put(:js_path, Path.join(output_path, "#{module}.js"))
|> Map.put(:js_code, js_code)
|> Map.put(:type, :module)
end
Map.put(current_data, module, info)
end)
end
end
| 26.55615 | 85 | 0.60733 |
9e36f9db3cddad21338b49c38ae3c24e98474c59 | 502 | exs | Elixir | test/controllers/page_controller_test.exs | zcdunn/elixirstatus-web | 9df758dce01c676403effdeb3231db0529285e6c | [
"MIT"
] | 299 | 2015-06-24T09:14:27.000Z | 2022-03-03T13:31:59.000Z | test/controllers/page_controller_test.exs | zcdunn/elixirstatus-web | 9df758dce01c676403effdeb3231db0529285e6c | [
"MIT"
] | 63 | 2015-07-04T19:42:12.000Z | 2021-12-10T14:27:28.000Z | test/controllers/page_controller_test.exs | zcdunn/elixirstatus-web | 9df758dce01c676403effdeb3231db0529285e6c | [
"MIT"
] | 49 | 2015-07-06T13:42:43.000Z | 2021-12-22T21:38:04.000Z | defmodule ElixirStatus.PageControllerTest do
use ElixirStatus.ConnCase
use ElixirStatus.ConnLoginHelper
test "GET /" do
conn = get(build_conn(), "/")
assert html_response(conn, 200) =~ "elixirstatus"
end
test "GET / (logged in)" do
conn =
logged_in_conn()
|> get("/")
assert html_response(conn, 200)
end
@tag logged_in: true
test "GET /about (logged in)" do
conn =
logged_in_conn()
|> get("/about")
assert logged_in?(conn)
end
end
| 18.592593 | 53 | 0.631474 |
9e3716bed9a83dd00335ada4f8ffc55bbf81cc73 | 2,711 | ex | Elixir | lib/avrora/codec/object_container_file.ex | MichalDolata/avrora | 6c025da91530dff70714bdab18ceacbe432e84b5 | [
"MIT"
] | null | null | null | lib/avrora/codec/object_container_file.ex | MichalDolata/avrora | 6c025da91530dff70714bdab18ceacbe432e84b5 | [
"MIT"
] | null | null | null | lib/avrora/codec/object_container_file.ex | MichalDolata/avrora | 6c025da91530dff70714bdab18ceacbe432e84b5 | [
"MIT"
] | null | null | null | defmodule Avrora.Codec.ObjectContainerFile do
@moduledoc """
An Avro encoder/decoder working with Object Container File formatted Avro messages.
It works with a binary format, which includes a required schema inside the message.
See more about [Object Container File](https://avro.apache.org/docs/1.8.1/spec.html#Object+Container+Files).
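A quick sketch of typical calls (`ocf_binary` stands for any binary produced by an OCF writer):
{:ok, decoded} = Avrora.Codec.ObjectContainerFile.decode(ocf_binary)
{:ok, schema} = Avrora.Codec.ObjectContainerFile.extract_schema(ocf_binary)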
"""
@behaviour Avrora.Codec
@magic_bytes <<"Obj", 1>>
@meta_schema_key "avro.schema"
require Logger
alias Avrora.{AvroDecoderOptions, Codec, Config, Mapper, Resolver, Schema}
@impl true
def is_decodable(payload) when is_binary(payload) do
case payload do
<<@magic_bytes, _::binary>> -> true
_ -> false
end
end
@impl true
def extract_schema(payload) when is_binary(payload) do
with {:ok, {headers, {_, _, _, _, _, _, full_name, _} = erlavro, _}} <- do_decode(payload),
{:ok, nil} <- memory_storage().get(full_name),
{:ok, json} <- extract_json_schema(headers),
{:ok, schema} <- Schema.from_erlavro(erlavro, json: json) do
memory_storage().put(full_name, schema)
end
end
@impl true
def decode(payload) when is_binary(payload) do
with {:ok, {_, _, decoded}} <- do_decode(payload), do: {:ok, Mapper.to_map(decoded)}
end
@impl true
def decode(payload, schema: _schema) when is_binary(payload) do
Logger.warn("message already contains embedded schema, given schema will be ignored")
decode(payload)
end
@impl true
def encode(payload, schema: schema) when is_map(payload) do
with {:ok, schema} <- resolve(schema),
{:ok, body} <- Codec.Plain.encode(payload, schema: schema),
{:ok, schema} <- Schema.to_erlavro(schema) do
do_encode(body, schema)
end
end
defp resolve(schema) do
cond do
is_binary(schema.full_name) && is_reference(schema.lookup_table) -> {:ok, schema}
is_binary(schema.full_name) -> Resolver.resolve(schema.full_name)
true -> {:error, :unusable_schema}
end
end
defp do_decode(payload) do
{:ok, :avro_ocf.decode_binary(payload, AvroDecoderOptions.options())}
rescue
MatchError -> {:error, :schema_mismatch}
error -> {:error, error}
end
defp do_encode(payload, schema) do
encoded =
schema
|> :avro_ocf.make_header()
|> :avro_ocf.make_ocf(List.wrap(payload))
|> :erlang.list_to_binary()
{:ok, encoded}
rescue
error -> {:error, error}
end
defp extract_json_schema(headers) do
with {_, _, meta, _} <- headers,
{@meta_schema_key, json} <- Enum.find(meta, fn {key, _} -> key == @meta_schema_key end) do
{:ok, json}
end
end
defp memory_storage, do: Config.self().memory_storage()
end
| 30.122222 | 110 | 0.663224 |
9e37396e1474dba7737289a62f9948cccd6b493a | 2,781 | ex | Elixir | lib/mix/tasks/ecto.migrations.ex | hauleth/ecto_sql | 1d7f4b73bfa04e02a26bba8b3ea79a457850af0f | [
"Apache-2.0"
] | 1 | 2020-10-18T09:11:56.000Z | 2020-10-18T09:11:56.000Z | lib/mix/tasks/ecto.migrations.ex | hauleth/ecto_sql | 1d7f4b73bfa04e02a26bba8b3ea79a457850af0f | [
"Apache-2.0"
] | 3 | 2021-06-20T14:51:14.000Z | 2021-06-25T00:56:11.000Z | lib/mix/tasks/ecto.migrations.ex | hauleth/ecto_sql | 1d7f4b73bfa04e02a26bba8b3ea79a457850af0f | [
"Apache-2.0"
] | 1 | 2019-05-31T02:43:25.000Z | 2019-05-31T02:43:25.000Z | defmodule Mix.Tasks.Ecto.Migrations do
use Mix.Task
import Mix.Ecto
import Mix.EctoSQL
@shortdoc "Displays the repository migration status"
@aliases [
r: :repo
]
@switches [
repo: [:keep, :string],
no_compile: :boolean,
no_deps_check: :boolean,
migrations_path: :keep
]
@moduledoc """
Displays the up / down migration status for the given repository.
The repository must be set under `:ecto_repos` in the
current app configuration or given via the `-r` option.
By default, migrations are expected at "priv/YOUR_REPO/migrations"
directory of the current application but it can be configured
by specifying the `:priv` key under the repository configuration.
If the repository has not been started yet, one will be
started outside our application supervision tree and shutdown
afterwards.
## Examples
mix ecto.migrations
mix ecto.migrations -r Custom.Repo
## Command line options
* `-r`, `--repo` - the repo to obtain the status for
* `--no-compile` - does not compile applications before running
* `--no-deps-check` - does not check dependencies before running
* `--migrations-path` - the path to load the migrations from, defaults to
`"priv/repo/migrations"`. This option may be given multiple times in which case the migrations
are loaded from all the given directories and sorted as if they were in the same one.
Note, if you have previously run migrations from e.g. paths `a/` and `b/`, and now run `mix
ecto.migrations --migrations-path a/` (omitting path `b/`), the migrations from the path
`b/` will be shown in the output as `** FILE NOT FOUND **`.
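  For example, to read migrations from two directories at once (the second path is
  illustrative):

      mix ecto.migrations --migrations-path priv/repo/migrations --migrations-path priv/repo/tenant_migrations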
"""
@impl true
def run(args, migrations \\ &Ecto.Migrator.migrations/2, puts \\ &IO.puts/1) do
repos = parse_repo(args)
{opts, _} = OptionParser.parse! args, strict: @switches, aliases: @aliases
for repo <- repos do
ensure_repo(repo, args)
paths = ensure_migrations_paths(repo, opts)
case Ecto.Migrator.with_repo(repo, &migrations.(&1, paths), [mode: :temporary]) do
{:ok, repo_status, _} ->
puts.(
"""
Repo: #{inspect(repo)}
Status Migration ID Migration Name
--------------------------------------------------
""" <>
Enum.map_join(repo_status, "\n", fn {status, number, description} ->
" #{format(status, 10)}#{format(number, 16)}#{description}"
end) <> "\n"
)
{:error, error} ->
Mix.raise "Could not start repo #{inspect repo}, error: #{inspect error}"
end
end
:ok
end
defp format(content, pad) do
content
|> to_string
|> String.pad_trailing(pad)
end
end
| 29.903226 | 100 | 0.627113 |
9e374318e708ad1fad2c0abd0dc751a8ae50478e | 959 | ex | Elixir | test/support/channel_case.ex | myobie/post_register | 924a832d7fa1693d655c34b0295affa8b3275f26 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | myobie/post_register | 924a832d7fa1693d655c34b0295affa8b3275f26 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | myobie/post_register | 924a832d7fa1693d655c34b0295affa8b3275f26 | [
"MIT"
] | null | null | null | defmodule PostRegister.Web.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
  to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
# The default endpoint for testing
@endpoint PostRegister.Web.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(PostRegister.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(PostRegister.Repo, {:shared, self()})
end
:ok
end
end
| 25.236842 | 74 | 0.720542 |
9e374f7122e5c6dd1e5eb3715df2dc5cdfbae34f | 4,581 | exs | Elixir | apps/artemis/test/artemis/contexts/key_value/update_key_value_test.exs | artemis-platform/artemis_dashboard | 5ab3f5ac4c5255478bbebf76f0e43b44992e3cab | [
"MIT"
] | 9 | 2019-08-19T19:56:34.000Z | 2022-03-22T17:56:38.000Z | apps/artemis/test/artemis/contexts/key_value/update_key_value_test.exs | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | 7 | 2019-07-12T21:41:01.000Z | 2020-08-17T21:29:22.000Z | apps/artemis/test/artemis/contexts/key_value/update_key_value_test.exs | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | 2 | 2019-07-05T22:51:47.000Z | 2019-08-19T19:56:37.000Z | defmodule Artemis.UpdateKeyValueTest do
use Artemis.DataCase
import Artemis.Factories
alias Artemis.UpdateKeyValue
describe "call!" do
test "raises an exception when id not found" do
invalid_id = 50_000_000
params = params_for(:key_value)
assert_raise Artemis.Context.Error, fn ->
UpdateKeyValue.call!(invalid_id, params, Mock.system_user())
end
end
test "returns successfully when params are empty" do
key_value = insert(:key_value)
params = %{}
updated = UpdateKeyValue.call!(key_value, params, Mock.system_user())
assert updated.key == key_value.key
assert updated.value == key_value.value
end
test "updates a record when passed valid params" do
key_value = insert(:key_value)
params = params_for(:key_value)
assert key_value.key != params.key
assert key_value.value != params.value
updated = UpdateKeyValue.call!(key_value, params, Mock.system_user())
assert updated.key == params.key
assert updated.value == params.value
end
test "updates a record when passed an id and valid params" do
key_value = insert(:key_value)
params = params_for(:key_value)
assert key_value.key != params.key
assert key_value.value != params.value
updated = UpdateKeyValue.call!(key_value.id, params, Mock.system_user())
assert updated.key == params.key
assert updated.value == params.value
end
test "updates a record when passed a key and valid params" do
key_value = insert(:key_value)
params = params_for(:key_value)
assert key_value.key != params.key
assert key_value.value != params.value
updated = UpdateKeyValue.call!(key_value.key, params, Mock.system_user())
assert updated.key == params.key
assert updated.value == params.value
end
end
describe "call" do
test "returns an error when id not found" do
invalid_id = 50_000_000
params = params_for(:key_value)
{:error, _} = UpdateKeyValue.call(invalid_id, params, Mock.system_user())
end
test "returns successfully when params are empty" do
key_value = insert(:key_value)
params = %{}
{:ok, updated} = UpdateKeyValue.call(key_value, params, Mock.system_user())
assert updated.key == key_value.key
assert updated.value == key_value.value
end
test "updates a record when passed valid params" do
key_value = insert(:key_value)
params = params_for(:key_value)
assert key_value.key != params.key
assert key_value.value != params.value
{:ok, updated} = UpdateKeyValue.call(key_value, params, Mock.system_user())
assert updated.key == params.key
assert updated.value == params.value
end
test "updates a record when passed an id and valid params" do
key_value = insert(:key_value)
params = params_for(:key_value)
assert key_value.key != params.key
assert key_value.value != params.value
{:ok, updated} = UpdateKeyValue.call(key_value.id, params, Mock.system_user())
assert updated.key == params.key
assert updated.value == params.value
end
test "updates a record when passed a key and valid binary params" do
key_value = insert(:key_value)
params = params_for(:key_value, key: "hello", value: "world")
assert key_value.key != params.key
assert key_value.value != params.value
{:ok, updated} = UpdateKeyValue.call(key_value.key, params, Mock.system_user())
assert updated.key == params.key
assert updated.value == params.value
end
test "updates a record when passed a key and valid non-binary params" do
key_value = insert(:key_value)
params = params_for(:key_value, key: 'hello', value: %{hello: %{world: true}})
assert key_value.key != params.key
assert key_value.value != params.value
{:ok, updated} = UpdateKeyValue.call(key_value.key, params, Mock.system_user())
assert updated.key == params.key
assert updated.value == params.value
end
end
describe "broadcast" do
test "publishes event and record" do
ArtemisPubSub.subscribe(Artemis.Event.get_broadcast_topic())
key_value = insert(:key_value)
params = params_for(:key_value)
{:ok, updated} = UpdateKeyValue.call(key_value, params, Mock.system_user())
assert_received %Phoenix.Socket.Broadcast{
event: "key-value:updated",
payload: %{
data: ^updated
}
}
end
end
end
| 29.178344 | 85 | 0.666667 |
9e375f50d53cfc0c69c1de0497711dd13da08c82 | 4,443 | ex | Elixir | lib/ecto_enum.ex | altenwald/ecto_enum | add06bb6115d6055af3b6a3e905d8ceb6b5ea2ad | [
"MIT"
] | null | null | null | lib/ecto_enum.ex | altenwald/ecto_enum | add06bb6115d6055af3b6a3e905d8ceb6b5ea2ad | [
"MIT"
] | null | null | null | lib/ecto_enum.ex | altenwald/ecto_enum | add06bb6115d6055af3b6a3e905d8ceb6b5ea2ad | [
"MIT"
] | null | null | null | defmodule EctoEnum do
@moduledoc """
Provides `defenum/2` and `defenum/3` macro for defining an Enum Ecto type.
This module can also be `use`d to create an Ecto Enum like:
defmodule CustomEnum do
use EctoEnum, ready: 0, set: 1, go: 2
end
Or in place of using `EctoEnum.Postgres` like:
defmodule PostgresType do
use EctoEnum, type: :new_type, enums: [:ready, :set, :go]
end
The difference between the above two examples is that the previous one would use an
integer column in the database while the latter one would use a custom type in PostgreSQL.
Note that only PostgreSQL is supported for custom data types at the moment.
"""
@doc """
Defines an enum custom `Ecto.Type`.
For second argument, it accepts either a list of strings or a keyword list with keyword
values that are either strings or integers. Below are examples of a valid argument:
[registered: 0, active: 1, inactive: 2, archived: 3]
[registered: "registered", active: "active", inactive: "inactive", archived: "archived"]
["registered", "active", "inactive", "archived"]
It can be used like any other `Ecto.Type` by passing it to a field in your model's
schema block. For example:
import EctoEnum
defenum StatusEnum, registered: 0, active: 1, inactive: 2, archived: 3
defmodule User do
use Ecto.Schema
schema "users" do
field :status, StatusEnum
end
end
In the above example, the `:status` will behave like an enum and will allow you to
pass an `integer`, `atom` or `string` to it. This applies to saving the model,
invoking `Ecto.Changeset.cast/4`, or performing a query on the status field. Let's
do a few examples:
iex> user = Repo.insert!(%User{status: 0})
iex> Repo.get(User, user.id).status
:registered
iex> %{changes: changes} = cast(%User{}, %{"status" => "Active"}, ~w(status), [])
iex> changes.status
:active
iex> from(u in User, where: u.status == :registered) |> Repo.all() |> length
1
Passing an invalid value to a `Ecto.Changeset.cast/3` will add an error to `changeset.errors`
field.
iex> changeset = cast(%User{}, %{"status" => "retroactive"}, ~w(status), [])
iex> changeset.errors
[status: "is invalid"]
  Passing an invalid value directly into a model struct will result in an error when calling
`Repo` functions.
iex> Repo.insert!(%User{status: :none})
** (Ecto.ChangeError) `"none"` is not a valid enum value for `EctoEnumTest.StatusEnum`.
Valid enum values are `[0, 1, 2, 3, :registered, :active, :inactive, :archived, "active",
"archived", "inactive", "registered"]`
The enum type `StatusEnum` will also have a reflection function for inspecting the
enum map in runtime.
iex> StatusEnum.__enum_map__()
[registered: 0, active: 1, inactive: 2, archived: 3]
Enums also generate a typespec for use with dialyzer, available as the `t()` type
iex> t(StatusEnum)
@type t() :: :registered | :active | :inactive | :archived
"""
defmacro __using__(opts) do
quote do
opts = unquote(opts)
if opts[:type] && opts[:enums] do
use EctoEnum.Postgres.Use, unquote(opts)
else
use EctoEnum.Use, unquote(opts)
end
end
end
defmacro defenum(module, type, enum, options \\ []) do
EctoEnum.Postgres.defenum(module, type, enum, options)
end
defmacro defenum(module, enum) do
quote do
enum = Macro.escape(unquote(enum))
defmodule unquote(module) do
use EctoEnum.Use, enum
end
end
end
alias Ecto.Changeset
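  # Adds a changeset error on `field` when its change is not a valid value for the
  # field's enum type. The optional third argument builds the error message and is
  # called with the field name, the invalid value, and the list of valid values.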
@spec validate_enum(
Ecto.Changeset.t(),
atom,
(atom, String.t(), list(String.t() | integer | atom) -> String.t())
) :: Ecto.Changeset.t()
def validate_enum(changeset, field, error_msg \\ &default_error_msg/3) do
Changeset.validate_change(changeset, field, :validate_enum, fn field, value ->
type = changeset.types[field]
error_msg = error_msg.(field, value, type.__valid_values__())
if type.valid_value?(value) do
[]
else
Keyword.put([], field, error_msg)
end
end)
end
defp default_error_msg(field, value, valid_values) do
"Value `#{inspect(value)}` is not a valid enum for `#{inspect(field)}` field. " <>
"Valid enums are `#{inspect(valid_values)}`"
end
end
| 31.735714 | 95 | 0.648436 |
9e37997b0497fe4a18612ae85b4fc517b5e0adbd | 1,354 | exs | Elixir | test/genome/genome_test.exs | ORBAT/mulix | 75503f79eb1832329afe96540cfd0e2dc9c01575 | [
"MIT"
] | null | null | null | test/genome/genome_test.exs | ORBAT/mulix | 75503f79eb1832329afe96540cfd0e2dc9c01575 | [
"MIT"
] | null | null | null | test/genome/genome_test.exs | ORBAT/mulix | 75503f79eb1832329afe96540cfd0e2dc9c01575 | [
"MIT"
] | null | null | null | defmodule GenomeTest do
use ExUnit.Case, async: true
doctest Genome
doctest Genome.Mush
alias Genome.Mush
describe "Mush.evaluate" do
test "with program that doesn't close all parens, has blocks and curr_block" do
# mush_env =
# ...> Genome.new_env()
# ...> |> Environment.update_stack(:blocks, [[]])
# ...> |> Environment.update_stack(:curr_block, [[:number_drop], [:y, :sub, :mul]])
# ...> |> Environment.update_stack(:code, [:if, 7, 1])
# ...> |> Genome.Mush.close_parens(1)
# ...> |> Environment.get_stacks([:code, :blocks, :curr_block])
# %{
# blocks: [[]],
# code: [:if, 7, 1],
# curr_block: [[[:number_drop], :y, :sub, :mul]]
# }
assert [[], [[:drop], :y, :sub, :mul], :if, 7, 1] =
Genome.new_env()
|> Environment.update_stack(:blocks, [[]])
|> Environment.update_stack(:curr_block, [[], [:y, :sub, :mul]])
|> Environment.update_stack(:code, [:if, 7, 1])
|> Mush.evaluate(Mush.genome(item: :drop, close: 0))
|> Environment.get_stack(:code)
end
end
describe "Mush handle_close" do
test "with close = 0"
test "with close > 0, open current block, nothing in blocks"
test "with close > 0, open current block, something in blocks"
end
end
| 35.631579 | 89 | 0.553176 |
9e37a3d71e723057a030dabe30abed9aca7b1b42 | 1,818 | ex | Elixir | web/channels/user_socket.ex | Poniverse/LunaTube-API | 433473f32ec04a636f6de60642e445fd2e478f98 | [
"Apache-2.0"
] | null | null | null | web/channels/user_socket.ex | Poniverse/LunaTube-API | 433473f32ec04a636f6de60642e445fd2e478f98 | [
"Apache-2.0"
] | null | null | null | web/channels/user_socket.ex | Poniverse/LunaTube-API | 433473f32ec04a636f6de60642e445fd2e478f98 | [
"Apache-2.0"
] | null | null | null | defmodule Lunatube.UserSocket do
use Phoenix.Socket
require Logger
import Guardian.Phoenix.Socket
alias Lunatube.User
## Channels
channel "room:*", Lunatube.RoomChannel
## Transports
transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(%{"token" => jwt}, socket) do
case sign_in(socket, jwt) do
{:ok, authed_socket, guardian_params} ->
authed_socket = authed_socket
|> set_current_user(guardian_params[:resource])
{:ok, authed_socket}
_ ->
#unauthenticated socket
{:ok, socket}
end
end
def connect(_params, socket) do
{:ok, socket |> set_current_user(%User{id: :anonymous})}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "users_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# Lunatube.Endpoint.broadcast("users_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(%{assigns: %{user: %User{id: :anonymous}}}), do: nil
def id(%{assigns: %{user: %User{id: id}}}), do: "users_socket:#{id}"
def id(_), do: nil
def set_current_user(socket, user) do
    assign(socket, :user, user)
end
end
| 30.3 | 83 | 0.674917 |
9e37e00554f57c5b17c5e48d783b13131d164ecd | 708 | ex | Elixir | lib/sneex/address/helper.ex | NickMcG/SNEEX | 901215dea41fa21314a4f4db46b51648158f1544 | [
"MIT"
] | 1 | 2019-11-16T00:33:02.000Z | 2019-11-16T00:33:02.000Z | lib/sneex/address/helper.ex | NickMcG/SNEEX | 901215dea41fa21314a4f4db46b51648158f1544 | [
"MIT"
] | 1 | 2019-08-11T23:02:15.000Z | 2019-08-11T23:02:15.000Z | lib/sneex/address/helper.ex | NickMcG/SNEEX | 901215dea41fa21314a4f4db46b51648158f1544 | [
"MIT"
] | null | null | null | defmodule Sneex.Address.Helper do
@moduledoc """
This module defines common functions that are used by various addressing modes
Maybe this ends up going away?
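  Illustrative values: `calc_offset(0xFFFF, 0x0001)` wraps around to `0x0000`, and
  `absolute_offset(0x7E, 0x1234)` produces the 24-bit address `0x7E1234`.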
"""
alias Sneex.Cpu
use Bitwise
def indexed(addr, cpu = %Cpu{}, :x), do: (addr + Cpu.x(cpu)) |> band(0xFFFFFF)
def indexed(addr, cpu = %Cpu{}, :y), do: (addr + Cpu.y(cpu)) |> band(0xFFFFFF)
def calc_offset(part1, part2), do: (part1 + part2) |> band(0xFFFF)
def absolute_offset(upper_byte, addr), do: upper_byte |> bsl(16) |> bor(addr) |> band(0xFFFFFF)
def read_indirect(addr, cpu = %Cpu{}, size), do: cpu |> Cpu.read_data(addr, size)
def extra_cycle_for_16_bit(:bit16), do: 1
def extra_cycle_for_16_bit(_), do: 0
end
| 33.714286 | 97 | 0.675141 |
9e37f027fd43a91dfd917b03397a43a51019d730 | 1,382 | exs | Elixir | test/ex_binance/private/cancel_order_test.exs | arturictus/ex_binance | 9d1f51070a452e5e763a1d148d1151dddedd1956 | [
"MIT"
] | null | null | null | test/ex_binance/private/cancel_order_test.exs | arturictus/ex_binance | 9d1f51070a452e5e763a1d148d1151dddedd1956 | [
"MIT"
] | null | null | null | test/ex_binance/private/cancel_order_test.exs | arturictus/ex_binance | 9d1f51070a452e5e763a1d148d1151dddedd1956 | [
"MIT"
] | null | null | null | defmodule ExBinance.Private.CancelOrderTest do
use ExUnit.Case
use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney
setup_all do
HTTPoison.start()
end
@credentials %ExBinance.Credentials{
api_key: System.get_env("BINANCE_API_KEY"),
secret_key: System.get_env("BINANCE_API_SECRET")
}
describe ".cancel_order_by_order_id" do
test "returns an ok tuple with the response" do
order_id = 165_812_252
use_cassette "private/cancel_order_by_order_id_ok" do
assert {:ok, response} =
ExBinance.Private.cancel_order_by_order_id("LTCBTC", order_id, @credentials)
assert %ExBinance.Responses.CancelOrder{} = response
assert response.order_id == order_id
end
end
test "returns an error tuple when the order id can't be found" do
use_cassette "private/cancel_order_by_order_id_error_not_found" do
assert {:error, {:not_found, msg}} =
ExBinance.Private.cancel_order_by_order_id("LTCBTC", "12345", @credentials)
assert msg == "Unknown order sent."
end
end
test "bubbles unhandled errors" do
use_cassette "private/cancel_order_by_order_id_error_unhandled" do
assert {:error, {:binance_error, %{"code" => -9999}}} =
ExBinance.Private.cancel_order_by_order_id("LTCBTC", "6789", @credentials)
end
end
end
end
| 31.409091 | 93 | 0.685239 |
9e37f8adccee5bf6b6c67f4de9704824e94d0368 | 250 | ex | Elixir | Destructuration.ex | hectorip/ErlangExercises | 4a1aa5de0504da1bfe5a6c31c1d20277524ab363 | [
"MIT"
] | 4 | 2016-09-22T03:47:56.000Z | 2017-02-02T17:42:57.000Z | Destructuration.ex | hectorip/ErlangExercises | 4a1aa5de0504da1bfe5a6c31c1d20277524ab363 | [
"MIT"
] | null | null | null | Destructuration.ex | hectorip/ErlangExercises | 4a1aa5de0504da1bfe5a6c31c1d20277524ab363 | [
"MIT"
] | null | null | null | {:module, _, binary, _} = contents
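# `contents` is assumed to hold the `{:module, name, bytecode, result}` tuple returned by
# `defmodule`; the match extracts the compiled bytecode into `binary`.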
<< a :: utf8, rest :: binary>> = "This is a String"
# The equals symbol is the match operator: it performs pattern matching, not assignment
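# For example, {:ok, greeting} = {:ok, "hola"} succeeds and binds `greeting`,
# while {:ok, _} = {:error, :nope} raises MatchError because the shapes differ.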
:rpc.call :"node_two@MacBook-Pro-de-HectorIP", :code, :load_binary, [Hello, 'iex', binary]
| 31.25 | 90 | 0.676 |
9e380b489d451cd3d06f5c27e6a8928e25edaa04 | 1,483 | exs | Elixir | mix.exs | skeleton-elixir/skeleton_permission | 9d86d338cfcd5b0b9925cde139d03c225dc9e4ee | [
"MIT"
] | null | null | null | mix.exs | skeleton-elixir/skeleton_permission | 9d86d338cfcd5b0b9925cde139d03c225dc9e4ee | [
"MIT"
] | 1 | 2020-07-02T13:17:17.000Z | 2020-07-02T13:17:17.000Z | mix.exs | skeleton-elixir/skeleton_permission | 9d86d338cfcd5b0b9925cde139d03c225dc9e4ee | [
"MIT"
] | null | null | null | defmodule SkeletonPermission.MixProject do
use Mix.Project
@version "2.1.1"
@source_url "https://github.com/skeleton-elixir/skeleton_permission"
@maintainers [
"Diego Nogueira",
"Jhonathas Matos"
]
def project do
[
name: "SkeletonPermission",
app: :skeleton_permission,
version: @version,
elixir: "~> 1.8",
elixirc_paths: elixirc_paths(Mix.env()),
package: package(),
source_url: @source_url,
description: description(),
maintainers: @maintainers,
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:ex_doc, ">= 0.0.0", only: :dev, runtime: false},
{:plug_cowboy, "~> 2.0"}
]
end
  defp description() do
    "Skeleton Permission helps you control your system's entire authorization layer,
    whether via Controller, Resolver (Absinthe), View, or LiveView."
end
defp elixirc_paths(:test), do: ["lib", "test/app", "test/app_web"]
defp elixirc_paths(_), do: ["lib"]
defp package do
[
maintainers: @maintainers,
licenses: ["MIT"],
files: ~w(lib CHANGELOG.md LICENSE mix.exs README.md),
links: %{
"GitHub" => @source_url,
"Changelog" => "#{@source_url}/blob/master/CHANGELOG.md"
}
]
end
end
| 23.919355 | 86 | 0.616993 |
9e3838a6d07cedf3c09a48a2c3a798bae6886710 | 1,526 | ex | Elixir | lib/xdr/transactions/operations/revoke_sponsorship_result.ex | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 3 | 2021-08-17T20:32:45.000Z | 2022-03-13T20:26:02.000Z | lib/xdr/transactions/operations/revoke_sponsorship_result.ex | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 45 | 2021-08-12T20:19:41.000Z | 2022-03-27T21:00:10.000Z | lib/xdr/transactions/operations/revoke_sponsorship_result.ex | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 2 | 2021-09-22T23:11:13.000Z | 2022-01-23T03:19:11.000Z | defmodule StellarBase.XDR.Operations.RevokeSponsorshipResult do
@moduledoc """
Representation of Stellar `RevokeSponsorshipResult` type.
"""
alias StellarBase.XDR.Void
alias StellarBase.XDR.Operations.RevokeSponsorshipResultCode
@behaviour XDR.Declaration
@arms [REVOKE_SPONSORSHIP_SUCCESS: Void, default: Void]
@type t :: %__MODULE__{result: any(), code: RevokeSponsorshipResultCode.t()}
defstruct [:result, :code]
@spec new(result :: any(), code :: RevokeSponsorshipResultCode.t()) :: t()
def new(result, %RevokeSponsorshipResultCode{} = code),
do: %__MODULE__{result: result, code: code}
@impl true
def encode_xdr(%__MODULE__{result: result, code: code}) do
code
|> XDR.Union.new(@arms, result)
|> XDR.Union.encode_xdr()
end
@impl true
def encode_xdr!(%__MODULE__{result: result, code: code}) do
code
|> XDR.Union.new(@arms, result)
|> XDR.Union.encode_xdr!()
end
@impl true
def decode_xdr(bytes, spec \\ union_spec())
def decode_xdr(bytes, spec) do
case XDR.Union.decode_xdr(bytes, spec) do
{:ok, {{code, result}, rest}} -> {:ok, {new(result, code), rest}}
error -> error
end
end
@impl true
def decode_xdr!(bytes, spec \\ union_spec())
def decode_xdr!(bytes, spec) do
{{code, result}, rest} = XDR.Union.decode_xdr!(bytes, spec)
{new(result, code), rest}
end
@spec union_spec() :: XDR.Union.t()
defp union_spec do
nil
|> RevokeSponsorshipResultCode.new()
|> XDR.Union.new(@arms)
end
end
| 25.864407 | 78 | 0.673001 |
9e383d9aef5673eb7d4322eeea36c632bafece4e | 4,282 | ex | Elixir | lib/elixir/lib/bitwise.ex | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 2 | 2018-11-15T06:38:14.000Z | 2018-11-17T18:03:14.000Z | lib/elixir/lib/bitwise.ex | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 1 | 2018-09-10T23:36:45.000Z | 2018-09-10T23:36:45.000Z | lib/elixir/lib/bitwise.ex | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 1 | 2018-09-10T23:32:56.000Z | 2018-09-10T23:32:56.000Z | defmodule Bitwise do
@moduledoc """
A set of macros that perform calculations on bits.
The macros in this module come in two flavors: named or
operators. For example:
iex> use Bitwise
iex> bnot(1) # named
-2
iex> 1 &&& 1 # operator
1
If you prefer to use only operators or skip them, you can
pass the following options:
* `:only_operators` - includes only operators
* `:skip_operators` - skips operators
For example:
iex> use Bitwise, only_operators: true
iex> 1 &&& 1
1
When invoked with no options, `use Bitwise` is equivalent
to `import Bitwise`.
All bitwise macros can be used in guards:
iex> use Bitwise
iex> odd? = fn
...> int when band(int, 1) == 1 -> true
...> _ -> false
...> end
iex> odd?.(1)
true
"""
@doc false
defmacro __using__(options) do
except =
cond do
Keyword.get(options, :only_operators) ->
[bnot: 1, band: 2, bor: 2, bxor: 2, bsl: 2, bsr: 2]
Keyword.get(options, :skip_operators) ->
[~~~: 1, &&&: 2, |||: 2, ^^^: 2, <<<: 2, >>>: 2]
true ->
[]
end
quote do
import Bitwise, except: unquote(except)
end
end
@doc """
Calculates the bitwise NOT of its argument.
iex> bnot(2)
-3
iex> bnot(2) &&& 3
1
"""
@doc guard: true
defmacro bnot(expr) do
quote(do: :erlang.bnot(unquote(expr)))
end
@doc """
Prefix (unary) operator; calculates the bitwise NOT of its argument.
iex> ~~~2
-3
iex> ~~~2 &&& 3
1
"""
@doc guard: true
defmacro ~~~expr do
quote(do: :erlang.bnot(unquote(expr)))
end
@doc """
Calculates the bitwise AND of its arguments.
iex> band(9, 3)
1
"""
@doc guard: true
defmacro band(left, right) do
quote(do: :erlang.band(unquote(left), unquote(right)))
end
@doc """
Infix operator; calculates the bitwise AND of its arguments.
iex> 9 &&& 3
1
"""
@doc guard: true
defmacro left &&& right do
quote(do: :erlang.band(unquote(left), unquote(right)))
end
@doc """
Calculates the bitwise OR of its arguments.
iex> bor(9, 3)
11
"""
@doc guard: true
defmacro bor(left, right) do
quote(do: :erlang.bor(unquote(left), unquote(right)))
end
@doc """
Infix operator; calculates the bitwise OR of its arguments.
iex> 9 ||| 3
11
"""
@doc guard: true
defmacro left ||| right do
quote(do: :erlang.bor(unquote(left), unquote(right)))
end
@doc """
Calculates the bitwise XOR of its arguments.
iex> bxor(9, 3)
10
"""
@doc guard: true
defmacro bxor(left, right) do
quote(do: :erlang.bxor(unquote(left), unquote(right)))
end
@doc """
Infix operator; calculates the bitwise XOR of its arguments.
iex> 9 ^^^ 3
10
"""
@doc guard: true
defmacro left ^^^ right do
quote(do: :erlang.bxor(unquote(left), unquote(right)))
end
@doc """
Calculates the result of an arithmetic left bitshift.
iex> bsl(1, 2)
4
iex> bsl(1, -2)
0
iex> bsl(-1, 2)
-4
iex> bsl(-1, -2)
-1
"""
@doc guard: true
defmacro bsl(left, right) do
quote(do: :erlang.bsl(unquote(left), unquote(right)))
end
@doc """
Infix operator; calculates the result of an arithmetic left bitshift.
iex> 1 <<< 2
4
iex> 1 <<< -2
0
iex> -1 <<< 2
-4
iex> -1 <<< -2
-1
"""
@doc guard: true
defmacro left <<< right do
quote(do: :erlang.bsl(unquote(left), unquote(right)))
end
@doc """
Calculates the result of an arithmetic right bitshift.
iex> bsr(1, 2)
0
iex> bsr(1, -2)
4
iex> bsr(-1, 2)
-1
iex> bsr(-1, -2)
-4
"""
@doc guard: true
defmacro bsr(left, right) do
quote(do: :erlang.bsr(unquote(left), unquote(right)))
end
@doc """
Infix operator; calculates the result of an arithmetic right bitshift.
iex> 1 >>> 2
0
iex> 1 >>> -2
4
iex> -1 >>> 2
-1
iex> -1 >>> -2
-4
"""
@doc guard: true
defmacro left >>> right do
quote(do: :erlang.bsr(unquote(left), unquote(right)))
end
end
| 18.456897 | 72 | 0.552312 |
9e3841f976fa4240dce645b994993efdfc09dac2 | 2,443 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/add_dimension_group_request.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/add_dimension_group_request.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/sheets/lib/google_api/sheets/v4/model/add_dimension_group_request.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Sheets.V4.Model.AddDimensionGroupRequest do
@moduledoc """
  Creates a group over the specified range.

  If the requested range is a superset of the range of an existing group G, then the depth of G is incremented and this new group G' has the depth of that group. For example, a group [C:D, depth 1] + [B:E] results in groups [B:E, depth 1] and [C:D, depth 2].

  If the requested range is a subset of the range of an existing group G, then the depth of the new group G' becomes one greater than the depth of G. For example, a group [B:E, depth 1] + [C:D] results in groups [B:E, depth 1] and [C:D, depth 2].

  If the requested range starts before and ends within, or starts within and ends after, the range of an existing group G, then the range of the existing group G becomes the union of the ranges, and the new group G' has depth one greater than the depth of G and range as the intersection of the ranges. For example, a group [B:D, depth 1] + [C:E] results in groups [B:E, depth 1] and [C:D, depth 2].
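
  A minimal construction sketch (building the nested `GoogleApi.Sheets.V4.Model.DimensionRange`
  value is left to the caller and not shown here):

      %GoogleApi.Sheets.V4.Model.AddDimensionGroupRequest{range: dimension_range}
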
## Attributes
* `range` (*type:* `GoogleApi.Sheets.V4.Model.DimensionRange.t`, *default:* `nil`) - The range over which to create a group.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:range => GoogleApi.Sheets.V4.Model.DimensionRange.t()
}
field(:range, as: GoogleApi.Sheets.V4.Model.DimensionRange)
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.AddDimensionGroupRequest do
def decode(value, options) do
GoogleApi.Sheets.V4.Model.AddDimensionGroupRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.AddDimensionGroupRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 51.978723 | 943 | 0.740074 |