Dataset schema (column: type, value range or distinct values):

hexsha: stringlengths, 40 to 40
size: int64, 2 to 991k
ext: stringclasses, 2 values
lang: stringclasses, 1 value
max_stars_repo_path: stringlengths, 4 to 208
max_stars_repo_name: stringlengths, 6 to 106
max_stars_repo_head_hexsha: stringlengths, 40 to 40
max_stars_repo_licenses: sequence
max_stars_count: int64, 1 to 33.5k
max_stars_repo_stars_event_min_datetime: stringlengths, 24 to 24
max_stars_repo_stars_event_max_datetime: stringlengths, 24 to 24
max_issues_repo_path: stringlengths, 4 to 208
max_issues_repo_name: stringlengths, 6 to 106
max_issues_repo_head_hexsha: stringlengths, 40 to 40
max_issues_repo_licenses: sequence
max_issues_count: int64, 1 to 16.3k
max_issues_repo_issues_event_min_datetime: stringlengths, 24 to 24
max_issues_repo_issues_event_max_datetime: stringlengths, 24 to 24
max_forks_repo_path: stringlengths, 4 to 208
max_forks_repo_name: stringlengths, 6 to 106
max_forks_repo_head_hexsha: stringlengths, 40 to 40
max_forks_repo_licenses: sequence
max_forks_count: int64, 1 to 6.91k
max_forks_repo_forks_event_min_datetime: stringlengths, 24 to 24
max_forks_repo_forks_event_max_datetime: stringlengths, 24 to 24
content: stringlengths, 2 to 991k
avg_line_length: float64, 1 to 36k
max_line_length: int64, 1 to 977k
alphanum_fraction: float64, 0 to 1
f78f808fbcbb4939109f663e9f55f697e654c021
6,377
exs
Elixir
test/cadet_web/controllers/sourcecast_controller_test.exs
chrisgzf/cadet
101dfe43ca7d6626ee86bb1f2ba20f054602a563
[ "Apache-2.0" ]
null
null
null
test/cadet_web/controllers/sourcecast_controller_test.exs
chrisgzf/cadet
101dfe43ca7d6626ee86bb1f2ba20f054602a563
[ "Apache-2.0" ]
null
null
null
test/cadet_web/controllers/sourcecast_controller_test.exs
chrisgzf/cadet
101dfe43ca7d6626ee86bb1f2ba20f054602a563
[ "Apache-2.0" ]
null
null
null
defmodule CadetWeb.SourcecastControllerTest do use CadetWeb.ConnCase alias CadetWeb.SourcecastController test "swagger" do SourcecastController.swagger_definitions() SourcecastController.swagger_path_index(nil) SourcecastController.swagger_path_create(nil) SourcecastController.swagger_path_delete(nil) end describe "GET /sourcecast, unauthenticated" do test "renders a list of all sourcecast entries for public", %{ conn: conn } do %{sourcecasts: sourcecasts} = seed_db() expected = sourcecasts |> Enum.map( &%{ "id" => &1.id, "title" => &1.title, "description" => &1.description, "playbackData" => &1.playbackData, "uploader" => %{ "name" => &1.uploader.name, "id" => &1.uploader.id }, "url" => Cadet.Course.Upload.url({&1.audio, &1}) } ) res = conn |> get(build_url()) |> json_response(200) |> Enum.map(&Map.delete(&1, "audio")) |> Enum.map(&Map.delete(&1, "inserted_at")) |> Enum.map(&Map.delete(&1, "updated_at")) assert expected == res end end describe "POST /sourcecast, unauthenticated" do test "unauthorized", %{conn: conn} do conn = post(conn, build_url(), %{}) assert response(conn, 401) =~ "Unauthorised" end end describe "DELETE /sourcecast, unauthenticated" do test "unauthorized", %{conn: conn} do conn = delete(conn, build_url(1), %{}) assert response(conn, 401) =~ "Unauthorised" end end describe "GET /sourcecast, all roles" do test "renders a list of all sourcecast entries", %{ conn: conn } do %{sourcecasts: sourcecasts} = seed_db() expected = sourcecasts |> Enum.map( &%{ "id" => &1.id, "title" => &1.title, "description" => &1.description, "playbackData" => &1.playbackData, "uploader" => %{ "name" => &1.uploader.name, "id" => &1.uploader.id }, "url" => Cadet.Course.Upload.url({&1.audio, &1}) } ) res = conn |> get(build_url()) |> json_response(200) |> Enum.map(&Map.delete(&1, "audio")) |> Enum.map(&Map.delete(&1, "inserted_at")) |> Enum.map(&Map.delete(&1, "updated_at")) assert expected == res end end describe "POST /sourcecast, student" do @tag authenticate: :student test "prohibited", %{conn: conn} do conn = post(conn, build_url(), %{ "sourcecast" => %{ "title" => "Title", "description" => "Description", "playbackData" => "{\"init\":{\"editorValue\":\"// Type your program in here!\"},\"inputs\":[]}", "audio" => %Plug.Upload{ content_type: "audio/wav", filename: "upload.wav", path: "test/fixtures/upload.wav" } } }) assert response(conn, 403) =~ "User is not permitted to upload" end end describe "DELETE /sourcecast, student" do @tag authenticate: :student test "prohibited", %{conn: conn} do conn = delete(conn, build_url(1), %{}) assert response(conn, 403) =~ "User is not permitted to delete" end end describe "POST /sourcecast, staff" do @tag authenticate: :staff test "successful", %{conn: conn} do conn = post(conn, build_url(), %{ "sourcecast" => %{ "title" => "Title", "description" => "Description", "playbackData" => "{\"init\":{\"editorValue\":\"// Type your program in here!\"},\"inputs\":[]}", "audio" => %Plug.Upload{ content_type: "audio/wav", filename: "upload.wav", path: "test/fixtures/upload.wav" } } }) assert response(conn, 200) == "OK" end @tag authenticate: :staff test "missing parameter", %{conn: conn} do conn = post(conn, build_url(), %{}) assert response(conn, 400) =~ "Missing or invalid parameter(s)" end end describe "DELETE /sourcecast, staff" do @tag authenticate: :staff test "successful", %{conn: conn} do %{sourcecasts: sourcecasts} = seed_db() sourcecast = List.first(sourcecasts) conn = delete(conn, build_url(sourcecast.id), %{}) assert response(conn, 200) =~ "OK" end end 
describe "POST /sourcecast, admin" do @tag authenticate: :admin test "successful", %{conn: conn} do conn = post(conn, build_url(), %{ "sourcecast" => %{ "title" => "Title", "description" => "Description", "playbackData" => "{\"init\":{\"editorValue\":\"// Type your program in here!\"},\"inputs\":[]}", "audio" => %Plug.Upload{ content_type: "audio/wav", filename: "upload.wav", path: "test/fixtures/upload.wav" } } }) assert response(conn, 200) == "OK" end @tag authenticate: :admin test "missing parameter", %{conn: conn} do conn = post(conn, build_url(), %{}) assert response(conn, 400) =~ "Missing or invalid parameter(s)" end end describe "DELETE /sourcecast, admin" do @tag authenticate: :admin test "successful", %{conn: conn} do %{sourcecasts: sourcecasts} = seed_db() sourcecast = List.first(sourcecasts) conn = delete(conn, build_url(sourcecast.id), %{}) assert response(conn, 200) =~ "OK" end end defp build_url, do: "/v1/sourcecast/" defp build_url(sourcecast_id), do: "#{build_url()}#{sourcecast_id}/" defp seed_db do sourcecasts = for i <- 0..4 do insert(:sourcecast, %{ title: "Title#{i}", description: "Description#{i}", playbackData: "{\"init\":{\"editorValue\":\"// Type your program in here!\"},\"inputs\":[]}", audio: %Plug.Upload{ content_type: "audio/wav", filename: "upload#{i}.wav", path: "test/fixtures/upload.wav" } }) end %{sourcecasts: sourcecasts} end end
27.969298
93
0.53003
f78f8278926aebb46b5af2408da12ae84f6d3261
173
ex
Elixir
lib/chit_chat_web/views/pow_email_confirmation/mailer_view.ex
areski/ex-chitchat
0ec14e9af6acba40d6708f924b76fb4fbe592dcf
[ "MIT" ]
1
2021-09-10T16:49:36.000Z
2021-09-10T16:49:36.000Z
lib/chit_chat_web/views/pow_email_confirmation/mailer_view.ex
areski/ex-chitchat
0ec14e9af6acba40d6708f924b76fb4fbe592dcf
[ "MIT" ]
2
2020-05-22T18:42:14.000Z
2021-01-25T16:34:38.000Z
lib/chit_chat_web/views/pow_email_confirmation/mailer_view.ex
areski/ex-chitchat
0ec14e9af6acba40d6708f924b76fb4fbe592dcf
[ "MIT" ]
null
null
null
defmodule ChitChatWeb.PowEmailConfirmation.MailerView do
  use ChitChatWeb, :mailer_view

  def subject(:email_confirmation, _assigns), do: "Confirm your email address"
end
28.833333
78
0.815029
f78f994ee00f2d446bef1fff0ad3bdae92ef331c
74
exs
Elixir
test/views/page_view_test.exs
jschoch/guardex
26b2894385efbf032527ceef4a38c23e454faf40
[ "MIT" ]
null
null
null
test/views/page_view_test.exs
jschoch/guardex
26b2894385efbf032527ceef4a38c23e454faf40
[ "MIT" ]
null
null
null
test/views/page_view_test.exs
jschoch/guardex
26b2894385efbf032527ceef4a38c23e454faf40
[ "MIT" ]
null
null
null
defmodule Guardex.PageViewTest do
  use Guardex.ConnCase, async: true
end
18.5
35
0.810811
f78fa5d3dd44aff7fc1569d93772c403565298fa
1,619
ex
Elixir
apps/initializer/lib/initializer.ex
rucker/hindsight
876a5d344c5d8eebbea37684ee07e0a91e4430f0
[ "Apache-2.0" ]
null
null
null
apps/initializer/lib/initializer.ex
rucker/hindsight
876a5d344c5d8eebbea37684ee07e0a91e4430f0
[ "Apache-2.0" ]
null
null
null
apps/initializer/lib/initializer.ex
rucker/hindsight
876a5d344c5d8eebbea37684ee07e0a91e4430f0
[ "Apache-2.0" ]
null
null
null
defmodule Initializer do
  @callback on_start(state) :: {:ok, state} | {:error, term} when state: map

  defmacro __using__(opts) do
    name = Keyword.fetch!(opts, :name)
    supervisor = Keyword.fetch!(opts, :supervisor)

    quote location: :keep do
      use GenServer
      use Retry

      @behaviour Initializer
      @dialyzer [
        {:nowarn_function, handle_info: 2},
        {:no_match, init: 1}
      ]

      def start_link(init_arg) do
        GenServer.start_link(__MODULE__, init_arg, name: unquote(name))
      end

      def init(init_arg) do
        supervisor_ref = setup_monitor()

        state =
          Map.new(init_arg)
          |> Map.put(:supervisor_ref, supervisor_ref)

        case on_start(state) do
          {:ok, new_state} -> {:ok, new_state}
          {:error, reason} -> {:stop, reason}
        end
      end

      def handle_info({:DOWN, supervisor_ref, _, _, _}, %{supervisor_ref: supervisor_ref} = state) do
        retry with: constant_backoff(100) |> Stream.take(10), atoms: [false] do
          Process.whereis(unquote(supervisor)) != nil
        after
          _ ->
            supervisor_ref = setup_monitor()
            state = Map.put(state, :supervisor_ref, supervisor_ref)

            case on_start(state) do
              {:ok, new_state} -> {:noreply, state}
              {:error, reason} -> {:stop, reason, state}
            end
        else
          _ -> {:stop, "Supervisor not available", state}
        end
      end

      defp setup_monitor() do
        Process.whereis(unquote(supervisor))
        |> Process.monitor()
      end
    end
  end
end
27.440678
101
0.571958
f78fa699261b884169ff1622a6ac53363bded2e4
1,947
ex
Elixir
clients/compute/lib/google_api/compute/v1/model/backend_services_scoped_list.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/compute/lib/google_api/compute/v1/model/backend_services_scoped_list.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
1
2020-12-18T09:25:12.000Z
2020-12-18T09:25:12.000Z
clients/compute/lib/google_api/compute/v1/model/backend_services_scoped_list.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
1
2020-10-04T10:12:44.000Z
2020-10-04T10:12:44.000Z
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.

defmodule GoogleApi.Compute.V1.Model.BackendServicesScopedList do
  @moduledoc """

  ## Attributes

  *   `backendServices` (*type:* `list(GoogleApi.Compute.V1.Model.BackendService.t)`, *default:* `nil`) - A list of BackendServices contained in this scope.
  *   `warning` (*type:* `GoogleApi.Compute.V1.Model.BackendServicesScopedListWarning.t`, *default:* `nil`) - Informational warning which replaces the list of backend services when the list is empty.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :backendServices => list(GoogleApi.Compute.V1.Model.BackendService.t()),
          :warning => GoogleApi.Compute.V1.Model.BackendServicesScopedListWarning.t()
        }

  field(:backendServices, as: GoogleApi.Compute.V1.Model.BackendService, type: :list)
  field(:warning, as: GoogleApi.Compute.V1.Model.BackendServicesScopedListWarning)
end

defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.BackendServicesScopedList do
  def decode(value, options) do
    GoogleApi.Compute.V1.Model.BackendServicesScopedList.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.BackendServicesScopedList do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
38.94
199
0.755521
f78fb7fc6b57938ffc9e6cb50bbb34cf64965850
2,097
exs
Elixir
mix.exs
simedw/fastimage
b7ad5506984de72d00c91e22a004cf9d19448956
[ "MIT" ]
30
2016-08-12T06:28:45.000Z
2021-09-24T11:55:14.000Z
mix.exs
simedw/fastimage
b7ad5506984de72d00c91e22a004cf9d19448956
[ "MIT" ]
31
2016-08-10T21:30:08.000Z
2018-11-22T15:46:04.000Z
mix.exs
simedw/fastimage
b7ad5506984de72d00c91e22a004cf9d19448956
[ "MIT" ]
13
2016-08-10T15:11:42.000Z
2022-01-12T23:06:14.000Z
defmodule Fastimage.Mixfile do
  use Mix.Project

  @name "Fastimage"
  @version "1.0.0-rc4"
  @source "https://github.com/stephenmoloney/fastimage"
  @maintainers ["Stephen Moloney"]
  @elixir_versions ">= 1.4.0"
  @allowed_hackney_versions ~w(1.6 1.7 1.8 1.9 1.13 1.14)
  @hackney_versions "~> " <> Enum.join(@allowed_hackney_versions, " or ~> ")

  def project do
    [
      app: :fastimage,
      name: @name,
      version: @version,
      source_url: @source,
      elixir: @elixir_versions,
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      preferred_cli_env: coveralls(),
      test_coverage: [
        tool: ExCoveralls
      ],
      description: description(),
      deps: deps(),
      package: package(),
      docs: docs(),
      aliases: aliases()
    ]
  end

  def application do
    [
      extra_applications: []
    ]
  end

  defp deps do
    [
      {:hackney, @hackney_versions},

      # dev/test only
      {:excoveralls, "~> 0.10", only: [:test], runtime: false},
      {:benchfella, "~> 0.3", only: [:dev], runtime: false},
      {:credo, "~> 0.10", only: [:dev, :test], runtime: false},
      {:earmark, "~> 1.2", only: [:dev], runtime: false},
      {:ex_doc, "~> 0.19", only: [:dev], runtime: false}
    ]
  end

  defp description do
    """
    #{@name} finds the dimensions/size or file type of a remote or local image file
    given the file path or uri respectively.
    """
  end

  defp package do
    %{
      licenses: ["MIT"],
      maintainers: @maintainers,
      links: %{"GitHub" => @source},
      files: ~w(priv bench/fastimage_bench.exs lib mix.exs README* LICENCE* CHANGELOG*)
    }
  end

  defp docs do
    [
      main: "api-reference"
    ]
  end

  defp aliases do
    [
      prep: ["clean", "format", "compile", "credo #{credo_args()}"]
    ]
  end

  defp credo_args do
    "--strict --ignore maxlinelength,cyclomaticcomplexity,todo"
  end

  def coveralls do
    [
      coveralls: :test,
      "coveralls.detail": :test,
      "coveralls.post": :test,
      "coveralls.html": :test
    ]
  end
end
22.793478
87
0.577969
f7900b68c483dbf73bd78c380cb16240b6550678
1,822
ex
Elixir
clients/compute/lib/google_api/compute/v1/model/instance_group_manager_status.ex
linjunpop/elixir-google-api
444cb2b2fb02726894535461a474beddd8b86db4
[ "Apache-2.0" ]
null
null
null
clients/compute/lib/google_api/compute/v1/model/instance_group_manager_status.ex
linjunpop/elixir-google-api
444cb2b2fb02726894535461a474beddd8b86db4
[ "Apache-2.0" ]
null
null
null
clients/compute/lib/google_api/compute/v1/model/instance_group_manager_status.ex
linjunpop/elixir-google-api
444cb2b2fb02726894535461a474beddd8b86db4
[ "Apache-2.0" ]
null
null
null
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the &quot;License&quot;);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an &quot;AS IS&quot; BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.

defmodule GoogleApi.Compute.V1.Model.InstanceGroupManagerStatus do
  @moduledoc """

  ## Attributes

  - isStable (boolean()): [Output Only] A bit indicating whether the managed instance group is in a stable state. A stable state means that: none of the instances in the managed instance group is currently undergoing any type of change (for example, creation, restart, or deletion); no future changes are scheduled for instances in the managed instance group; and the managed instance group itself is not being modified. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :isStable => any()
        }

  field(:isStable)
end

defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.InstanceGroupManagerStatus do
  def decode(value, options) do
    GoogleApi.Compute.V1.Model.InstanceGroupManagerStatus.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.InstanceGroupManagerStatus do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
37.958333
441
0.760154
f7900f16e5f7d0b7e303698e487064dfa075641c
564
exs
Elixir
tests/message/test.exs
pilotier/nimler
bc80503d9e300ba59889f533b79dc51962083ba7
[ "MIT" ]
83
2019-10-20T12:04:33.000Z
2022-03-17T01:04:26.000Z
tests/message/test.exs
pilotier/nimler
bc80503d9e300ba59889f533b79dc51962083ba7
[ "MIT" ]
18
2019-10-12T17:56:25.000Z
2022-03-21T03:05:02.000Z
tests/message/test.exs
pilotier/nimler
bc80503d9e300ba59889f533b79dc51962083ba7
[ "MIT" ]
6
2020-02-21T14:00:30.000Z
2021-11-06T04:18:38.000Z
ExUnit.start(trace: false, seed: 0)

defmodule NimlerMessage.Test do
  use ExUnit.Case, async: false
  alias NimlerMessage, as: NimlerWrapper

  setup do
    {:ok, [mymsg: 123]}
  end

  test "send_message()", context do
    assert(:ok == NimlerWrapper.send_message(self(), context[:mymsg]))

    receive do
      msg -> assert(msg == context[:mymsg])
    end
  end

  test "send_message_caller()", context do
    assert(:ok == NimlerWrapper.send_message_caller(context[:mymsg]))

    receive do
      msg -> assert(msg == context[:mymsg])
    end
  end
end
24.521739
72
0.654255
f79024e285946d7ec4c3fab59569c3e7b725468b
4,856
exs
Elixir
test/features/visitor_views_post_test.exs
synion/tilex
ea29646830efaa89fc47fad347f6e495ff7ce48b
[ "MIT" ]
1
2019-05-28T20:43:28.000Z
2019-05-28T20:43:28.000Z
test/features/visitor_views_post_test.exs
synion/tilex
ea29646830efaa89fc47fad347f6e495ff7ce48b
[ "MIT" ]
1
2019-02-11T23:14:15.000Z
2019-02-11T23:14:15.000Z
test/features/visitor_views_post_test.exs
synion/tilex
ea29646830efaa89fc47fad347f6e495ff7ce48b
[ "MIT" ]
1
2019-12-02T08:59:45.000Z
2019-12-02T08:59:45.000Z
defmodule VisitorViewsPostTest do use Tilex.IntegrationCase, async: Application.get_env(:tilex, :async_feature_test) alias TilexWeb.Endpoint alias Tilex.Integration.Pages.{ PostShowPage } test "the page shows a post", %{session: session} do developer = Factory.insert!(:developer) channel = Factory.insert!(:channel, name: "command-line") post = Factory.insert!( :post, title: "A special post", body: "This is how to be super awesome!", developer: developer, channel: channel ) session |> PostShowPage.navigate(post) |> PostShowPage.expect_post_attributes(%{ title: "A special post", body: "This is how to be super awesome!", channel: "#command-line", likes_count: 1 }) assert page_title(session) == "A special post - Today I Learned" end test "and sees marketing copy, if it exists", %{session: session} do marketing_channel = Factory.insert!(:channel, name: "elixir") post_in_marketing_channel = Factory.insert!(:post, channel: marketing_channel) copy = session |> visit(post_path(Endpoint, :show, post_in_marketing_channel)) |> find(Query.css(".more-info")) |> Element.text() {:ok, marketing_content} = File.read("lib/tilex_web/templates/shared/_elixir.html.eex") assert copy =~ String.slice(marketing_content, 0, 10) end test "and sees a special slug", %{session: session} do post = Factory.insert!(:post, title: "Super Sluggable Title") url = session |> visit(post_path(Endpoint, :show, post)) |> current_url assert url =~ "#{post.slug}-super-sluggable-title" changeset = Post.changeset(post, %{title: "Alternate Also Cool Title"}) Repo.update!(changeset) post = Repo.get(Post, post.id) url = session |> visit(post_path(Endpoint, :show, post)) |> current_url assert url =~ "#{post.slug}-alternate-also-cool-title" end test "and sees a channel specific twitter card and a post specific twitter description", %{ session: session } do popular_channel = Factory.insert!(:channel, name: "command-line") post = Factory.insert!( :post, channel: popular_channel, body: "One sentence that sets up the post.\nAnother sentence that is more informative" ) image_url = session |> visit(post_path(Endpoint, :show, post)) |> find(Query.css("meta[name='twitter:image']", visible: false)) |> Element.attr("content") assert image_url =~ "command_line_twitter_card.png" twitter_description = session |> find(Query.css("meta[name='twitter:description']", visible: false)) |> Element.attr("content") assert twitter_description =~ "One sentence that sets up the post." 
refute twitter_description =~ "Another sentence" end test "and clicks 'like' for that post", %{session: session} do developer = Factory.insert!(:developer) post = Factory.insert!(:post, title: "A special post", developer: developer, likes: 1) session |> visit(post_path(Endpoint, :show, post)) |> find(Query.css("header[data-likes-loaded=true]")) link = find(session, Query.css(".post .js-like-action")) Element.click(link) session |> assert_has(Query.css("header[data-likes-loaded=true]")) |> assert_has(Query.css(".post .js-like-action.liked")) post = Repo.get(Post, post.id) assert post.likes == 2 assert post.max_likes == 2 Element.click(link) session |> assert_has(Query.css("header[data-likes-loaded=true]")) |> assert_has(Query.css(".post .js-like-action")) |> refute_has(Query.css(".post .js-like-action.liked")) post = Repo.get(Post, post.id) assert post.likes == 1 assert post.max_likes == 2 end test "sees raw markdown version", %{session: session} do title = "A special post" body = """ # title **some text** [hashrocket](http://hashrocket.com) """ developer = Factory.insert!(:developer) post = Factory.insert!( :post, title: title, body: body, developer: developer ) session |> visit("#{post_path(Endpoint, :show, post)}.md") assert text(session) == String.trim(""" #{title} #{body} #{developer.username} #{TilexWeb.SharedView.display_date(post)} """) end test "via the random url", %{session: session} do post = Factory.insert!(:post) session |> visit(post_path(Endpoint, :random)) |> PostShowPage.expect_post_attributes(%{ title: post.title, body: post.body, channel: post.channel.name, likes_count: 1 }) assert page_title(session) == "#{post.title} - Today I Learned" end end
26.977778
94
0.626647
f7902b750ac766a67dd0308292935484b6d2f481
453
ex
Elixir
lib/control_flow_3/ok.ex
mikan/elixir-practice
624525605eb2324e0c55a4ddcb68388c0d2ecefc
[ "Apache-2.0" ]
null
null
null
lib/control_flow_3/ok.ex
mikan/elixir-practice
624525605eb2324e0c55a4ddcb68388c0d2ecefc
[ "Apache-2.0" ]
1
2020-01-28T00:19:53.000Z
2020-01-28T00:19:53.000Z
lib/control_flow_3/ok.ex
mikan/elixir-practice
624525605eb2324e0c55a4ddcb68388c0d2ecefc
[ "Apache-2.0" ]
null
null
null
defmodule ControlFlow3 do
  def ok!({atom, data}) do
    case atom == :ok do
      true -> data
      false -> raise "atom: #{atom}, data: #{data}"
    end
  end
end

# True case
file = ControlFlow3.ok! File.open("README.md")
File.close(file)

# False calse
#_ = ControlFlow3.ok! File.open("README2.md")
# ** (RuntimeError) atom: error, data: enoent
#     lib/control_flow_3/ok.ex:5: ControlFlow3.ok!/1
#     (elixir) lib/code.ex:370: Code.require_file/2
23.842105
51
0.646799
f7903c62127a66e16e552a11665568e8b134166a
378
ex
Elixir
example/lib/example_web/controllers/secret_controller.ex
bvandgrift/ueberauth_microsoft_single_tenant
680523dca182536ac8caa87a7ec31840a877eac4
[ "MIT" ]
null
null
null
example/lib/example_web/controllers/secret_controller.ex
bvandgrift/ueberauth_microsoft_single_tenant
680523dca182536ac8caa87a7ec31840a877eac4
[ "MIT" ]
null
null
null
example/lib/example_web/controllers/secret_controller.ex
bvandgrift/ueberauth_microsoft_single_tenant
680523dca182536ac8caa87a7ec31840a877eac4
[ "MIT" ]
null
null
null
defmodule ExampleWeb.SecretController do
  use ExampleWeb, :controller

  def index(conn, _params) do
    current_user = get_session(conn, :current_user)

    if current_user do
      conn
      |> render("index.html", current_user: current_user)
    else
      conn
      |> put_status(:not_found)
      |> put_view(ExampleWeb.ErrorView)
      |> render("404.html")
    end
  end
end
23.625
62
0.674603
f7905f1ac72fcf2e727cb05055d7f4351451d5b2
1,544
exs
Elixir
test/mailgun_ex/api_bypass_test.exs
aforward/mailgun_ex
70456f4086ff5207c9c4b184f6237447448f0f9c
[ "MIT" ]
null
null
null
test/mailgun_ex/api_bypass_test.exs
aforward/mailgun_ex
70456f4086ff5207c9c4b184f6237447448f0f9c
[ "MIT" ]
null
null
null
test/mailgun_ex/api_bypass_test.exs
aforward/mailgun_ex
70456f4086ff5207c9c4b184f6237447448f0f9c
[ "MIT" ]
1
2019-05-10T13:32:19.000Z
2019-05-10T13:32:19.000Z
defmodule MailgunEx.ApiBypassTest do
  use ExUnit.Case

  alias MailgunEx.{Api, BypassApi}

  # In this file, we will be making stubbed out requests
  # to a local *bypass* server*.  These tests will load
  # fixtures from ./test/fixtures which have been generated
  # by running the MailgunEx.ApiSandboxTest module tests

  # Most tests here should have an associated live test
  # (where possible) so that we can assert data as close
  # to the real thing as often as possible.

  setup do
    bypass = BypassApi.setup()
    on_exit(fn -> BypassApi.teardown() end)
    {:ok, bypass: bypass}
  end

  test "GET /domains", %{bypass: bypass} do
    BypassApi.request(bypass, "GET", "/domains", 200, "domains.json")

    {ok, data} = Api.request(:get, resource: "domains")

    assert 200 == ok
    assert is_map(data)
    assert 5 == data[:total_count]
    assert 5 == data[:items] |> Enum.count()
  end

  test "POST /<domain>/messages", %{bypass: bypass} do
    BypassApi.request(bypass, "POST", "/myapp.local/messages", 200, "messages.json")

    {ok, data} =
      Api.request(
        :post,
        domain: "myapp.local",
        resource: "messages",
        params: [
          from: "[email protected]",
          to: "[email protected]",
          subject: "Hello From Test",
          text: "Hello, from test.",
          html: "<b>Hello</b>, from test."
        ]
      )

    assert 200 == ok
    assert "Queued. Thank you." == data[:message]
    assert "<[email protected]>" == data[:id]
  end
end
28.072727
84
0.620466
f79084fc0f98dadd70463f2c9fb675c860b0c653
267
ex
Elixir
lib/problems/002.ex
andreogle/elixir-euler
12fb76c63dd170dd4d4a7bb7c943831bc92cf877
[ "MIT" ]
null
null
null
lib/problems/002.ex
andreogle/elixir-euler
12fb76c63dd170dd4d4a7bb7c943831bc92cf877
[ "MIT" ]
null
null
null
lib/problems/002.ex
andreogle/elixir-euler
12fb76c63dd170dd4d4a7bb7c943831bc92cf877
[ "MIT" ]
null
null
null
defmodule Problem2 do
  require Integer

  def solve(limit) do
    fibonacci(limit)
  end

  def fibonacci(0), do: 0
  def fibonacci(1), do: 1

  def fibonacci(n) do
    sum = fibonacci(n - 1) + fibonacci(n - 2)
    if Integer.is_even(sum), do: sum, else: 0
  end
end
17.8
45
0.644195
f79099b6f3bf752cd5c3ff59f3891ab39794440a
1,773
ex
Elixir
clients/admin/lib/google_api/admin/directory_v1/model/user_ssh_public_key.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/admin/lib/google_api/admin/directory_v1/model/user_ssh_public_key.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
1
2020-12-18T09:25:12.000Z
2020-12-18T09:25:12.000Z
clients/admin/lib/google_api/admin/directory_v1/model/user_ssh_public_key.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
1
2020-10-04T10:12:44.000Z
2020-10-04T10:12:44.000Z
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.

defmodule GoogleApi.Admin.Directory_v1.Model.UserSshPublicKey do
  @moduledoc """
  JSON template for a POSIX account entry.

  ## Attributes

  *   `expirationTimeUsec` (*type:* `String.t`, *default:* `nil`) - An expiration time in microseconds since epoch.
  *   `fingerprint` (*type:* `String.t`, *default:* `nil`) - A SHA-256 fingerprint of the SSH public key. (Read-only)
  *   `key` (*type:* `String.t`, *default:* `nil`) - An SSH public key.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :expirationTimeUsec => String.t(),
          :fingerprint => String.t(),
          :key => String.t()
        }

  field(:expirationTimeUsec)
  field(:fingerprint)
  field(:key)
end

defimpl Poison.Decoder, for: GoogleApi.Admin.Directory_v1.Model.UserSshPublicKey do
  def decode(value, options) do
    GoogleApi.Admin.Directory_v1.Model.UserSshPublicKey.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.Admin.Directory_v1.Model.UserSshPublicKey do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
33.45283
117
0.716864
f790a8d5c3b565795a86fbccc9a319a7ea9df012
33,591
ex
Elixir
lib/ex_admin/register.ex
totorigolo/ex_admin
ba494582c5816f839aa4b6451893751f3cfc627b
[ "MIT" ]
null
null
null
lib/ex_admin/register.ex
totorigolo/ex_admin
ba494582c5816f839aa4b6451893751f3cfc627b
[ "MIT" ]
null
null
null
lib/ex_admin/register.ex
totorigolo/ex_admin
ba494582c5816f839aa4b6451893751f3cfc627b
[ "MIT" ]
null
null
null
defmodule ExAdmin.Register do @moduledoc """ Allows registering a resource or a page to be displayed with ExAdmin. For each model you wanted rendered by ExAdmin, use the `register_resource` call. For each general page (like a dashboard), use the `register_page` call. To allow ExAdmin to manage the resource with defaults, do not place any additional code in the block of `register_resource`. ## Examples Register the Survey.Answer model with all defaults. defmodule Survey.ExAdmin.Answer do use ExAdmin.Register register_resource Survey.Answer do end end ## Commands available in the register_resource do block * `menu` - Customize the properties of the menu item * `index` - Customize the index page * `show` - Customize the show page * `form` - Customize the form page * `query` - Customize the `Ecto` queries for each page * `options` - Change various options for a resource * `member_action` - Add a custom action for id based requests * `filter` - Disable/Customize the filter pages * `controller` - Override the default controller * `action_items` - Define which actions are available for a resource * `batch_actions` - Customize the batch_actions shown on the index page * `csv` - Customize the csv export file * `collection_action` - Add a custom action for collection based requests * `clear_action_items!` - Remove the action item buttons * `action_item` - Defines custom action items * `create_changeset` - Defines custom changeset function for creation * `update_changeset` - Defines custom changeset function for updates """ if File.dir?("/tmp") do @filename "/tmp/ex_admin_registered" else @filename System.tmp_dir() <> "/ex_admin_registered" end import ExAdmin.Utils import ExAdmin.DslUtils defmacro __using__(_) do quote do use ExAdmin.Index, except: [actions: 1] use ExAdmin.Show use ExAdmin.Form, except: [actions: 1] use ExAdmin.CSV import unquote(__MODULE__) import Phoenix.HTML.Tag import Ecto.Query, only: [from: 2] import Xain, except: [input: 1, input: 2, input: 3, menu: 1, form: 2] import ExAdmin.ViewHelpers Module.register_attribute(__MODULE__, :member_actions, accumulate: true, persist: true) Module.register_attribute(__MODULE__, :collection_actions, accumulate: true, persist: true) end end File.rm(@filename) File.touch(@filename) @doc """ Register an Ecto model. Once registered, ExAdmin adds the resource to the administration pages. 
If no additional code is added to the do block, the resource will be rendered with defaults, including: * A paginated index page listing all columns in the model's database table * A details page (show) listing fields and simple associations * New and edit pages * A menu item * A CSV export link on the index page # Default Association Rendering ExAdmin will render an association using the following algorithm in the following order: * Look for a `:name` field in the association * Look for a display_name/1 function in the Admin Resource Module * Look for a display_name/1 function in the Model's Module * Use the 2nd field in the Model's schema """ defmacro register_resource(mod, do: block) do quote location: :keep do import ExAdmin.ViewHelpers import ExAdmin.Utils require Logger @all_options [:edit, :show, :new, :delete] Module.register_attribute(__MODULE__, :query, accumulate: false, persist: true) Module.register_attribute(__MODULE__, :index_filters, accumulate: true, persist: true) Module.register_attribute(__MODULE__, :batch_actions, accumulate: true, persist: true) Module.register_attribute(__MODULE__, :selectable_column, accumulate: false, persist: true) Module.register_attribute(__MODULE__, :form_items, accumulate: true, persist: true) Module.register_attribute(__MODULE__, :controller_plugs, accumulate: true, persist: true) Module.register_attribute(__MODULE__, :sidebars, accumulate: true, persist: true) Module.register_attribute(__MODULE__, :scopes, accumulate: true, persist: true) Module.register_attribute(__MODULE__, :actions, accumulate: true, persist: true) Enum.each(@all_options, &Module.put_attribute(__MODULE__, :actions, &1)) module = unquote(mod) Module.put_attribute(__MODULE__, :module, module) Module.put_attribute(__MODULE__, :query, nil) Module.put_attribute(__MODULE__, :selectable_column, nil) Module.put_attribute(__MODULE__, :update_changeset, :changeset) Module.put_attribute(__MODULE__, :create_changeset, :changeset) @name_column Module.get_attribute(__MODULE__, :name_column) || apply(ExAdmin.Helpers, :get_name_field, [module]) alias unquote(mod) import Ecto.Query def config do apply(__MODULE__, :__struct__, []) end unquote(block) query_opts = case Module.get_attribute(__MODULE__, :query) do nil -> list = module.__schema__(:associations) |> Enum.map(&ExAdmin.Register.build_query_association(module, &1)) |> Enum.filter(&(not is_nil(&1))) query = %{all: [preload: list]} Module.put_attribute(__MODULE__, :query, query) query other -> other end controller = case Module.get_attribute(__MODULE__, :controller) do nil -> controller_mod = String.to_atom("#{module}Controller") Module.put_attribute(__MODULE__, :controller, controller_mod) other -> Logger.warn("Should not get here - controller: #{inspect(other)}") end menu_opts = case Module.get_attribute(__MODULE__, :menu) do false -> %{none: true} nil -> %{priority: 10, label: base_name(module) |> Inflex.pluralize()} other -> Enum.into(other, %{}) end controller_route = base_name(module) |> Inflex.underscore() |> Inflex.pluralize() controller_route = case Module.get_attribute(__MODULE__, :options) do nil -> controller_route options -> Keyword.get(options, :controller_route, controller_route) end plugs = case Module.get_attribute(__MODULE__, :controller_plugs) do nil -> [] list -> Enum.reverse(list) end sidebars = case Module.get_attribute(__MODULE__, :sidebars) do nil -> [] list -> Enum.reverse(list) end scopes = case Module.get_attribute(__MODULE__, :scopes) do nil -> [] list -> Enum.reverse(list) end controller_filters = 
(Module.get_attribute(__MODULE__, :controller_filters) || []) |> ExAdmin.Helpers.group_reduce_by_reverse() action_labels = ExAdmin.Register.get_action_labels(Module.get_attribute(__MODULE__, :actions)) actions = ExAdmin.Register.get_action_items( Module.get_attribute(__MODULE__, :actions), @all_options ) |> ExAdmin.Register.custom_action_actions( Module.get_attribute(__MODULE__, :member_actions), module, :member_actions ) |> ExAdmin.Register.custom_action_actions( Module.get_attribute(__MODULE__, :collection_actions), module, :collection_actions ) defstruct controller: @controller, controller_methods: Module.get_attribute(__MODULE__, :controller_methods), title_actions: &ExAdmin.default_resource_title_actions/2, type: :resource, resource_model: module, resource_name: resource_name(module), query_opts: query_opts, controller_route: controller_route, menu: menu_opts, actions: actions, action_labels: action_labels, member_actions: Module.get_attribute(__MODULE__, :member_actions), collection_actions: Module.get_attribute(__MODULE__, :collection_actions), controller_filters: controller_filters, index_filters: Module.get_attribute(__MODULE__, :index_filters), selectable_column: Module.get_attribute(__MODULE__, :selectable_column), position_column: Module.get_attribute(__MODULE__, :position_column), name_column: @name_column, batch_actions: Module.get_attribute(__MODULE__, :batch_actions), plugs: plugs, sidebars: sidebars, scopes: scopes, create_changeset: @create_changeset, update_changeset: @update_changeset def run_query(repo, defn, action, id \\ nil) do %__MODULE__{} |> Map.get(:resource_model) |> ExAdmin.Query.run_query(repo, defn, action, id, @query) end def run_query_counts(repo, defn, action, id \\ nil) do %__MODULE__{} |> Map.get(:resource_model) |> ExAdmin.Query.run_query_counts(repo, defn, action, id, @query) end def build_admin_search_query(keywords) do cond do function_exported?(@module, :admin_search_query, 1) -> apply(@module, :admin_search_query, [keywords]) function_exported?(__MODULE__, :admin_search_query, 1) -> apply(__MODULE__, :admin_search_query, [keywords]) true -> suggest_admin_search_query(keywords) end end defp suggest_admin_search_query(keywords) do field = @name_column query = from(r in @module, order_by: ^field) case keywords do nil -> query "" -> query keywords -> from(r in query, where: ilike(field(r, ^field), ^"%#{keywords}%")) end end def plugs(), do: @controller_plugs File.write!(unquote(@filename), "#{__MODULE__}\n", [:append]) end end @doc false def get_action_labels(nil), do: [] def get_action_labels([opts | _]) when is_list(opts) do opts[:labels] || [] end def get_action_labels(_), do: [] @doc false def get_action_items(nil, _), do: [] def get_action_items(actions, all_options) when is_list(actions) do {atoms, keywords} = List.flatten(actions) |> Enum.reduce({[], []}, fn atom, {acca, acck} when is_atom(atom) -> {[atom | acca], acck} kw, {acca, acck} -> {acca, [kw | acck]} end) atoms = Enum.reverse(atoms) keywords = Enum.reverse(Keyword.drop(keywords, [:labels])) cond do keywords[:only] && keywords[:except] -> raise "options :only and :except cannot be used together" keywords[:only] -> Keyword.delete(keywords, :only) ++ keywords[:only] keywords[:except] -> Keyword.delete(keywords, :except) ++ (all_options -- keywords[:except]) true -> keywords ++ atoms end end def custom_action_actions(actions, custom_actions, module, type) do custom_actions |> Enum.reduce(actions, fn {name, opts}, acc -> fun = quote do name = unquote(name) human_name = case 
unquote(opts)[:opts][:label] do nil -> humanize(name) label -> label end attrs = [] attrs = if unquote(opts)[:opts][:class] do class = unquote(opts)[:opts][:class] attrs ++ [class: class] else attrs end attrs = if unquote(opts)[:opts][:data_confirm] do data_confirm = unquote(opts)[:opts][:data_confirm] attrs ++ ["data-confirm": data_confirm] else attrs end module = unquote(module) type = unquote(type) if type == :member_actions do fn id -> resource = struct(module.__struct__, id: id) url = ExAdmin.Utils.admin_resource_path(resource, :member, [name]) attrs = [href: url, "data-method": :put] ++ attrs ExAdmin.ViewHelpers.action_item_link(human_name, attrs) end else fn id -> resource = module url = ExAdmin.Utils.admin_resource_path(resource, :collection, [name]) attrs = [href: url] ++ attrs ExAdmin.ViewHelpers.action_item_link(human_name, attrs) end end end action = if type == :member_actions, do: :show, else: :index [{action, fun} | acc] end) end @doc """ Override the controller for a resource. Allows custom actions, filters, and plugs for the controller. Commands in the controller block include: * `define_method` - Create a controller action with the body of the action * `before_filter` - Add a before_filter to the controller * `after_filter` - Add an after callback to the controller * `redirect_to` - Redirects to another page * `plug` - Add a plug to the controller """ defmacro controller(do: block) do quote do Module.register_attribute(__MODULE__, :controller_methods, accumulate: false, persist: true) Module.register_attribute(__MODULE__, :controller_filters, accumulate: true, persist: true) Module.put_attribute(__MODULE__, :controller_methods, []) unquote(block) end end defmacro controller(controller_mod) do quote do Module.put_attribute(__MODULE__, :controller, unquote(controller_mod)) end end @doc """ Override the changesets for a controller's update action """ defmacro update_changeset(changeset) do quote do Module.put_attribute(__MODULE__, :update_changeset, unquote(changeset)) end end @doc """ Override the changesets for a controller's create action """ defmacro create_changeset(changeset) do quote do Module.put_attribute(__MODULE__, :create_changeset, unquote(changeset)) end end @doc """ Override an action on a controller. Allows the customization of controller actions. ## Examples Override the index action to redirect to the root page. controller do define_method(:index) do redirect_to "/" end end """ defmacro define_method(name, do: block) do quote do methods = Module.get_attribute(__MODULE__, :controller_methods) Module.put_attribute(__MODULE__, :controller_methods, [{unquote(name), []} | methods]) unquote(block) end end @doc """ Add a before_filter to a controller. The before filter is executed before the controller action(s) are executed. Normally, the function should return the conn struct. However, if you want to modify the params, then return the tuple `{conn, new_parms}`. ## Examples The following example illustrates how to add a sync action that will be run before the index page is loaded. controller do before_filter :sync, only: [:index] def sync(conn, _) do BackupRestore.sync conn end end controller do before_filter :no_change, except: [:create, :modify] def no_change(conn, params) do {conn, put_in(params, [:setting, :no_mod], true)} end end """ defmacro before_filter(name, opts \\ []) do quote location: :keep do Module.put_attribute( __MODULE__, :controller_filters, {:before_filter, {unquote(name), unquote(opts)}} ) end end @doc """ Add an after filter to a controller. 
The after filter is executed after the controller action(s) are executed and before the page is rendered/redirected. In the case of `update` and `create`, it is only called on success. Normally, the function should return the conn struct. However, you can also return a `{conn, params, resource}` to modify the params and resource. ## Examples controller do after_filter :do_after, only: [:create, :update] def do_after(conn, params, resource, :create) do user = Repo.all(User) |> hd resource = Product.changeset(resource, %{user_id: user.id}) |> Repo.update! {Plug.Conn.assign(conn, :product, resource), params, resource} end def do_after(conn, _params, _resource, :update) do Plug.Conn.assign(conn, :answer, 42) end end """ defmacro after_filter(name, opts \\ []) do quote location: :keep do Module.put_attribute( __MODULE__, :controller_filters, {:after_filter, {unquote(name), unquote(opts)}} ) end end @doc """ Redirect to a given path. Use this command in a controller block to redirect to another page. """ defmacro redirect_to(path) do quote do [{name, opts} | tail] = Module.get_attribute(__MODULE__, :controller_methods) new_opts = [{:redirect_to, unquote(path)} | opts] Module.put_attribute(__MODULE__, :controller_methods, [{name, new_opts} | tail]) end end @doc """ Add a plug to the controller. Add custom plugs to a controller. ## Example controller do plug :my_plug, the_answer: 42 end """ defmacro plug(name, opts \\ []) do quote do Module.put_attribute(__MODULE__, :controller_plugs, {unquote(name), unquote(opts)}) end end @doc """ Register a static page. Use `register_page` to create a static page, like a dashboard, or welcome page to the admin interface. See the default dashboard page for an example. """ defmacro register_page(name, do: block) do quote location: :keep do import ExAdmin.Register, except: [column: 1] use ExAdmin.Page Module.register_attribute(__MODULE__, :query, accumulate: false, persist: true) Module.register_attribute(__MODULE__, :index_filters, accumulate: true, persist: true) Module.register_attribute(__MODULE__, :batch_actions, accumulate: true, persist: true) Module.register_attribute(__MODULE__, :selectable_column, accumulate: false, persist: true) Module.register_attribute(__MODULE__, :form_items, accumulate: true, persist: true) Module.register_attribute(__MODULE__, :sidebars, accumulate: true, persist: true) Module.put_attribute(__MODULE__, :controller_plugs, nil) page_name = unquote(name) unquote(block) # query_opts = Module.get_attribute(__MODULE__, :query) menu_opts = case Module.get_attribute(__MODULE__, :menu) do false -> %{none: true} nil -> %{label: page_name, priority: 99} other -> Enum.into(other, %{}) end controller_methods = Module.get_attribute(__MODULE__, :controller_methods) page_name = Kernel.to_string(page_name) plugs = case Module.get_attribute(__MODULE__, :controller_plugs) do nil -> [] list -> Enum.reverse(list) end sidebars = case Module.get_attribute(__MODULE__, :sidebars) do nil -> [] list -> Enum.reverse(list) end defstruct controller: Module.concat(Application.get_env(:ex_admin, :project), AdminController), controller_methods: Module.get_attribute(__MODULE__, :controller_methods), type: :page, page_name: page_name, title_actions: &ExAdmin.default_page_title_actions/2, controller_route: page_name |> Inflex.parameterize("_"), menu: menu_opts, member_actions: Module.get_attribute(__MODULE__, :member_actions), collection_actions: Module.get_attribute(__MODULE__, :collection_actions), controller_filters: Module.get_attribute(__MODULE__, 
:controller_filters), index_filters: [false], # selectable_column: Module.get_attribute(__MODULE__, :selectable_column), batch_actions: Module.get_attribute(__MODULE__, :batch_actions), plugs: plugs, sidebars: sidebars, scopes: [] def plugs(), do: @controller_plugs File.write!(unquote(@filename), "#{__MODULE__}\n", [:append]) end end @doc """ Add a sidebar to the page. The available options are: * `:only` - Filters the list of actions for the filter. * `:except` - Filters out actions in the except atom or list. ## Examples sidebar "ExAdmin Demo", only: [:index, :show] do Phoenix.View.render ExAdminDemo.AdminView, "sidebar_links.html", [] end sidebar :Orders, only: :show do attributes_table_for resource do row "title", fn(_) -> { resource.title } end row "author", fn(_) -> { resource.author } end end end # customize the panel sidebar "Expert Administration", box_attributes: ".box.box-warning", header_attributes: ".box-header.with-border.text-yellow" do Phoenix.View.render MyApp.AdminView, "sidebar_warning.html", [] end """ defmacro sidebar(name, opts \\ [], do: block) do contents = quote do unquote(block) end quote location: :keep, bind_quoted: [name: escape(name), opts: escape(opts), contents: escape(contents)] do fun_name = "side_bar_#{name}" |> String.replace(" ", "_") |> String.to_atom() def unquote(fun_name)(var!(conn), var!(resource)) do _ = var!(conn) _ = var!(resource) unquote(contents) end Module.put_attribute(__MODULE__, :sidebars, {name, opts, {__MODULE__, fun_name}}) end end @doc """ Scope the index page. ## Examples scope :all, default: true scope :available, fn(q) -> now = Ecto.Date.utc where(q, [p], p.available_on <= ^now) end scope :drafts, fn(q) -> now = Ecto.Date.utc where(q, [p], p.available_on > ^now) end scope :featured_products, [], fn(q) -> where(q, [p], p.featured == true) end scope :featured """ defmacro scope(name) do quote location: :keep do Module.put_attribute(__MODULE__, :scopes, {unquote(name), []}) end end defmacro scope(name, opts_or_fun) do quote location: :keep do opts_or_fun = unquote(opts_or_fun) if is_function(opts_or_fun) do scope(unquote(name), [], unquote(opts_or_fun)) else Module.put_attribute(__MODULE__, :scopes, {unquote(name), opts_or_fun}) end end end defmacro scope(name, opts, fun) do contents = quote do unquote(fun) end quote location: :keep, bind_quoted: [name: escape(name), opts: escape(opts), contents: escape(contents)] do fun_name = "scope_#{name}" |> String.replace(" ", "_") |> String.to_atom() def unquote(fun_name)(var!(resource)) do unquote(contents).(var!(resource)) end opts = [{:fun, {__MODULE__, fun_name}} | opts] Module.put_attribute(__MODULE__, :scopes, {name, opts}) end end @doc """ Customize the resource admin page by setting options for the page. The available actions are: * TBD """ defmacro options(opts) do quote do Module.put_attribute(__MODULE__, :options, unquote(opts)) end end @doc """ Customize the menu of a page. The available options are: * `:priority` - Sets the position of the menu, with 0 being the left most menu item * `:label` - The name used in the menu * `:if` - Only display the menu item if the condition returns non false/nil * `:url` - The custom URL used in the menu link ## Examples The following example adds a custom label, sets the priority, and is only displayed if the current user is a superadmin. 
menu label: "Backup & Restore", priority: 14, if: &__MODULE__.is_superadmin/1 This example disables the menu item: menu false """ defmacro menu(opts) do quote do Module.put_attribute(__MODULE__, :menu, unquote(opts)) end end @doc """ Add query options to the Ecto queries. For the most part, use `query` to setup preload options. Query customization can be done for all pages, or individually specified. ## Examples Load the belongs_to :category, has_many :phone_numbers, and the has_many :groups for all pages for the resource. query do %{ all: [preload: [:category, :phone_numbers, :groups]], } end Load the has_many :contacts association, as well as the has_many :phone_numbers of the contact query do %{show: [preload: [contacts: [:phone_numbers]]] } end A more complicated example that defines a default preload, with a more specific preload for the show page. query do %{ all: [preload: [:group]], show: [preload: [:group, messages: [receiver: [:category, :phone_numbers]]]] } end Change the index page default sort order to ascending. query do %{index: [default_sort_order: :asc]} end Change the index page default sort field and order. query do %{index: [default_sort: [asc: :name]]} end Change the index page default sort field. query do %{index: [default_sort_field: :name]} end """ defmacro query(do: qry) do quote do Module.put_attribute(__MODULE__, :query, unquote(qry)) end end @doc """ Add a column to a table. Can be used on the index page, or in the table attributes on the show page. A number of options are valid: * `label` - Change the name of the column heading * `fields` - Add the fields to be included in an association * `link` - Set to true to add a link to an association * `fn/1` - An anonymous function to be called to render the field * `collection` - Add the collection for a belongs_to association """ defmacro column(name, opts \\ [], fun \\ nil) do quote do opts = ExAdmin.DslUtils.fun_to_opts(unquote(opts), unquote(fun)) var!(columns, ExAdmin.Show) = [{unquote(name), opts} | var!(columns, ExAdmin.Show)] end end @doc """ Drag&drop control for sortable tables. `fa_icon_name` is one of [Font Awesome icons](https://fortawesome.github.io/Font-Awesome/icons/), default - ["bars"](http://fortawesome.github.io/Font-Awesome/icon/bars/) """ defmacro sort_handle_column(fa_icon_name \\ "bars") do quote do column("", [], fn _ -> i("", class: "fa fa-#{unquote(fa_icon_name)} handle", "aria-hidden": "true") end) end end @doc """ Add a row to the attributes table on the show page. See `column/3` for a list of options. """ defmacro row(name, opts \\ [], fun \\ nil) do quote do opts = ExAdmin.DslUtils.fun_to_opts(unquote(opts), unquote(fun)) var!(rows, ExAdmin.Show) = [{unquote(name), opts} | var!(rows, ExAdmin.Show)] end end @doc """ Add a link to a path """ defmacro link_to(name, path, opts \\ quote(do: [])) do quote do opts = Keyword.merge([to: unquote(path)], unquote(opts)) Phoenix.HTML.Link.link("#{unquote(name)}", opts) end end @doc false # Note: `actions/2` has been deprecated. Please use `action_items/1` instead defmacro actions(:all, opts \\ quote(do: [])) do require Logger Logger.warn("actions/2 has been deprecated. Please use action_items/1 instead") quote do opts = unquote(opts) Module.put_attribute(__MODULE__, :actions, unquote(opts)) end end @doc """ Define which actions will be displayed. Action labels could be overriden with `labels` option. 
## Examples action_items except: [:new, :delete, :edit] action_items only: [:new] action_items labels: [delete: "Revoke"] Notes: * this replaces the deprecated `actions/2` macro * `action_items` macro will not remove any custom actions defined by the `action_item` macro. """ defmacro action_items(opts \\ nil) do quote do opts = unquote(opts) Module.put_attribute(__MODULE__, :actions, unquote(opts)) end end @doc """ Add an id based action and show page link. Member actions are those actions that act on an individual record in the database. ## Examples The following example illustrates how to add a restore action to a backup and restore page. member_action :restore, &__MODULE__.restore_action/2 ... def restore_action(conn, params) do case BackupRestore.restore Repo.get(BackupRestore, params[:id]) do {:ok, filename} -> Controller.put_flash(conn, :notice, "Restore \#{filename} complete.") {:error, message} -> Controller.put_flash(conn, :error, "Restore Failed: \#{message}.") end |> Controller.redirect(to: ExAdmin.Utils.admin_resource_path(conn, :index)) end The above example adds the following: * a custom `restore` action to the controller, accessible by the route /admin/:resource/:id/member/restore * a "Restore" action link to the show page ## Options * an optional label: "Button Label" """ defmacro member_action(name, fun, opts \\ []) do quote do Module.put_attribute( __MODULE__, :member_actions, {unquote(name), [fun: unquote(fun), opts: unquote(opts)]} ) end end @doc """ Add a action that acts on a collection and adds a link to the index page. ## Examples The following example shows how to add a backup action on the index page. collection_action :backup, &__MODULE__.backup_action/2, label: "Backup Database!" def backup_action(conn, _params) do Repo.insert %BackupRestore{} Controller.put_flash(conn, :notice, "Backup complete.") |> Controller.redirect(to: ExAdmin.Utils.admin_resource_path(conn, :index)) end The above example adds the following: * a custom `backup` action to the controller, accessible by the route /admin/:resource/collection/backup * a "Backup Database!" action link to the show page ## Options * an optional label: "Button Label" (shown above) """ defmacro collection_action(name, fun, opts \\ []) do quote do Module.put_attribute( __MODULE__, :collection_actions, {unquote(name), [fun: unquote(fun), opts: unquote(opts)]} ) end end @doc """ Clear the default [:edit, :show, :new, :delete] action items. Can be used alone, or followed with `action_item` to add custom actions. """ defmacro clear_action_items! do quote do Module.delete_attribute(__MODULE__, :actions) Module.register_attribute(__MODULE__, :actions, accumulate: true, persist: true) end end @doc """ Add a custom action button to the page. ## Examples The following example demonstrates how to add a custom button to your index page, with no other action buttons due to the `clear_action_items!` call. clear_action_items! action_item :index, fn -> action_item_link "Something Special", href: "/my/custom/route" end An example of adding a link to the show page action_item :show, fn id -> action_item_link "Show Link", href: "/custom/link", "data-method": :put, id: id end """ defmacro action_item(opts, fun) do fun = Macro.escape(fun, unquote: true) quote do Module.put_attribute(__MODULE__, :actions, {unquote(opts), unquote(fun)}) end end @doc """ Customize the filter pages on the right side of the index page. 
## Examples Disable the filter view: filter false Only show index columns and filters for the specified fields: filter [:name, :email, :inserted_at] filter [:name, :inserted_at, email: [label: "EMail Address"]] filter [:name, :inserted_at, posts: [order_by: [asc: :name]]] Note: Restricting fields with the `filter` macro also removes the field columns from the default index table. """ defmacro filter(disable) when disable in [nil, false] do quote do Module.put_attribute(__MODULE__, :index_filters, false) end end defmacro filter(fields) when is_list(fields) do quote do Module.put_attribute(__MODULE__, :index_filters, unquote(fields)) end end defmacro filter(field, opts \\ quote(do: [])) do quote do Module.put_attribute(__MODULE__, :index_filters, {unquote(field), unquote(opts)}) end end @doc """ Disable the batch_actions button the index page. ## Examples batch_actions false """ defmacro batch_actions(false) do quote do Module.put_attribute(__MODULE__, :batch_actions, false) end end @doc false def build_query_association(module, field) do case module.__schema__(:association, field) do %Ecto.Association.BelongsTo{cardinality: :one} -> field %Ecto.Association.Has{cardinality: :many} -> check_preload(field, :preload_many) _ -> nil end end defp check_preload(field, key) do if Application.get_env(:ex_admin, key, true) do field else nil end end end
29.260453
99
0.636361
f790e2e4dd6d2932874a7e26c24caea417477a01
8,782
exs
Elixir
test/oban/testing_test.exs
chrismo/oban
f912ccf75a1d89e02229041d578f9263d4de0232
[ "Apache-2.0" ]
null
null
null
test/oban/testing_test.exs
chrismo/oban
f912ccf75a1d89e02229041d578f9263d4de0232
[ "Apache-2.0" ]
null
null
null
test/oban/testing_test.exs
chrismo/oban
f912ccf75a1d89e02229041d578f9263d4de0232
[ "Apache-2.0" ]
null
null
null
defmodule Oban.TestingTest do use Oban.Case use Oban.Testing, repo: Oban.Test.Repo alias Oban.TelemetryHandler @moduletag :integration defmodule InvalidWorker do def perform(_), do: :ok end defmodule OverriddenWorker do use Oban.Worker @impl Worker def new({key, val}, opts) do super(%{key => val}, opts) end @impl Worker def perform(%{args: args}) do {:ok, args} end end defmodule MyApp.Worker do defmacro __using__(_opts) do quote do @behaviour unquote(__MODULE__) end end @callback process() :: :ok end defmodule DoubleBehaviourWorker do use MyApp.Worker use Oban.Worker @impl Oban.Worker def perform(_job), do: :ok @impl MyApp.Worker def process, do: :ok end defmodule MisbehavedWorker do use Oban.Worker @impl Oban.Worker def perform(%{args: %{"action" => "bad_atom"}}), do: :bad def perform(%{args: %{"action" => "bad_string"}}), do: "bad" def perform(%{args: %{"action" => "bad_error"}}), do: :error def perform(%{args: %{"action" => "bad_tuple"}}), do: {:ok, "bad", :bad} def perform(%{args: %{"action" => "bad_snooze"}}), do: {:snooze, true} def perform(%{args: %{"action" => "bad_code"}}), do: raise(RuntimeError, "bad") def perform(%{args: %{"action" => "bad_timing"}}), do: Process.sleep(10) @impl Oban.Worker def timeout(%{args: %{"timeout" => timeout}}), do: timeout def timeout(_job), do: :infinity end defmodule AttemptDrivenWorker do use Oban.Worker @impl Oban.Worker def perform(%{attempt: attempt}) do {:ok, attempt} end end describe "perform_job/3" do test "verifying that the worker implements the Oban.Worker behaviour" do message = "worker to be a module that implements" assert_perform_error(BogusWorker, message) assert_perform_error(InvalidWorker, message) :ok = perform_job(DoubleBehaviourWorker, %{}) end test "creating a valid job out of the args and options" do assert_perform_error(Worker, %{}, [max_attempts: -1], "args and opts to build a valid job") assert_perform_error( Worker, %{}, [max_attempts: -1], "max_attempts: must be greater than 0" ) assert_perform_error(Worker, %{}, [priority: -1], "priority: must be greater than -1") end test "passing non-map args through to an overridden new/2 function" do {:ok, %{"id" => 1}} = perform_job(OverriddenWorker, {:id, 1}) end test "validating the return value of the worker's perform/1 function" do assert_perform_error(MisbehavedWorker, %{"action" => "bad_atom"}, ":bad") message = "Expected result to be one of" actions = ["bad_string", "bad_error", "bad_tuple", "bad_snooze"] for action <- actions do assert_perform_error(MisbehavedWorker, %{"action" => action}, message) end end test "returning the value of worker's perform/1 function" do assert :ok = perform_job(Worker, %{ref: 1, action: "OK"}) assert :discard = perform_job(Worker, %{ref: 1, action: "DISCARD"}) assert {:error, _} = perform_job(Worker, %{ref: 1, action: "ERROR"}) end test "not rescuing unhandled exceptions" do assert_raise RuntimeError, fn -> perform_job(MisbehavedWorker, %{"action" => "bad_code"}) end assert_raise RuntimeError, fn -> perform_job(MisbehavedWorker, %{"action" => "bad_code", "timeout" => 20}) end end test "respecting a worker's timeout" do Process.flag(:trap_exit, true) perform_job(MisbehavedWorker, %{"action" => "bad_timing", "timeout" => 1}) assert_receive {:EXIT, _pid, %Oban.TimeoutError{}} perform_job(MisbehavedWorker, %{"action" => "bad_timing", "timeout" => 20}) refute_receive {:EXIT, _pid, %Oban.TimeoutError{}} end test "defaulting the number of attempts to mimic real execution" do assert {:ok, 1} = perform_job(AttemptDrivenWorker, %{}) assert {:ok, 2} = 
perform_job(AttemptDrivenWorker, %{}, attempt: 2) end test "emitting appropriate telemetry events" do TelemetryHandler.attach_events("perform-job-handler") assert :ok = perform_job(Worker, %{ref: 1, action: "OK"}) assert_receive {:event, :start, _measurements, %{args: args} = _meta} assert %{"action" => "OK", "ref" => 1} = args assert_receive {:event, :stop, _measurements, %{args: args} = _meta} assert %{"action" => "OK", "ref" => 1} = args after :telemetry.detach("perform-job-handler") end end describe "all_enqueued/0,1" do test "retrieving a filtered list of enqueued jobs" do insert!(%{id: 1, ref: "a"}, worker: Ping, queue: :alpha) insert!(%{id: 2, ref: "b"}, worker: Ping, queue: :alpha) insert!(%{id: 3, ref: "c"}, worker: Pong, queue: :gamma) assert [%{args: %{"id" => 2}} | _] = all_enqueued(worker: Ping) assert [%Job{}] = all_enqueued(worker: Pong, queue: :gamma) assert [%Job{}, %Job{}, %Job{}] = all_enqueued() end end describe "assert_enqueued/1" do test "checking for jobs with matching properties" do insert!(%{id: 1}, worker: Ping, queue: :alpha) insert!(%{id: 2}, worker: Pong, queue: :gamma) insert!(%{message: "hello"}, worker: Pong, queue: :gamma) assert_enqueued worker: Ping assert_enqueued worker: Ping, queue: :alpha assert_enqueued worker: Ping, queue: :alpha, args: %{id: 1} assert_enqueued worker: Pong assert_enqueued worker: "Pong", queue: "gamma" assert_enqueued worker: "Pong", queue: "gamma", args: %{message: "hello"} assert_enqueued args: %{id: 1} assert_enqueued args: %{message: "hello"} assert_enqueued worker: Ping, prefix: "public" end test "checking for jobs with matching timestamps with delta" do insert!(%{}, worker: Ping, scheduled_at: seconds_from_now(60)) assert_enqueued worker: Ping, scheduled_at: seconds_from_now(60) end test "checking for jobs allows to configure timestamp delta" do insert!(%{}, worker: Ping, scheduled_at: seconds_from_now(60)) assert_enqueued worker: Ping, scheduled_at: {seconds_from_now(69), delta: 10} end test "asserting that jobs are now or will eventually be enqueued" do insert!(%{id: 1}, worker: Ping, queue: :alpha) Task.async(fn -> Process.sleep(50) insert!(%{id: 2}, worker: Pong, queue: :alpha) end) assert_enqueued [worker: Pong, args: %{id: 2}], 100 assert_enqueued [worker: Ping, args: %{id: 1}], 100 end test "printing a helpful error message" do insert!(%{dest: "some_node"}, worker: Ping) try do assert_enqueued worker: Ping, args: %{dest: "other_node"} rescue error in [ExUnit.AssertionError] -> expected = """ Expected a job matching: %{args: %{dest: "other_node"}, worker: Ping} to be enqueued in the "public" schema. 
Instead found: [%{args: %{"dest" => "some_node"}, worker: "Ping"}] """ assert error.message == expected end end end describe "refute_enqueued/1" do test "refuting jobs with specific properties have been enqueued" do insert!(%{id: 1}, worker: Ping, queue: :alpha) insert!(%{id: 2}, worker: Pong, queue: :gamma) insert!(%{id: 3}, worker: Pong, queue: :gamma, state: "completed") insert!(%{id: 4}, worker: Pong, queue: :gamma, state: "discarded") insert!(%{message: "hello"}, worker: Pong, queue: :gamma) refute_enqueued worker: Pongo refute_enqueued worker: Ping, args: %{id: 2} refute_enqueued worker: Pong, args: %{id: 3} refute_enqueued worker: Pong, args: %{id: 4} refute_enqueued worker: Ping, queue: :gamma refute_enqueued worker: Pong, queue: :gamma, args: %{message: "helo"} refute_enqueued worker: Ping, prefix: "private" end test "refuting that jobs will eventually be enqueued" do Task.async(fn -> Process.sleep(50) insert!(%{id: 1}, worker: Ping, queue: :alpha) end) refute_enqueued [worker: Ping, args: %{id: 1}], 20 end end defp assert_perform_error(worker, message) when is_binary(message) do assert_perform_error(worker, %{}, [], message) end defp assert_perform_error(worker, args, message) when is_binary(message) do assert_perform_error(worker, args, [], message) end defp assert_perform_error(worker, args, opts, message) do perform_job(worker, args, opts) assert false, "This should not be reached" rescue error in [ExUnit.AssertionError] -> assert error.message =~ message end end
31.476703
97
0.628672
f790ec2ffce39b0fbce65c36f20e5dfe669465a7
662
ex
Elixir
lib/visualixir/node_monitor.ex
markhu53/visualixir
80ce8b75fe33476fe7d17110ffc0271f71bc1f91
[ "MIT" ]
1,311
2015-11-08T02:21:41.000Z
2022-03-25T17:32:57.000Z
lib/visualixir/node_monitor.ex
markhu53/visualixir
80ce8b75fe33476fe7d17110ffc0271f71bc1f91
[ "MIT" ]
30
2015-11-09T17:07:50.000Z
2022-02-10T17:13:50.000Z
lib/visualixir/node_monitor.ex
markhu53/visualixir
80ce8b75fe33476fe7d17110ffc0271f71bc1f91
[ "MIT" ]
58
2015-11-08T16:50:00.000Z
2022-03-30T13:03:18.000Z
defmodule Visualixir.NodeMonitor do
  use GenServer
  require Logger
  alias VisualixirWeb.TraceChannel

  def start_link do
    GenServer.start_link(__MODULE__, [])
  end

  def init([]) do
    :ok = :net_kernel.monitor_nodes(true)
    {:ok, nil}
  end

  def handle_info({:nodeup, node}, state) do
    Logger.info "[Visualixir] Connection to #{node} established."
    VisualixirWeb.NodesChannel.refresh()
    {:noreply, state}
  end

  def handle_info({:nodedown, node}, state) do
    Logger.warn "[Visualixir] Lost connection to #{node}..."
    TraceChannel.announce_cleanup(node)
    VisualixirWeb.NodesChannel.refresh()
    {:noreply, state}
  end
end
20.060606
65
0.694864
f791080a0881b4fe793604c84718e883a1b52e8a
709
ex
Elixir
lib/web_demo_web/gettext.ex
pankaryp/Phoenix-server-demo
7a028bb14ad58400e0215281e09627e168cf5792
[ "MIT" ]
null
null
null
lib/web_demo_web/gettext.ex
pankaryp/Phoenix-server-demo
7a028bb14ad58400e0215281e09627e168cf5792
[ "MIT" ]
null
null
null
lib/web_demo_web/gettext.ex
pankaryp/Phoenix-server-demo
7a028bb14ad58400e0215281e09627e168cf5792
[ "MIT" ]
null
null
null
defmodule WebDemoWeb.Gettext do
  @moduledoc """
  A module providing Internationalization with a gettext-based API.

  By using [Gettext](https://hexdocs.pm/gettext),
  your module gains a set of macros for translations, for example:

      import WebDemoWeb.Gettext

      # Simple translation
      gettext "Here is the string to translate"

      # Plural translation
      ngettext "Here is the string to translate",
               "Here are the strings to translate",
               3

      # Domain-based translation
      dgettext "errors", "Here is the error message to translate"

  See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
  """
  use Gettext, otp_app: :web_demo
end
28.36
72
0.681241
f791377180767ac8d13ce9bce3aecf8a091ce98b
876
ex
Elixir
test/support/conn_case.ex
agleb/usho
e5971569c67e53049c8ce3ccf24d52df7e185026
[ "MIT" ]
null
null
null
test/support/conn_case.ex
agleb/usho
e5971569c67e53049c8ce3ccf24d52df7e185026
[ "MIT" ]
null
null
null
test/support/conn_case.ex
agleb/usho
e5971569c67e53049c8ce3ccf24d52df7e185026
[ "MIT" ]
null
null
null
defmodule UshoWeb.ConnCase do
  @moduledoc """
  This module defines the test case to be used by
  tests that require setting up a connection.

  Such tests rely on `Phoenix.ConnTest` and also
  import other functionality to make it easier
  to build common data structures and query the data layer.

  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      # Import conveniences for testing with connections
      use Phoenix.ConnTest
      alias UshoWeb.Router.Helpers, as: Routes

      # The default endpoint for testing
      @endpoint UshoWeb.Endpoint
    end
  end

  setup _tags do
    {:ok, conn: Phoenix.ConnTest.build_conn()}
  end
end
26.545455
59
0.726027
f7914967dbd9b9bf5e25fe402b0acf067d84a839
9,719
exs
Elixir
test/vintage_net/interfaces_monitor_test.exs
nerves-networking/vintage_net
8d4251a0ec995babf8f4d7aa7cc1d74b70646c72
[ "Apache-2.0" ]
85
2019-05-09T14:54:38.000Z
2022-02-08T16:52:04.000Z
test/vintage_net/interfaces_monitor_test.exs
fhunleth/vintage_net
215495533cb642eeb172daba08208a454f19b36f
[ "Apache-2.0" ]
132
2019-05-09T15:57:59.000Z
2022-02-28T16:31:22.000Z
test/vintage_net/interfaces_monitor_test.exs
fhunleth/vintage_net
215495533cb642eeb172daba08208a454f19b36f
[ "Apache-2.0" ]
14
2019-07-08T19:18:23.000Z
2022-02-08T16:52:05.000Z
defmodule VintageNet.InterfacesMonitorTest do use ExUnit.Case import ExUnit.CaptureLog alias VintageNet.InterfacesMonitor doctest InterfacesMonitor setup do # Capture Application exited logs capture_log(fn -> Application.stop(:vintage_net) Application.start(:vintage_net) end) :ok end @tag :requires_interfaces_monitor test "interfaces known to :inet are in property table" do names = get_interfaces() # Avoid race on CircleCI Process.sleep(10) for name <- names do assert true == VintageNet.get(["interface", name, "present"]) end end test "adding and removing links" do VintageNet.subscribe(["interface", "bogus0", "present"]) send_report({:newlink, "bogus0", 56, %{}}) assert_receive {VintageNet, ["interface", "bogus0", "present"], nil, true, %{}} send_report({:dellink, "bogus0", 56, %{}}) assert_receive {VintageNet, ["interface", "bogus0", "present"], true, nil, %{}} end test "renaming links" do VintageNet.subscribe(["interface", "bogus0", "present"]) VintageNet.subscribe(["interface", "bogus2", "present"]) send_report({:newlink, "bogus0", 56, %{}}) assert_receive {VintageNet, ["interface", "bogus0", "present"], nil, true, %{}} send_report({:newlink, "bogus2", 56, %{}}) assert_receive {VintageNet, ["interface", "bogus0", "present"], true, nil, %{}} assert_receive {VintageNet, ["interface", "bogus2", "present"], nil, true, %{}} end test "link fields show up as properties" do # When adding support for fields, remember to add them to the docs fields = [{"present", true}, {"lower_up", true}, {"mac_address", "70:85:c2:8f:98:e1"}] for {field, _expected} <- fields do VintageNet.subscribe(["interface", "bogus0", field]) end # The current report from C has the following fields, but not all are exposed to Elixir. send_report( {:newlink, "bogus0", 56, %{ broadcast: true, lower_up: true, mac_address: "70:85:c2:8f:98:e1", mac_broadcast: "ff:ff:ff:ff:ff:ff", mtu: 1500, multicast: true, operstate: :down, running: false, stats: %{ collisions: 0, multicast: 0, rx_bytes: 0, rx_dropped: 0, rx_errors: 0, rx_packets: 0, tx_bytes: 0, tx_dropped: 0, tx_errors: 0, tx_packets: 0 }, type: :ethernet, up: true }} ) for {field, expected} <- fields do assert_receive {VintageNet, ["interface", "bogus0", ^field], nil, ^expected, %{}} end end test "ipv4 addresses get reported" do VintageNet.subscribe(["interface", "bogus0", "addresses"]) send_report({:newlink, "bogus0", 56, %{}}) send_report( {:newaddr, 56, %{ address: {192, 168, 9, 5}, family: :inet, label: "bogus0", local: {192, 168, 9, 5}, permanent: false, prefixlen: 24, scope: :universe }} ) expected_address_info = %{ family: :inet, scope: :universe, address: {192, 168, 9, 5}, netmask: {255, 255, 255, 0}, prefix_length: 24 } assert_receive {VintageNet, ["interface", "bogus0", "addresses"], _before, [^expected_address_info], %{}} # Send a second IP address send_report( {:newaddr, 56, %{ address: {192, 168, 10, 10}, family: :inet, label: "bogus0", local: {192, 168, 10, 10}, permanent: false, prefixlen: 24, scope: :universe }} ) assert_receive {VintageNet, ["interface", "bogus0", "addresses"], _before, [ %{ family: :inet, scope: :universe, address: {192, 168, 10, 10}, netmask: {255, 255, 255, 0}, prefix_length: 24 }, %{ family: :inet, scope: :universe, address: {192, 168, 9, 5}, netmask: {255, 255, 255, 0}, prefix_length: 24 } ], %{}} # Remove an address send_report( {:deladdr, 56, %{ address: {192, 168, 10, 10}, family: :inet, label: "bogus0", local: {192, 168, 10, 10}, permanent: false, prefixlen: 24, scope: :universe }} ) expected_address_info = %{ family: :inet, scope: 
:universe, address: {192, 168, 9, 5}, netmask: {255, 255, 255, 0}, prefix_length: 24 } assert_receive {VintageNet, ["interface", "bogus0", "addresses"], _before, [^expected_address_info], %{}} end test "ipv4 ppp address gets reported correctly" do VintageNet.subscribe(["interface", "bogus0", "addresses"]) send_report({:newlink, "bogus0", 56, %{}}) send_report( {:newaddr, 56, %{ address: {10, 64, 64, 64}, family: :inet, label: "bogus0", local: {10, 0, 95, 181}, permanent: true, prefixlen: 32, scope: :universe }} ) expected_address_info = %{ family: :inet, scope: :universe, address: {10, 0, 95, 181}, netmask: {255, 255, 255, 255}, prefix_length: 32 } assert_receive {VintageNet, ["interface", "bogus0", "addresses"], _before, [^expected_address_info], %{}} end test "ipv6 addresses get reported" do VintageNet.subscribe(["interface", "bogus0", "addresses"]) send_report({:newlink, "bogus0", 56, %{}}) send_report( {:newaddr, 56, %{ address: {65152, 0, 0, 0, 45461, 64234, 43649, 26057}, family: :inet6, permanent: true, prefixlen: 64, scope: :link }} ) expected_address_info = %{ family: :inet6, scope: :link, address: {65152, 0, 0, 0, 45461, 64234, 43649, 26057}, netmask: {65535, 65535, 65535, 65535, 0, 0, 0, 0}, prefix_length: 64 } assert_receive {VintageNet, ["interface", "bogus0", "addresses"], _before, [^expected_address_info], %{}} end test "address report beats link report" do # Check that the address report isn't lost if it arrives before # the initial link report VintageNet.subscribe(["interface", "bogus0", "addresses"]) send_report( {:newaddr, 56, %{ address: {192, 168, 9, 5}, family: :inet, label: "bogus0", local: {192, 168, 9, 5}, permanent: false, prefixlen: 24, scope: :universe }} ) assert VintageNet.get(["interface", "bogus0", "addresses"]) == nil send_report({:newlink, "bogus0", 56, %{}}) assert_receive {VintageNet, ["interface", "bogus0", "addresses"], _before, [ %{ family: :inet, scope: :universe, address: {192, 168, 9, 5}, netmask: {255, 255, 255, 0}, prefix_length: 24 } ], %{}} end test "address delete beats link delete" do # Check that if address removals are ignored if the link isn't around before_delete = VintageNet.get_by_prefix(["interface"]) send_report( {:deladdr, 56, %{ address: {192, 168, 9, 5}, family: :inet, label: "bogus0", local: {192, 168, 9, 5}, permanent: false, prefixlen: 24, scope: :universe }} ) after_delete = VintageNet.get_by_prefix(["interface"]) assert before_delete == after_delete end test "force clearing ipv4 addresses" do VintageNet.subscribe(["interface", "bogus0", "addresses"]) send_report({:newlink, "bogus0", 56, %{}}) send_report( {:newaddr, 56, %{ address: {192, 168, 9, 5}, family: :inet, label: "bogus0", local: {192, 168, 9, 5}, permanent: false, prefixlen: 24, scope: :universe }} ) send_report( {:newaddr, 56, %{ address: {192, 168, 10, 10}, family: :inet, label: "bogus0", local: {192, 168, 10, 10}, permanent: false, prefixlen: 24, scope: :universe }} ) # Clear out the mailbox for the above two reports (they're tested above) assert_receive {VintageNet, ["interface", "bogus0", "addresses"], nil, _one_address, %{}} assert_receive {VintageNet, ["interface", "bogus0", "addresses"], _one_address, _two_addresses, %{}} # The real test InterfacesMonitor.force_clear_ipv4_addresses("bogus0") assert_receive {VintageNet, ["interface", "bogus0", "addresses"], _two_addresses, [], %{}} # Nothing should happen this time InterfacesMonitor.force_clear_ipv4_addresses("bogus0") refute_receive {VintageNet, ["interface", "bogus0", "addresses"], _anything, _anything2, %{}} 
end defp get_interfaces() do {:ok, interface_infos} = :inet.getifaddrs() for {name, _info} <- interface_infos, do: to_string(name) end defp send_report(report) do # Simulate a report coming from C encoded_report = :erlang.term_to_binary(report) send(VintageNet.InterfacesMonitor, {:port, {:data, encoded_report}}) end end
27.377465
97
0.533697
f7914b5387f18524ee2bb944f8f060d226c443f2
1,274
exs
Elixir
malmo_elixir/new_try/led_demo/mix.exs
MathiasBeckius/ElixirNerves
19a0a9b9cecae1e44a0259326a91be623acba1b0
[ "MIT" ]
null
null
null
malmo_elixir/new_try/led_demo/mix.exs
MathiasBeckius/ElixirNerves
19a0a9b9cecae1e44a0259326a91be623acba1b0
[ "MIT" ]
null
null
null
malmo_elixir/new_try/led_demo/mix.exs
MathiasBeckius/ElixirNerves
19a0a9b9cecae1e44a0259326a91be623acba1b0
[ "MIT" ]
null
null
null
defmodule LedDemo.MixProject do
  use Mix.Project

  @all_targets [:bbb]

  def project do
    [
      app: :led_demo,
      version: "0.1.0",
      elixir: "~> 1.8",
      archives: [nerves_bootstrap: "~> 1.5"],
      start_permanent: Mix.env() == :prod,
      build_embedded: true,
      aliases: [loadconfig: [&bootstrap/1]],
      deps: deps()
    ]
  end

  def bootstrap(args) do
    Application.start(:nerves_bootstrap)
    Mix.Task.run("loadconfig", args)
  end

  def application do
    [
      mod: {LedDemo.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end

  defp deps do
    [
      # Dependencies for all targets
      {:nerves, "~> 1.4", runtime: false},
      {:shoehorn, "~> 0.4"},
      {:ring_logger, "~> 0.6"},
      {:toolshed, "~> 0.2"},
      {:nerves_network, "~> 0.3"},
      {:ui, path: "../ui"},

      # Dependencies for all targets except :host
      {:nerves_runtime, "~> 0.6", targets: @all_targets},
      {:nerves_init_gadget, "~> 0.4", targets: @all_targets},
      {:nerves_leds, "~> 0.8.0", targets: @all_targets},
      {:nerves_firmware_ssh, "~> 0.3", targets: @all_targets},

      # Dependencies for specific targets
      {:nerves_system_bbb, "~> 2.0", runtime: false, targets: :bbb}
    ]
  end
end
24.5
68
0.568289
f7915a97b0748fa5493d23e6760d614ce15e8eed
672
ex
Elixir
lib/sanbase/comments/entity_modules/timeline_event_comment.ex
santiment/sanbase2
9ef6e2dd1e377744a6d2bba570ea6bd477a1db31
[ "MIT" ]
81
2017-11-20T01:20:22.000Z
2022-03-05T12:04:25.000Z
lib/sanbase/comments/entity_modules/timeline_event_comment.ex
rmoorman/sanbase2
226784ab43a24219e7332c49156b198d09a6dd85
[ "MIT" ]
359
2017-10-15T14:40:53.000Z
2022-01-25T13:34:20.000Z
lib/sanbase/comments/entity_modules/timeline_event_comment.ex
rmoorman/sanbase2
226784ab43a24219e7332c49156b198d09a6dd85
[ "MIT" ]
16
2017-11-19T13:57:40.000Z
2022-02-07T08:13:02.000Z
defmodule Sanbase.Comment.TimelineEventComment do
  @moduledoc ~s"""
  A mapping table connecting comments and timeline events.

  This module is used to create, update, delete and fetch timeline events comments.
  """
  use Ecto.Schema

  import Ecto.Changeset

  schema "timeline_event_comments_mapping" do
    belongs_to(:comment, Sanbase.Comment)
    belongs_to(:timeline_event, Sanbase.Timeline.TimelineEvent)

    timestamps()
  end

  def changeset(%__MODULE__{} = mapping, attrs \\ %{}) do
    mapping
    |> cast(attrs, [:timeline_event_id, :comment_id])
    |> validate_required([:timeline_event_id, :comment_id])
    |> unique_constraint(:comment_id)
  end
end
26.88
83
0.732143
f7919ffaa47fd1dd4c0f40664ca67d744d998468
2,292
ex
Elixir
test/support/live_views/events.ex
gaslight/live_element
78d4ab0a2daab470f2ffd25d446fbabb0d746afe
[ "MIT" ]
null
null
null
test/support/live_views/events.ex
gaslight/live_element
78d4ab0a2daab470f2ffd25d446fbabb0d746afe
[ "MIT" ]
null
null
null
test/support/live_views/events.ex
gaslight/live_element
78d4ab0a2daab470f2ffd25d446fbabb0d746afe
[ "MIT" ]
null
null
null
defmodule LiveElementTest.EventsLive do
  use LiveElement, namespace: LiveElementTest

  def render(assigns) do
    ~H"""
    count: <%= @count %>
    """
  end

  def mount(_params, _session, socket) do
    {:ok, assign(socket, events: [], count: 0)}
  end

  def handle_event("reply", %{"count" => new_count, "reply" => reply}, socket) do
    {:reply, reply, assign(socket, :count, new_count)}
  end

  def handle_event("reply", %{"reply" => reply}, socket) do
    {:reply, reply, socket}
  end

  def handle_call({:run, func}, _, socket), do: func.(socket)

  def handle_info({:run, func}, socket), do: func.(socket)
end

defmodule LiveElementTest.EventsInMountLive do
  use LiveElement, namespace: LiveElementTest

  defmodule Child do
    use LiveElement, namespace: LiveElementTest

    def render(assigns) do
      ~H"hello!"
    end

    def mount(_params, _session, socket) do
      socket =
        if connected?(socket),
          do: push_event(socket, "child-mount", %{child: "bar"}),
          else: socket

      {:ok, socket}
    end
  end

  def render(assigns) do
    ~H"<%= live_render @socket, Child, id: :child_live %>"
  end

  def mount(_params, _session, socket) do
    socket =
      if connected?(socket),
        do: push_event(socket, "root-mount", %{root: "foo"}),
        else: socket

    {:ok, socket}
  end
end

defmodule LiveElementTest.EventsInComponentLive do
  use LiveElement, namespace: LiveElementTest

  defmodule Child do
    use Phoenix.LiveComponent

    def render(assigns) do
      ~H"""
      <button id="comp-reply" phx-click="reply" phx-target={@myself}>
        bump reply!
      </button>
      """
    end

    def update(assigns, socket) do
      socket =
        if connected?(socket),
          do: push_event(socket, "component", %{count: assigns.count}),
          else: socket

      {:ok, socket}
    end

    def handle_event("reply", reply, socket) do
      {:reply, %{"comp-reply" => reply}, socket}
    end
  end

  def render(assigns) do
    ~H"<%= live_component Child, id: :child_live, count: @count %>"
  end

  def mount(_params, _session, socket) do
    {:ok, assign(socket, :count, 1)}
  end

  def handle_event("bump", _, socket) do
    {:noreply, update(socket, :count, &(&1 + 1))}
  end
end
22.252427
81
0.610384
f791e6b4072addaabd02f8ccacf4f220d13c95ea
1,080
exs
Elixir
mix.exs
Awlexus/Stopsel
0d207a65e786a5a057aea76cd7fde72d22ddfd78
[ "MIT" ]
3
2019-06-13T18:17:42.000Z
2019-07-14T19:44:51.000Z
mix.exs
Awlexus/Stopsel
0d207a65e786a5a057aea76cd7fde72d22ddfd78
[ "MIT" ]
null
null
null
mix.exs
Awlexus/Stopsel
0d207a65e786a5a057aea76cd7fde72d22ddfd78
[ "MIT" ]
null
null
null
defmodule Stopsel.MixProject do
  use Mix.Project

  def project do
    [
      app: :stopsel,
      version: "0.2.0",
      elixir: "~> 1.10",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      elixirc_paths: elixirc_paths(Mix.env()),
      package: package(),
      description: description(),
      source_url: "https://github.com/Awlexus/stopsel",
      docs: [
        main: "Stopsel",
        extras: ["README.md"]
      ]
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger],
      mod: {Stopsel.Application, []}
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:ex_doc, "~> 0.23", only: :dev, runtime: false}
    ]
  end

  defp elixirc_paths(:test), do: ~w"lib test/support"
  defp elixirc_paths(_), do: ~w"lib"

  defp description() do
    "A platform independent text message router"
  end

  defp package() do
    [
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/Awlexus/stopsel"}
    ]
  end
end
21.176471
64
0.581481
f791f78c2eaee4ff95fc91102a8f2c6dc86b8aab
1,127
ex
Elixir
questions/updates/00070000-delete.ex
zachvalenta/pgexercises
b16d5c28bb5e21732e6fb39090fa5902d0016e78
[ "BSD-2-Clause" ]
null
null
null
questions/updates/00070000-delete.ex
zachvalenta/pgexercises
b16d5c28bb5e21732e6fb39090fa5902d0016e78
[ "BSD-2-Clause" ]
null
null
null
questions/updates/00070000-delete.ex
zachvalenta/pgexercises
b16d5c28bb5e21732e6fb39090fa5902d0016e78
[ "BSD-2-Clause" ]
1
2021-05-14T17:45:40.000Z
2021-05-14T17:45:40.000Z
|QUESTIONNAME|
Delete all bookings
|QUESTION|
As part of a clearout of our database, we want to delete all bookings from the cd.bookings table. How can we accomplish this?
|QUERY|
delete from cd.bookings;
|ANSWER|
<p>The <c>DELETE</c> statement does what it says on the tin: deletes rows from the table. Here, we show the command in its simplest form, with no qualifiers. In this case, it deletes everything from the table. Obviously, you should be careful with your deletes and make sure they're always limited - we'll see how to do that in the next exercise.

<p>An alternative to unqualified <c>DELETEs</c> is the following:

<sql>
truncate cd.bookings;
</sql>

<p><c>TRUNCATE</c> also deletes everything in the table, but does so using a quicker underlying mechanism. It's not <a href="https://www.postgresql.org/docs/9.6/static/mvcc-caveats.html">perfectly safe in all circumstances</a>, though, so use judiciously. When in doubt, use <c>DELETE</c>.
|HINT|
Take a look at the <c>DELETE</c> statement in the PostgreSQL docs.
|SORTED|
1
|PAGEID|
a281e531-33d4-4672-93d9-428982bdb75a
|WRITEABLE|
1
|RETURNTABLE|
cd.bookings
40.25
346
0.759539
f7920facadc2f85cd20292fa1992582f64be544a
184
ex
Elixir
web/controllers/page_controller.ex
harmon25/Reactpxbp
7b895eb507d23c005229a2dee1ed078cc8291230
[ "MIT" ]
null
null
null
web/controllers/page_controller.ex
harmon25/Reactpxbp
7b895eb507d23c005229a2dee1ed078cc8291230
[ "MIT" ]
null
null
null
web/controllers/page_controller.ex
harmon25/Reactpxbp
7b895eb507d23c005229a2dee1ed078cc8291230
[ "MIT" ]
null
null
null
defmodule Reactpxbp.PageController do
  use Reactpxbp.Web, :controller

  plug :put_layout, "app_layout.html"

  def main_app(conn, _params) do
    render conn, "app.html"
  end
end
15.333333
37
0.728261
f7925a2aaf6a54363663bad897a1d4c4fe63088f
55,920
exs
Elixir
test/query_test.exs
enter-haken/postgrex
fb3438d4e6a56db81ddd0d578cdfc0484909c233
[ "Apache-2.0" ]
null
null
null
test/query_test.exs
enter-haken/postgrex
fb3438d4e6a56db81ddd0d578cdfc0484909c233
[ "Apache-2.0" ]
1
2020-05-26T02:38:50.000Z
2020-05-26T02:38:50.000Z
test/query_test.exs
enter-haken/postgrex
fb3438d4e6a56db81ddd0d578cdfc0484909c233
[ "Apache-2.0" ]
null
null
null
defmodule QueryTest do use ExUnit.Case, async: true import Postgrex.TestHelper import ExUnit.CaptureLog alias Postgrex, as: P setup context do opts = [ database: "postgrex_test", backoff_type: :stop, prepare: context[:prepare] || :named, max_restarts: 0 ] {:ok, pid} = P.start_link(opts) {:ok, [pid: pid, options: opts]} end test "iodata", context do assert [[123]] = query(["S", ?E, ["LEC" | "T"], " ", '123'], []) end test "decode basic types", context do assert [[nil]] = query("SELECT NULL", []) assert [[true, false]] = query("SELECT true, false", []) assert [["e"]] = query("SELECT 'e'::char", []) assert [["ẽ"]] = query("SELECT 'ẽ'::char", []) assert [[42]] = query("SELECT 42", []) assert [[42.0]] = query("SELECT 42::float", []) assert [[:NaN]] = query("SELECT 'NaN'::float", []) assert [[:inf]] = query("SELECT 'inf'::float", []) assert [[:"-inf"]] = query("SELECT '-inf'::float", []) assert [["ẽric"]] = query("SELECT 'ẽric'", []) assert [["ẽric"]] = query("SELECT 'ẽric'::varchar", []) assert [[<<1, 2, 3>>]] = query("SELECT '\\001\\002\\003'::bytea", []) end test "decode numeric", context do assert [[Decimal.new("42")]] == query("SELECT 42::numeric", []) assert [[Decimal.new("42.0000000000")]] == query("SELECT 42.0::numeric(100, 10)", []) assert [[Decimal.new("1.001")]] == query("SELECT 1.001", []) assert [[Decimal.new("0.4242")]] == query("SELECT 0.4242", []) assert [[Decimal.new("42.4242")]] == query("SELECT 42.4242", []) assert [[Decimal.new("12345.12345")]] == query("SELECT 12345.12345", []) assert [[Decimal.new("0.00012345")]] == query("SELECT 0.00012345", []) assert [[Decimal.new("1000000000.0")]] == query("SELECT 1000000000.0", []) assert [[Decimal.new("1000000000.1")]] == query("SELECT 1000000000.1", []) assert [[Decimal.new("123456789123456789123456789")]] == query("SELECT 123456789123456789123456789::numeric", []) assert [[Decimal.new("123456789123456789123456789.123456789")]] == query("SELECT 123456789123456789123456789.123456789", []) assert [[Decimal.new("1.1234500000")]] == query("SELECT 1.1234500000", []) assert [[Decimal.new("NaN")]] == query("SELECT 'NaN'::numeric", []) end @tag min_pg_version: "9.5" test "decode json/jsonb", context do assert [[%{"foo" => 42}]] == query("SELECT '{\"foo\": 42}'::json", []) assert [[%{"foo" => 42}]] == query("SELECT '{\"foo\": 42}'::jsonb", []) end test "decode uuid", context do uuid = <<160, 238, 188, 153, 156, 11, 78, 248, 187, 109, 107, 185, 189, 56, 10, 17>> assert [[^uuid]] = query("SELECT 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11'::uuid", []) end test "decode arrays", context do assert [[[]]] = query("SELECT ARRAY[]::integer[]", []) assert [[[1]]] = query("SELECT ARRAY[1]", []) assert [[[1, 2]]] = query("SELECT ARRAY[1,2]", []) assert [[[[0], [1]]]] = query("SELECT ARRAY[[0],[1]]", []) assert [[[[0]]]] = query("SELECT ARRAY[ARRAY[0]]", []) end test "decode array domain", context do assert [[[1.0, 2.0, 3.0]]] = query("SELECT ARRAY[1, 2, 3]::floats_domain", []) assert [ [ [ %Postgrex.Point{x: 1.0, y: 1.0}, %Postgrex.Point{x: 2.0, y: 2.0}, %Postgrex.Point{x: 3.0, y: 3.0} ] ] ] = query("SELECT ARRAY[point '1,1', point '2,2', point '3,3']::points_domain", []) end test "encode array domain", context do floats = [1.0, 2.0, 3.0] floats_string = "{1,2,3}" assert [[^floats_string]] = query("SELECT $1::floats_domain::text", [floats]) points = [ %Postgrex.Point{x: 1.0, y: 1.0}, %Postgrex.Point{x: 2.0, y: 2.0}, %Postgrex.Point{x: 3.0, y: 3.0} ] points_string = "{\"(1,1)\",\"(2,2)\",\"(3,3)\"}" assert [[^points_string]] = query("SELECT 
$1::points_domain::text", [points]) end test "decode interval", context do assert [[%Postgrex.Interval{months: 0, days: 0, secs: 0, microsecs: 0}]] = query("SELECT interval '0'", []) assert [[%Postgrex.Interval{months: 100, days: 0, secs: 0, microsecs: 0}]] = query("SELECT interval '100 months'", []) assert [[%Postgrex.Interval{months: 0, days: 100, secs: 0, microsecs: 0}]] = query("SELECT interval '100 days'", []) assert [[%Postgrex.Interval{months: 0, days: 0, secs: 100, microsecs: 0}]] = query("SELECT interval '100 secs'", []) assert [[%Postgrex.Interval{months: 14, days: 40, secs: 10920, microsecs: 0}]] = query("SELECT interval '1 year 2 months 40 days 3 hours 2 minutes'", []) assert [[%Postgrex.Interval{months: 0, days: 0, secs: 53, microsecs: 204_800}]] = query("SELECT interval '53 secs 204800 microseconds'", []) assert [[%Postgrex.Interval{months: 0, days: 0, secs: 10, microsecs: 240_000}]] = query("SELECT interval '10240000 microseconds'", []) end test "decode point", context do assert [[%Postgrex.Point{x: -97.5, y: 100.1}]] == query("SELECT point(-97.5, 100.1)::point", []) end test "encode point", context do assert [[%Postgrex.Point{x: -97.0, y: 100.0}]] == query("SELECT $1::point", [%Postgrex.Point{x: -97, y: 100}]) end test "decode polygon", context do p1 = %Postgrex.Point{x: 100.0, y: 101.5} p2 = %Postgrex.Point{x: 100.0, y: -99.1} p3 = %Postgrex.Point{x: -91.1, y: -101.1} p4 = %Postgrex.Point{x: -100.0, y: 99.9} polygon = %Postgrex.Polygon{vertices: [p1, p2, p3, p4]} polystring = "((100.0,101.5),(100.0,-99.1),(-91.1,-101.1),(-100.0,99.9))" assert [[polygon]] == query("SELECT '#{polystring}'" <> "::polygon", []) end test "encode polygon", context do p1 = %Postgrex.Point{x: 100.0, y: 101.5} p2 = %Postgrex.Point{x: 100.0, y: -99.1} p3 = %Postgrex.Point{x: -91.1, y: -101.1} p4 = %Postgrex.Point{x: -100.0, y: 99.9} polygon = %Postgrex.Polygon{vertices: [p1, p2, p3, p4]} assert [[polygon]] == query("SELECT $1::polygon", [polygon]) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::polygon", [1])) bad_polygon = %Postgrex.Polygon{vertices: ["x"]} assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::polygon", [bad_polygon])) end @tag min_pg_version: "9.4" test "decode line", context do # 98.6x - y = 0 <=> y = 98.6x line = %Postgrex.Line{a: 98.6, b: -1.0, c: 0.0} assert [[line]] == query("SELECT '{98.6,-1.0,0.0}'::line", []) assert [[line]] == query("SELECT '(0.0,0.0),(1.0,98.6)'::line", []) end @tag min_pg_version: "9.4" test "encode line", context do # 98.6x - y = 0 <=> y = 98.6x line = %Postgrex.Line{a: 98.6, b: -1.0, c: 0.0} assert [[line]] == query("SELECT $1::line", [line]) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::line", ["foo"])) bad_line = %Postgrex.Line{a: nil, b: "foo"} assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::line", [bad_line])) end test "decode line segment", context do segment = %Postgrex.LineSegment{ point1: %Postgrex.Point{x: 0.0, y: 0.0}, point2: %Postgrex.Point{x: 1.0, y: 1.0} } assert [[segment]] == query("SELECT '(0.0,0.0)(1.0,1.0)'::lseg", []) end test "encode line segment", context do segment = %Postgrex.LineSegment{ point1: %Postgrex.Point{x: 0.0, y: 0.0}, point2: %Postgrex.Point{x: 1.0, y: 1.0} } assert [[segment]] == query("SELECT $1::lseg", [segment]) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::lseg", [1.0])) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::lseg", [%Postgrex.LineSegment{}])) end test "decode box", context do box = 
%Postgrex.Box{ upper_right: %Postgrex.Point{x: 1.0, y: 1.0}, bottom_left: %Postgrex.Point{x: 0.0, y: 0.0} } # postgres automatically sorts the points so that we get UR/BL assert [[box]] == query("SELECT '(0.0,0.0)(1.0,1.0)'::box", []) assert [[box]] == query("SELECT '(1.0,1.0)(0.0,0.0)'::box", []) assert [[box]] == query("SELECT '(1.0,0.0)(0.0,1.0)'::box", []) assert [[box]] == query("SELECT '(0.0,1.0)(1.0,0.0)'::box", []) end test "encode box", context do box = %Postgrex.Box{ upper_right: %Postgrex.Point{x: 1.0, y: 1.0}, bottom_left: %Postgrex.Point{x: 0.0, y: 0.0} } assert [[box]] == query("SELECT $1::box", [box]) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::box", [1.0])) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::box", [%Postgrex.Box{}])) end test "decode path", context do p1 = %Postgrex.Point{x: 0.0, y: 0.0} p2 = %Postgrex.Point{x: 1.0, y: 3.0} p3 = %Postgrex.Point{x: -4.0, y: 3.14} path = %Postgrex.Path{points: [p1, p2, p3], open: true} assert [[path]] == query("SELECT '[(0.0,0.0),(1.0,3.0),(-4.0,3.14)]'::path", []) assert [[%{path | open: false}]] == query("SELECT '((0.0,0.0),(1.0,3.0),(-4.0,3.14))'::path", []) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::path", [1.0])) bad_path = %Postgrex.Path{points: "foo", open: false} assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::path", [bad_path])) # open must be true/false bad_path = %Postgrex.Path{points: []} assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::path", [bad_path])) end test "encode path", context do p1 = %Postgrex.Point{x: 0.0, y: 0.0} p2 = %Postgrex.Point{x: 1.0, y: 3.0} p3 = %Postgrex.Point{x: -4.0, y: 3.14} path = %Postgrex.Path{points: [p1, p2, p3], open: false} assert [[path]] == query("SELECT $1::path", [path]) end test "decode circle", context do center = %Postgrex.Point{x: 1.0, y: -3.5} circle = %Postgrex.Circle{center: center, radius: 100.0} assert [[circle]] == query("SELECT '<(1.0,-3.5),100.0>'::circle", []) end test "encode circle", context do center = %Postgrex.Point{x: 1.0, y: -3.5} circle = %Postgrex.Circle{center: center, radius: 100.0} assert [[circle]] == query("SELECT $1::circle", [circle]) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::path", ["snu"])) bad_circle = %Postgrex.Circle{center: 1.5, radius: 1.0} assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::path", [bad_circle])) bad_circle = %Postgrex.Circle{center: %Postgrex.Point{x: 1.0, y: 0.0}, radius: "five"} assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::path", [bad_circle])) end test "decode name", context do assert [["test"]] == query("SELECT 'test'::name", []) end test "encode name", context do assert [["test"]] == query("SELECT $1::name", ["test"]) end test "decode \"char\"", context do assert [["X"]] == query("SELECT 'X'::\"char\"", []) end test "encode \"char\"", context do assert [["x"]] == query("SELECT $1::\"char\"", ["x"]) end @tag :capture_log test "decode record", context do assert [[{1, "2"}]] = query("SELECT (1, '2')::composite1", []) assert [[[{1, "2"}]]] = query("SELECT ARRAY[(1, '2')::composite1]", []) end test "decode enum", context do assert [["elixir"]] = query("SELECT 'elixir'::enum1", []) end @tag min_pg_version: "9.2" test "decode range", context do assert [[%Postgrex.Range{lower: 2, upper: 5, lower_inclusive: true, upper_inclusive: false}]] = query("SELECT '(1,5)'::int4range", []) assert [[%Postgrex.Range{lower: 1, upper: 7, lower_inclusive: true, upper_inclusive: 
false}]] = query("SELECT '[1,6]'::int4range", []) assert [ [ %Postgrex.Range{ lower: :unbound, upper: 5, lower_inclusive: false, upper_inclusive: false } ] ] = query("SELECT '(,5)'::int4range", []) assert [ [ %Postgrex.Range{ lower: 1, upper: :unbound, lower_inclusive: true, upper_inclusive: false } ] ] = query("SELECT '[1,)'::int4range", []) assert [ [ %Postgrex.Range{ lower: :empty, upper: :empty, lower_inclusive: false, upper_inclusive: false } ] ] = query("SELECT '(1,1)'::int4range", []) assert [[%Postgrex.Range{lower: 1, upper: 2, lower_inclusive: true, upper_inclusive: false}]] = query("SELECT '[1,1]'::int4range", []) assert [ [ %Postgrex.Range{ lower: :unbound, upper: :unbound, lower_inclusive: false, upper_inclusive: false } ] ] = query("SELECT '(,)'::int4range", []) assert [ [ %Postgrex.Range{ lower: :unbound, upper: :unbound, lower_inclusive: false, upper_inclusive: false } ] ] = query("SELECT '[,]'::int4range", []) assert [[%Postgrex.Range{lower: 3, upper: 8, lower_inclusive: true, upper_inclusive: false}]] = query("SELECT '(2,8)'::int8range", []) assert [ [ %Postgrex.Range{ lower: Decimal.new("1.2"), upper: Decimal.new("3.4"), lower_inclusive: false, upper_inclusive: false } ] ] == query("SELECT '(1.2,3.4)'::numrange", []) assert [ [ %Postgrex.Range{ lower: %Date{year: 2014, month: 1, day: 1}, upper: %Date{year: 2014, month: 12, day: 31} } ] ] = query("SELECT '[2014-1-1,2014-12-31)'::daterange", []) assert [[%Postgrex.Range{lower: :unbound, upper: %Date{year: 2014, month: 12, day: 31}}]] = query("SELECT '(,2014-12-31)'::daterange", []) assert [[%Postgrex.Range{lower: %Date{year: 2014, month: 1, day: 2}, upper: :unbound}]] = query("SELECT '(2014-1-1,]'::daterange", []) assert [ [ %Postgrex.Range{ lower: :unbound, upper: :unbound, lower_inclusive: false, upper_inclusive: false } ] ] = query("SELECT '(,)'::daterange", []) assert [ [ %Postgrex.Range{ lower: :unbound, upper: :unbound, lower_inclusive: false, upper_inclusive: false } ] ] = query("SELECT '[,]'::daterange", []) end @tag min_pg_version: "9.0" test "decode network types", context do assert [[%Postgrex.INET{address: {127, 0, 0, 1}, netmask: nil}]] = query("SELECT '127.0.0.1'::inet", []) assert [[%Postgrex.INET{address: {127, 0, 0, 1}, netmask: nil}]] = query("SELECT '127.0.0.1/32'::inet", []) assert [[%Postgrex.INET{address: {127, 0, 0, 1}, netmask: 32}]] = query("SELECT '127.0.0.1/32'::inet::cidr", []) assert [[%Postgrex.INET{address: {127, 0, 0, 1}, netmask: 32}]] = query("SELECT '127.0.0.1/32'::cidr", []) assert [[%Postgrex.INET{address: {127, 0, 0, 1}, netmask: 4}]] = query("SELECT '127.0.0.1/4'::inet", []) assert [[%Postgrex.INET{address: {112, 0, 0, 0}, netmask: 4}]] = query("SELECT '127.0.0.1/4'::inet::cidr", []) assert %Postgrex.Error{ postgres: %{ code: :invalid_text_representation, detail: "Value has bits set to right of mask.", message: "invalid cidr value: \"127.0.0.1/4\"" }, query: "SELECT '127.0.0.1/4'::cidr" } = query("SELECT '127.0.0.1/4'::cidr", []) assert [[%Postgrex.INET{address: {112, 0, 0, 0}, netmask: 4}]] = query("SELECT '112.0.0.0/4'::cidr", []) assert [[%Postgrex.INET{address: {0, 0, 0, 0, 0, 0, 0, 1}, netmask: nil}]] = query("SELECT '::1'::inet", []) assert [[%Postgrex.INET{address: {0, 0, 0, 0, 0, 0, 0, 1}, netmask: nil}]] = query("SELECT '::1/128'::inet", []) assert [[%Postgrex.INET{address: {0, 0, 0, 0, 0, 0, 0, 1}, netmask: 128}]] = query("SELECT '::1/128'::inet::cidr", []) assert [[%Postgrex.INET{address: {8193, 43981, 0, 0, 0, 0, 0, 0}, netmask: 8}]] = query("SELECT '2001:abcd::/8'::inet", 
[]) assert [[%Postgrex.INET{address: {8192, 0, 0, 0, 0, 0, 0, 0}, netmask: 8}]] = query("SELECT '2001:abcd::/8'::inet::cidr", []) assert %Postgrex.Error{ postgres: %{ code: :invalid_text_representation, detail: "Value has bits set to right of mask.", message: "invalid cidr value: \"2001:abcd::/8\"" }, query: "SELECT '2001:abcd::/8'::cidr" } = query("SELECT '2001:abcd::/8'::cidr", []) assert [[%Postgrex.INET{address: {8192, 0, 0, 0, 0, 0, 0, 0}, netmask: 8}]] = query("SELECT '2000::/8'::cidr", []) assert [[%Postgrex.MACADDR{address: {8, 1, 43, 5, 7, 9}}]] = query("SELECT '08:01:2b:05:07:09'::macaddr", []) end test "decode oid and its aliases", context do assert [[4_294_967_295]] = query("select 4294967295::oid;", []) assert [["-"]] = query("select '-'::regproc::text;", []) assert [["sum(integer)"]] = query("select 'sum(int4)'::regprocedure::text;", []) assert [["||/"]] = query("select 'pg_catalog.||/'::regoper::text;", []) assert [["+(integer,integer)"]] = query("select '+(integer,integer)'::regoperator::text;", []) assert [["pg_type"]] = query("select 'pg_type'::regclass::text;", []) assert [["integer"]] = query("select 'int4'::regtype::text;", []) assert [[0]] = query("select '-'::regproc;", []) assert [[44]] = query("select 'regprocin'::regproc;", []) assert [[2108]] = query("select 'sum(int4)'::regprocedure;", []) assert [[597]] = query("select 'pg_catalog.||/'::regoper;", []) assert [[551]] = query("select '+(integer,integer)'::regoperator;", []) assert [[1247]] = query("select 'pg_type'::regclass;", []) assert [[23]] = query("select 'int4'::regtype;", []) # xid type assert [[xmin, xmax]] = query("select xmin, xmax from pg_type limit 1;", []) assert is_number(xmin) and is_number(xmax) # cid type assert [[cmin, cmax]] = query("select cmin, cmax from pg_type limit 1;", []) assert is_number(cmin) and is_number(cmax) end @tag min_pg_version: "9.0" test "hstore copies binaries by default", context do # For OTP 20+ refc binaries up to 64 bytes might be copied during a GC text = String.duplicate("hello world", 6) assert [[bin]] = query("SELECT $1::text", [text]) assert :binary.referenced_byte_size(bin) == byte_size(text) assert [[%{"hello" => value}]] = query("SELECT $1::hstore", [%{"hello" => text}]) assert :binary.referenced_byte_size(value) == byte_size(text) end test "decode bit string", context do assert [[<<1::1, 0::1, 1::1>>]] == query("SELECT bit '101'", []) assert [[<<1::1, 1::1, 0::1>>]] == query("SELECT bit '110'", []) assert [[<<1::1, 1::1, 0::1>>]] == query("SELECT bit '110' :: varbit", []) assert [[<<1::1, 0::1, 1::1, 1::1, 0::1>>]] == query("SELECT bit '10110'", []) assert [[<<1::1, 0::1, 1::1, 0::1, 0::1>>]] == query("SELECT bit '101' :: bit(5)", []) assert [[<<1::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 1::1, 0::1, 1::1>>]] == query("SELECT bit '10000000101'", []) assert [ [ <<0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 1::1, 0::1, 1::1>> ] ] == query("SELECT bit '0000000000000000101'", []) assert [ [ <<1::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 1::1, 0::1, 1::1>> ] ] == query("SELECT bit '1000000000000000101'", []) assert [ [ <<1::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 1::1, 1::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 1::1, 0::1, 1::1>> ] ] == query("SELECT bit '1000000110000000101'", []) end test "encode oid and its aliases", context do # oid's range is 0 to 4294967295 assert [[0]] = query("select $1::oid;", [0]) assert [[4_294_967_295]] = query("select 
$1::oid;", [4_294_967_295]) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::oid", [0 - 1])) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::oid", [4_294_967_295 + 1])) assert [["-"]] = query("select $1::regproc::text;", [0]) assert [["regprocin"]] = query("select $1::regproc::text;", [44]) assert [["sum(integer)"]] = query("select $1::regprocedure::text;", [2108]) assert [["||/"]] = query("select $1::regoper::text;", [597]) assert [["+(integer,integer)"]] = query("select $1::regoperator::text;", [551]) assert [["pg_type"]] = query("select $1::regclass::text;", [1247]) assert [["integer"]] = query("select $1::regtype::text;", [23]) assert [[0]] = query("select $1::text::regproc;", ["-"]) assert [[44]] = query("select $1::text::regproc;", ["regprocin"]) assert [[2108]] = query("select $1::text::regprocedure;", ["sum(int4)"]) assert [[597]] = query("select $1::text::regoper;", ["pg_catalog.||/"]) assert [[551]] = query("select $1::text::regoperator;", ["+(integer,integer)"]) assert [[1247]] = query("select $1::text::regclass;", ["pg_type"]) assert [[23]] = query("select $1::text::regtype;", ["int4"]) end test "tuple ids", context do assert [[_tid]] = query("select ctid from pg_type limit 1;", []) assert [[{5, 10}]] = query("select $1::tid;", [{5, 10}]) end test "encoding oids as binary fails with a helpful error message", context do assert_raise ArgumentError, ~r"See https://github.com/elixir-ecto/postgrex#oid-type-encoding", fn -> query("select $1::regclass;", ["pg_type"]) end end test "fail on encoding wrong value", context do assert %DBConnection.EncodeError{message: message} = catch_error(query("SELECT $1::integer", ["123"])) assert message =~ "Postgrex expected an integer in -2147483648..2147483647" end @tag min_pg_version: "9.0" test "decode hstore", context do assert [[%{}]] = query(~s|SELECT ''::hstore|, []) assert [[%{"Bubbles" => "7", "Name" => "Frank"}]] = query(~s|SELECT '"Name" => "Frank", "Bubbles" => "7"'::hstore|, []) assert [[%{"non_existant" => nil, "present" => "&accounted_for"}]] = query(~s|SELECT '"non_existant" => NULL, "present" => "&accounted_for"'::hstore|, []) assert [[%{"spaces in the key" => "are easy!", "floats too" => "66.6"}]] = query( ~s|SELECT '"spaces in the key" => "are easy!", "floats too" => "66.6"'::hstore|, [] ) assert [[%{"this is true" => "true", "though not this" => "false"}]] = query( ~s|SELECT '"this is true" => "true", "though not this" => "false"'::hstore|, [] ) end test "encode basic types", context do assert [[nil, nil]] = query("SELECT $1::text, $2::int", [nil, nil]) assert [[true, false]] = query("SELECT $1::bool, $2::bool", [true, false]) assert [["ẽ"]] = query("SELECT $1::char", ["ẽ"]) assert [[42]] = query("SELECT $1::int", [42]) assert [[42.0, 43.0]] = query("SELECT $1::float, $2::float", [42, 43.0]) assert [[:NaN]] = query("SELECT $1::float", [:NaN]) assert [[:inf]] = query("SELECT $1::float", [:inf]) assert [[:"-inf"]] = query("SELECT $1::float", [:"-inf"]) assert [["ẽric"]] = query("SELECT $1::varchar", ["ẽric"]) assert [[<<1, 2, 3>>]] = query("SELECT $1::bytea", [<<1, 2, 3>>]) end test "encode numeric", context do nums = [ "42", "0.4242", "42.4242", "1.001", "1.00123", "0.01", "0.00012345", "1000000000", "1000000000.0", "123456789123456789123456789", "123456789123456789123456789.123456789", "1.1234500000", "1.0000000000", "1.111101", "1.1111111101", "1.11110001", "NaN", "-42" ] Enum.each(nums, fn num -> dec = Decimal.new(num) assert [[dec]] == query("SELECT $1::numeric", [dec]) end) end test 
"encode numeric rises for infinite values", context do assert_raise ArgumentError, "cannot represent #Decimal<Infinity> as numeric type", fn -> query("SELECT $1::numeric", [Decimal.new("Infinity")]) end assert_raise ArgumentError, "cannot represent #Decimal<-Infinity> as numeric type", fn -> query("SELECT $1::numeric", [Decimal.new("-Infinity")]) end end test "encode integers and floats as numeric", context do dec = Decimal.new(1) assert [[dec]] == query("SELECT $1::numeric", [1]) dec = Decimal.from_float(1.0) assert [[dec]] == query("SELECT $1::numeric", [1.0]) end @tag min_pg_version: "9.5" test "encode json/jsonb", context do json = %{"foo" => 42} assert [[json]] == query("SELECT $1::json", [json]) assert [[json]] == query("SELECT $1::jsonb", [json]) end test "encode custom numerics", context do assert [[%Decimal{sign: 1, coef: 1500, exp: 0}]] == query("SELECT $1::numeric", [Decimal.from_float(1500.0)]) assert [[%Decimal{sign: 1, coef: 1, exp: 0}]] == query("SELECT $1::numeric", [Decimal.new(1, 1, 0)]) assert [[%Decimal{sign: 1, coef: 10, exp: 0}]] == query("SELECT $1::numeric", [Decimal.new(1, 1, 1)]) assert [[%Decimal{sign: 1, coef: 100, exp: 0}]] == query("SELECT $1::numeric", [Decimal.new(1, 1, 2)]) assert [[%Decimal{sign: 1, coef: 1000, exp: 0}]] == query("SELECT $1::numeric", [Decimal.new(1, 1, 3)]) assert [[%Decimal{sign: 1, coef: 10000, exp: 0}]] == query("SELECT $1::numeric", [Decimal.new(1, 1, 4)]) assert [[%Decimal{sign: 1, coef: 100_000, exp: 0}]] == query("SELECT $1::numeric", [Decimal.new(1, 1, 5)]) assert [[%Decimal{sign: 1, coef: 1, exp: -5}]] == query("SELECT $1::numeric", [Decimal.new(1, 1, -5)]) end test "encode enforces bounds on integers", context do # int2's range is -32768 to +32767 assert [[-32768]] = query("SELECT $1::int2", [-32768]) assert [[32767]] = query("SELECT $1::int2", [32767]) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::int2", [32767 + 1])) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::int2", [-32768 - 1])) # int4's range is -2147483648 to +2147483647 assert [[-2_147_483_648]] = query("SELECT $1::int4", [-2_147_483_648]) assert [[2_147_483_647]] = query("SELECT $1::int4", [2_147_483_647]) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::int4", [2_147_483_647 + 1])) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::int4", [-2_147_483_648 - 1])) # int8's range is -9223372036854775808 to 9223372036854775807 assert [[-9_223_372_036_854_775_808]] = query("SELECT $1::int8", [-9_223_372_036_854_775_808]) assert [[9_223_372_036_854_775_807]] = query("SELECT $1::int8", [9_223_372_036_854_775_807]) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::int8", [9_223_372_036_854_775_807 + 1])) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::int8", [-9_223_372_036_854_775_808 - 1])) end test "encode uuid", context do uuid = <<0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15>> assert [[^uuid]] = query("SELECT $1::uuid", [uuid]) end test "encode interval", context do assert [[%Postgrex.Interval{months: 0, days: 0, secs: 0, microsecs: 0}]] = query("SELECT $1::interval", [ %Postgrex.Interval{months: 0, days: 0, secs: 0, microsecs: 0} ]) assert [[%Postgrex.Interval{months: 100, days: 0, secs: 0, microsecs: 0}]] = query("SELECT $1::interval", [ %Postgrex.Interval{months: 100, days: 0, secs: 0, microsecs: 0} ]) assert [[%Postgrex.Interval{months: 0, days: 100, secs: 0, microsecs: 0}]] = query("SELECT $1::interval", [ %Postgrex.Interval{months: 0, days: 100, 
secs: 0, microsecs: 0} ]) assert [[%Postgrex.Interval{months: 0, days: 0, secs: 100, microsecs: 0}]] = query("SELECT $1::interval", [ %Postgrex.Interval{months: 0, days: 0, secs: 100, microsecs: 0} ]) assert [[%Postgrex.Interval{months: 14, days: 40, secs: 10920, microsecs: 0}]] = query("SELECT $1::interval", [ %Postgrex.Interval{months: 14, days: 40, secs: 10920, microsecs: 0} ]) assert [[%Postgrex.Interval{months: 14, days: 40, secs: 10921, microsecs: 24000}]] = query("SELECT $1::interval", [ %Postgrex.Interval{months: 14, days: 40, secs: 10920, microsecs: 1_024_000} ]) end test "encode arrays", context do assert [[[]]] = query("SELECT $1::integer[]", [[]]) assert [[[1]]] = query("SELECT $1::integer[]", [[1]]) assert [[[1, 2]]] = query("SELECT $1::integer[]", [[1, 2]]) assert [[[[0], [1]]]] = query("SELECT $1::integer[]", [[[0], [1]]]) assert [[[[0]]]] = query("SELECT $1::integer[]", [[[0]]]) assert [[[1, nil, 3]]] = query("SELECT $1::integer[]", [[1, nil, 3]]) end @tag :capture_log test "encode record", context do assert [[{1, "2"}]] = query("SELECT $1::composite1", [{1, "2"}]) assert [[[{1, "2"}]]] = query("SELECT $1::composite1[]", [[{1, "2"}]]) assert [[{1, nil, 3}]] = query("SELECT $1::composite2", [{1, nil, 3}]) end test "encode enum", context do assert [["elixir"]] = query("SELECT $1::enum1", ["elixir"]) end @tag min_pg_version: "9.2" test "encode range", context do assert [[%Postgrex.Range{lower: 1, upper: 4, lower_inclusive: true, upper_inclusive: false}]] = query("SELECT $1::int4range", [ %Postgrex.Range{lower: 1, upper: 3, lower_inclusive: true, upper_inclusive: true} ]) assert [ [ %Postgrex.Range{ lower: :unbound, upper: 6, lower_inclusive: false, upper_inclusive: false } ] ] = query("SELECT $1::int4range", [ %Postgrex.Range{ lower: :unbound, upper: 5, lower_inclusive: false, upper_inclusive: true } ]) assert [ [ %Postgrex.Range{ lower: 3, upper: :unbound, lower_inclusive: true, upper_inclusive: false } ] ] = query("SELECT $1::int4range", [ %Postgrex.Range{ lower: 3, upper: :unbound, lower_inclusive: true, upper_inclusive: true } ]) assert [[%Postgrex.Range{lower: 4, upper: 5, lower_inclusive: true, upper_inclusive: false}]] = query("SELECT $1::int4range", [ %Postgrex.Range{lower: 3, upper: 5, lower_inclusive: false, upper_inclusive: false} ]) assert [ [ %Postgrex.Range{ lower: :unbound, upper: :unbound, lower_inclusive: false, upper_inclusive: false } ] ] = query("SELECT $1::int4range", [ %Postgrex.Range{ lower: :unbound, upper: :unbound, lower_inclusive: false, upper_inclusive: false } ]) assert [ [ %Postgrex.Range{ lower: :unbound, upper: :unbound, lower_inclusive: false, upper_inclusive: false } ] ] = query("SELECT $1::int4range", [ %Postgrex.Range{ lower: :unbound, upper: :unbound, lower_inclusive: true, upper_inclusive: true } ]) assert [ [ %Postgrex.Range{ lower: :empty, upper: :empty, lower_inclusive: false, upper_inclusive: false } ] ] = query("SELECT $1::int4range", [ %Postgrex.Range{ lower: :empty, upper: :empty, lower_inclusive: true, upper_inclusive: true } ]) assert [[%Postgrex.Range{lower: 1, upper: 4, lower_inclusive: true, upper_inclusive: false}]] = query("SELECT $1::int8range", [ %Postgrex.Range{lower: 1, upper: 3, lower_inclusive: true, upper_inclusive: true} ]) assert [ [ %Postgrex.Range{ lower: Decimal.new("1.2"), upper: Decimal.new("3.4"), lower_inclusive: true, upper_inclusive: true } ] ] == query("SELECT $1::numrange", [ %Postgrex.Range{ lower: Decimal.new("1.2"), upper: Decimal.new("3.4"), lower_inclusive: true, upper_inclusive: true } ]) assert [ [ 
%Postgrex.Range{ lower: %Date{year: 2014, month: 1, day: 1}, upper: %Date{year: 2015, month: 1, day: 1} } ] ] = query("SELECT $1::daterange", [ %Postgrex.Range{ lower: %Date{year: 2014, month: 1, day: 1}, upper: %Date{year: 2014, month: 12, day: 31} } ]) assert [[%Postgrex.Range{lower: :unbound, upper: %Date{year: 2015, month: 1, day: 1}}]] = query("SELECT $1::daterange", [ %Postgrex.Range{lower: :unbound, upper: %Date{year: 2014, month: 12, day: 31}} ]) assert [[%Postgrex.Range{lower: %Date{year: 2014, month: 1, day: 1}, upper: :unbound}]] = query("SELECT $1::daterange", [ %Postgrex.Range{lower: %Date{year: 2014, month: 1, day: 1}, upper: :unbound} ]) assert [ [ %Postgrex.Range{ lower: :unbound, upper: :unbound, lower_inclusive: false, upper_inclusive: false } ] ] = query("SELECT $1::daterange", [ %Postgrex.Range{ lower: :unbound, upper: :unbound, lower_inclusive: false, upper_inclusive: false } ]) assert [ [ %Postgrex.Range{ lower: :unbound, upper: :unbound, lower_inclusive: false, upper_inclusive: false } ] ] = query("SELECT $1::daterange", [ %Postgrex.Range{ lower: :unbound, upper: :unbound, lower_inclusive: true, upper_inclusive: true } ]) end @tag min_pg_version: "9.2" test "encode enforces bounds on integer ranges", context do # int4's range is -2147483648 to +2147483647 assert [[%Postgrex.Range{lower: -2_147_483_648}]] = query("SELECT $1::int4range", [%Postgrex.Range{lower: -2_147_483_648}]) assert [[%Postgrex.Range{upper: 2_147_483_647}]] = query("SELECT $1::int4range", [ %Postgrex.Range{upper: 2_147_483_647, upper_inclusive: false} ]) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::int4range", [%Postgrex.Range{lower: -2_147_483_649}])) assert %DBConnection.EncodeError{} = catch_error(query("SELECT $1::int4range", [%Postgrex.Range{upper: 2_147_483_648}])) # int8's range is -9223372036854775808 to 9223372036854775807 assert [[%Postgrex.Range{lower: -9_223_372_036_854_775_807}]] = query("SELECT $1::int8range", [%Postgrex.Range{lower: -9_223_372_036_854_775_807}]) assert [[%Postgrex.Range{upper: 9_223_372_036_854_775_806}]] = query("SELECT $1::int8range", [ %Postgrex.Range{upper: 9_223_372_036_854_775_806, upper_inclusive: false} ]) assert %DBConnection.EncodeError{} = catch_error( query("SELECT $1::int8range", [%Postgrex.Range{lower: -9_223_372_036_854_775_809}]) ) assert %DBConnection.EncodeError{} = catch_error( query("SELECT $1::int8range", [%Postgrex.Range{upper: 9_223_372_036_854_775_808}]) ) end @tag min_pg_version: "9.0" test "encode hstore", context do assert [ [ %{ "name" => "Frank", "bubbles" => "7", "limit" => nil, "chillin" => "true", "fratty" => "false", "atom" => "bomb" } ] ] = query(~s(SELECT $1::hstore), [ %{ "name" => "Frank", "bubbles" => "7", "limit" => nil, "chillin" => "true", "fratty" => "false", "atom" => "bomb" } ]) end @tag min_pg_version: "9.0" test "encode network types", context do assert [["127.0.0.1/32"]] = query("SELECT $1::inet::text", [ %Postgrex.INET{address: {127, 0, 0, 1}, netmask: nil} ]) assert [["127.0.0.1/32"]] = query("SELECT $1::inet::text", [%Postgrex.INET{address: {127, 0, 0, 1}, netmask: 32}]) assert [["127.0.0.1/32"]] = query("SELECT $1::inet::cidr::text", [ %Postgrex.INET{address: {127, 0, 0, 1}, netmask: 32} ]) assert [["127.0.0.1/32"]] = query("SELECT $1::cidr::text", [%Postgrex.INET{address: {127, 0, 0, 1}, netmask: 32}]) assert [["127.0.0.1/4"]] = query("SELECT $1::inet::text", [%Postgrex.INET{address: {127, 0, 0, 1}, netmask: 4}]) assert %Postgrex.Error{ postgres: %{ code: :invalid_binary_representation, detail: 
"Value has bits set to right of mask.", message: "invalid external \"cidr\" value" } } = query("SELECT $1::cidr::text", [%Postgrex.INET{address: {127, 0, 0, 1}, netmask: 4}]) assert [["112.0.0.0/4"]] = query("SELECT $1::cidr::text", [%Postgrex.INET{address: {112, 0, 0, 0}, netmask: 4}]) assert [["::1/128"]] = query("SELECT $1::inet::text", [ %Postgrex.INET{address: {0, 0, 0, 0, 0, 0, 0, 1}, netmask: nil} ]) assert [["::1/128"]] = query("SELECT $1::inet::text", [ %Postgrex.INET{address: {0, 0, 0, 0, 0, 0, 0, 1}, netmask: 128} ]) assert [["::1/128"]] = query("SELECT $1::inet::cidr::text", [ %Postgrex.INET{address: {0, 0, 0, 0, 0, 0, 0, 1}, netmask: 128} ]) assert [["2001:abcd::/8"]] = query("SELECT $1::inet::text", [ %Postgrex.INET{address: {8193, 43981, 0, 0, 0, 0, 0, 0}, netmask: 8} ]) assert [["2000::/8"]] = query("SELECT $1::inet::cidr::text", [ %Postgrex.INET{address: {8192, 0, 0, 0, 0, 0, 0, 0}, netmask: 8} ]) assert %Postgrex.Error{ postgres: %{ code: :invalid_binary_representation, detail: "Value has bits set to right of mask.", message: "invalid external \"cidr\" value" } } = query("SELECT $1::cidr::text", [ %Postgrex.INET{address: {8193, 43981, 0, 0, 0, 0, 0, 0}, netmask: 8} ]) assert [["2000::/8"]] = query("SELECT $1::cidr::text", [ %Postgrex.INET{address: {8192, 0, 0, 0, 0, 0, 0, 0}, netmask: 8} ]) assert [["08:01:2b:05:07:09"]] = query("SELECT $1::macaddr::text", [%Postgrex.MACADDR{address: {8, 1, 43, 5, 7, 9}}]) end test "encode bit string", context do assert [["110"]] == query("SELECT $1::bit(3)::text", [<<1::1, 1::1, 0::1>>]) assert [["110"]] == query("SELECT $1::varbit::text", [<<1::1, 1::1, 0::1>>]) assert [["101"]] == query("SELECT $1::bit(3)::text", [<<1::1, 0::1, 1::1>>]) assert [["11010"]] == query("SELECT $1::bit(5)::text", [<<1::1, 1::1, 0::1, 1::1>>]) assert [["10000000101"]] == query( "SELECT $1::bit(11)::text", [<<1::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 1::1, 0::1, 1::1>>] ) assert [["0000000000000000101"]] == query( "SELECT $1::bit(19)::text", [ <<0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 1::1, 0::1, 1::1>> ] ) assert [["1000000000000000101"]] == query( "SELECT $1::bit(19)::text", [ <<1::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 1::1, 0::1, 1::1>> ] ) assert [["1000000110000000101"]] == query( "SELECT $1::bit(19)::text", [ <<1::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 1::1, 1::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 0::1, 1::1, 0::1, 1::1>> ] ) end test "fail on encode arrays", context do assert_raise ArgumentError, "nested lists must have lists with matching lengths", fn -> query("SELECT $1::integer[]", [[[1], [1, 2]]]) end assert [[42]] = query("SELECT 42", []) end test "fail on parameter length mismatch", context do assert_raise ArgumentError, ~r"parameters must be of length 1 for query", fn -> query("SELECT $1::integer", [1, 2]) end assert_raise ArgumentError, ~r"parameters must be of length 0 for query", fn -> query("SELECT 42", [1]) end assert [[42]] = query("SELECT 42", []) end test "non data statement", context do assert :ok = query("BEGIN", []) assert :ok = query("COMMIT", []) end test "result struct", context do assert {:ok, res} = P.query(context[:pid], "SELECT 123 AS a, 456 AS b", []) assert %Postgrex.Result{} = res assert res.command == :select assert res.columns == ["a", "b"] assert res.num_rows == 1 end test "error struct", context do assert {:error, %Postgrex.Error{}} = P.query(context[:pid], "SELECT 123 + 'a'", []) end test "multi row 
result struct", context do assert {:ok, res} = P.query(context[:pid], "VALUES (1, 2), (3, 4)", []) assert res.num_rows == 2 assert res.rows == [[1, 2], [3, 4]] end test "multi row result struct with decode mapper", context do map = &Enum.map(&1, fn x -> x * 2 end) assert [[2, 4], [6, 8]] = query("VALUES (1, 2), (3, 4)", [], decode_mapper: map) end test "insert", context do :ok = query("CREATE TABLE test (id int, text text)", []) [] = query("SELECT * FROM test", []) :ok = query("INSERT INTO test VALUES ($1, $2)", [42, "fortytwo"], []) [[42, "fortytwo"]] = query("SELECT * FROM test", []) end test "prepare, execute and close", context do assert (%Postgrex.Query{} = query) = prepare("42", "SELECT 42") assert [[42]] = execute(query, []) assert [[42]] = execute(query, []) assert :ok = close(query) assert [[42]] = query("SELECT 42", []) end test "prepare_execute, execute and close", context do assert {query, [[42]]} = prepare_execute("42", "SELECT $1::int", [42]) assert [[41]] = execute(query, [41]) assert :ok = close(query) assert [[43]] = execute(query, [43]) end test "prepare and execute different queries with same name", context do assert (%Postgrex.Query{name: "select"} = query42) = prepare("select", "SELECT 42") assert close(query42) == :ok assert %Postgrex.Query{} = prepare("select", "SELECT 41") assert [[42]] = execute(query42, []) assert [[42]] = query("SELECT 42", []) end test "prepare, close and execute", context do assert (%Postgrex.Query{} = query) = prepare("reuse", "SELECT $1::int") assert [[42]] = execute(query, [42]) assert :ok = close(query) assert [[42]] = execute(query, [42]) end test "closing prepared query that does not exist succeeds", context do assert (%Postgrex.Query{} = query) = prepare("42", "SELECT 42") assert :ok = close(query) assert :ok = close(query) end @tag prepare: :unnamed test "prepare named is unnamed when named not allowed", context do assert (%Postgrex.Query{name: ""} = query) = prepare("42", "SELECT 42") assert [[42]] = execute(query, []) assert [[42]] = execute(query, []) assert :ok = close(query) assert [[42]] = query("SELECT 42", []) end test "execute prepared query on another connection", context do query = prepare("S42", "SELECT 42") {:ok, pid2} = Postgrex.start_link(context[:options]) assert {:ok, ^query, %Postgrex.Result{rows: [[42]]}} = Postgrex.execute(pid2, query, []) assert {:ok, %Postgrex.Result{rows: [[41]]}} = Postgrex.query(pid2, "SELECT 41", []) end test "execute prepared query when deallocated", context do query = prepare("S42", "SELECT 42") assert query("DEALLOCATE ALL", []) == :ok assert %Postgrex.Error{} = execute(query, []) assert execute(query, []) == [[42]] end test "error codes are translated", context do assert %Postgrex.Error{postgres: %{code: :syntax_error}} = query("wat", []) end test "connection works after failure in parsing state", context do assert %Postgrex.Error{} = query("wat", []) assert [[42]] = query("SELECT 42", []) end test "connection works after failure in binding state", context do assert %Postgrex.Error{postgres: %{code: :invalid_text_representation}} = query("insert into uniques values (CAST($1::text AS int))", ["invalid"]) assert [[42]] = query("SELECT 42", []) end test "connection works after failure in executing state", context do assert %Postgrex.Error{postgres: %{code: :unique_violation}} = query("insert into uniques values (1), (1);", []) assert [[42]] = query("SELECT 42", []) end test "connection works after failure during transaction", context do assert :ok = query("BEGIN", []) assert 
%Postgrex.Error{postgres: %{code: :unique_violation}} = query("insert into uniques values (1), (1);", []) assert %Postgrex.Error{postgres: %{code: :in_failed_sql_transaction}} = query("SELECT 42", []) assert :ok = query("ROLLBACK", []) assert [[42]] = query("SELECT 42", []) end test "connection works on custom transactions", context do assert :ok = query("BEGIN", []) assert :ok = query("COMMIT", []) assert :ok = query("BEGIN", []) assert :ok = query("ROLLBACK", []) assert [[42]] = query("SELECT 42", []) end test "connection works after failure in prepare", context do assert %Postgrex.Error{} = prepare("bad", "wat") assert [[42]] = query("SELECT 42", []) end test "connection works after failure in execute", context do %Postgrex.Query{} = query = prepare("unique", "insert into uniques values (1), (1);") assert %Postgrex.Error{postgres: %{code: :unique_violation}} = execute(query, []) assert %Postgrex.Error{postgres: %{code: :unique_violation}} = execute(query, []) assert [[42]] = query("SELECT 42", []) end test "connection reuses prepared query after query", context do %Postgrex.Query{} = query = prepare("", "SELECT 41") assert [[42]] = query("SELECT 42", []) assert [[41]] = execute(query, []) end test "connection reuses prepared query after failure in unnamed preparing state", context do %Postgrex.Query{} = query = prepare("", "SELECT 41") assert %Postgrex.Error{postgres: %{code: :syntax_error}} = query("wat", []) assert [[41]] = execute(query, []) end test "connection reuses prepared query after failure in named preparing state", context do %Postgrex.Query{} = query = prepare("named", "SELECT 41") assert %Postgrex.Error{postgres: %{code: :syntax_error}} = prepare("named", "wat") assert [[41]] = execute(query, []) end test "connection reuses prepared query after failure in executing state", context do %Postgrex.Query{} = query = prepare("", "SELECT 41") assert %Postgrex.Error{postgres: %{code: :unique_violation}} = query("insert into uniques values (1), (1);", []) assert [[41]] = execute(query, []) end test "connection forces prepare on execute after prepare of same name", context do %Postgrex.Query{} = query41 = prepare("", "SELECT 41") assert %Postgrex.Query{} = query42 = prepare("", "SELECT 42") assert [[42]] = execute(query42, []) assert [[41]] = execute(query41, []) end test "connection describes query when already prepared", context do %Postgrex.Query{} = prepare("", "SELECT 41") %Postgrex.Query{} = query = prepare("", "SELECT 41") assert [[41]] = execute(query, []) assert [[42]] = query("SELECT 42", []) end test "async test", context do self_pid = self() Enum.each(1..10, fn _ -> spawn_link(fn -> send(self_pid, query("SELECT pg_sleep(0.05)", [])) end) end) assert [[42]] = query("SELECT 42", []) Enum.each(1..10, fn _ -> assert_receive [[:void]] end) end test "raise when trying to execute unprepared query", context do assert_raise ArgumentError, ~r/has not been prepared/, fn -> execute(%Postgrex.Query{name: "hi", statement: "BEGIN"}, []) end end test "raise when trying to parse prepared query", context do assert_raise ArgumentError, ~r/has already been prepared/, fn -> DBConnection.Query.parse(prepare("SELECT 42", []), []) end end test "query struct interpolates to statement" do assert "#{%Postgrex.Query{statement: "BEGIN"}}" == "BEGIN" end test "connection_id", context do assert {:ok, %Postgrex.Result{connection_id: connection_id, rows: [[backend_pid]]}} = Postgrex.query(context[:pid], "SELECT pg_backend_pid()", []) assert is_integer(connection_id) assert connection_id == backend_pid 
assert {:error, %Postgrex.Error{connection_id: connection_id}} = Postgrex.query(context[:pid], "FOO BAR", []) assert is_integer(connection_id) end test "empty query", context do assert %Postgrex.Result{command: nil, rows: nil, num_rows: 0} = Postgrex.query!(context[:pid], "", []) end test "query from child spec", %{options: opts, test: test} do child_spec = Postgrex.child_spec([name: test] ++ opts) Supervisor.start_link([child_spec], strategy: :one_for_one) %Postgrex.Result{rows: [[42]]} = Postgrex.query!(test, "SELECT 42", []) end test "query before and after idle ping" do opts = [database: "postgrex_test", backoff_type: :stop, idle_interval: 1] {:ok, pid} = P.start_link(opts) assert {:ok, _} = P.query(pid, "SELECT 42", []) :timer.sleep(20) assert {:ok, _} = P.query(pid, "SELECT 42", []) :timer.sleep(20) assert {:ok, _} = P.query(pid, "SELECT 42", []) end test "too many parameters query disconnects", context do Process.flag(:trap_exit, true) params = 1..0x10000 query = ["INSERT INTO uniques VALUES (0)" | Enum.map(params, &[", ($#{&1}::int4)"])] params = Enum.into(params, []) message = "postgresql protocol can not handle 65536 parameters, the maximum is 65535" assert capture_log(fn -> %Postgrex.QueryError{message: ^message} = query(query, params) pid = context[:pid] assert_receive {:EXIT, ^pid, :killed} end) =~ message end test "COPY FROM STDIN disconnects", context do Process.flag(:trap_exit, true) message = "trying to copy in but no copy data to send" assert capture_log(fn -> assert %RuntimeError{message: runtime} = query("COPY uniques FROM STDIN", []) assert runtime =~ message pid = context[:pid] assert_receive {:EXIT, ^pid, :killed} end) =~ message end test "COPY TO STDOUT", context do assert [] = query("COPY uniques TO STDOUT", []) assert ["1\t2\n"] = query("COPY (VALUES (1, 2)) TO STDOUT", []) assert ["1\t2\n", "3\t4\n"] = query("COPY (VALUES (1, 2), (3, 4)) TO STDOUT", []) end test "COPY TO STDOUT with decoder_mapper", context do opts = [decode_mapper: &String.split/1] assert [["1", "2"], ["3", "4"]] = query("COPY (VALUES (1, 2), (3, 4)) TO STDOUT", [], opts) end test "receive packet with remainder greater than 64MB", context do # to ensure remainder is more than 64MB use 64MBx2+1 big_binary = :binary.copy(<<1>>, 128 * 1024 * 1024 + 1) assert [[binary]] = query("SELECT $1::bytea;", [big_binary]) assert byte_size(binary) == 128 * 1024 * 1024 + 1 end test "terminate backend", context do Process.flag(:trap_exit, true) assert {:ok, pid} = P.start_link([idle_interval: 10] ++ context[:options]) %Postgrex.Result{connection_id: connection_id} = Postgrex.query!(pid, "SELECT 42", []) assert capture_log(fn -> assert [[true]] = query("SELECT pg_terminate_backend($1)", [connection_id]) assert_receive {:EXIT, ^pid, :killed}, 5000 end) =~ "** (Postgrex.Error) FATAL 57P01 (admin_shutdown)" end test "terminate backend with socket", context do Process.flag(:trap_exit, true) socket = System.get_env("PG_SOCKET_DIR") || "/tmp" assert {:ok, pid} = P.start_link([idle_interval: 10, socket_dir: socket] ++ context[:options]) %Postgrex.Result{connection_id: connection_id} = Postgrex.query!(pid, "SELECT 42", []) capture_log(fn -> assert [[true]] = query("SELECT pg_terminate_backend($1)", [connection_id]) assert_receive {:EXIT, ^pid, :killed}, 5000 end) end end
36.837945
99
0.528344
f792603df214cedc0332f01fdae0ade079d2c516
2,955
ex
Elixir
apps/smtp_recv/lib/smtp_recv/smtp_server.ex
shymega/dialoguex
974bd195780aea952497913537b3386fb6875977
[ "Apache-2.0" ]
2
2018-03-20T17:28:58.000Z
2018-05-07T14:13:21.000Z
apps/smtp_recv/lib/smtp_recv/smtp_server.ex
shymega/dialoguex
974bd195780aea952497913537b3386fb6875977
[ "Apache-2.0" ]
1
2018-03-20T17:54:56.000Z
2018-03-31T16:27:15.000Z
apps/smtp_recv/lib/smtp_recv/smtp_server.ex
shymega/dialoguex
974bd195780aea952497913537b3386fb6875977
[ "Apache-2.0" ]
null
null
null
defmodule SMTPRecv.SMTPServer do @moduledoc """ GenServer module for the SMTP server of the SMTPRecv app. Part of Dialoguex. """ alias Mail.Parsers.RFC2822 require Logger @behaviour :gen_smtp_server_session def init(hostname, session_count, _address, _options) do if session_count > 6 do Logger.warn(fn -> "SMTP server connection limit exceeded!" end) Logger.warn(fn -> "Rejecting." end) {:stop, :normal, ["421", hostname, " is unable to accept mail right now. Try again later."]} else banner = [hostname, " ESMTP"] {:ok, banner, %{}} end end def handle_DATA(_from, _to, data, state) do Logger.debug(fn -> "Received DATA message. Processing." end) Logger.debug(fn -> "Parsing message." end) Logger.debug(fn -> "Message parsed!" end) Logger.debug(fn -> "Updating state." end) state = state |> Map.put(:parsed, parse_email(data)) |> Map.put(:raw, data) Logger.debug(fn -> "State updated." end) Logger.debug(fn -> "Finished DATA handling." end) {:ok, data, state} end def handle_EHLO(hostname, extensions, state) do Logger.info(fn -> "Received connection from #{hostname} (EHLO)" end) {:ok, extensions, state} end def handle_HELO(hostname, state) do Logger.info(fn -> "Received connection from #{hostname} (HELO)" end) {:ok, 655_360, state} end def handle_MAIL(from, state) do Logger.debug(fn -> "Received MAIL command from #{from}" end) {:ok, Map.put(state, :from, from)} end def handle_MAIL_extension(extension, state) do Logger.debug(fn -> "Received MAIL extension: #{extension}" end) {:ok, state} end def handle_RCPT(to, state) do Logger.debug(fn -> "Received RCPT TO: #{to}" end) {:ok, Map.put(state, :to, to)} end def handle_RCPT_extension(extension, state) do Logger.debug(fn -> "Received RCPT extension: #{extension}" end) {:ok, state} end def handle_RSET(state) do Logger.debug(fn -> "Transmission reset." end) {:ok, state} end def handle_VRFY(address, state) do Logger.debug(fn -> "Received VRFY request for address: #{address}" end) {:ok, ["500: Command not recognised."], state} end def handle_other(command, _args, state) do Logger.error(fn -> "Command not recognised. Implement: #{command}" end) {["500 Error: command not recognized : #{command}"], state} end def code_change(_old, state, _extra) do {:ok, state} end def terminate(reason, state) do Logger.info("Terminating Session: #{reason}") {:ok, state} end defp parse_email(data) when is_binary(data) do if String.contains?(data, "\r") do data |> RFC2822.parse() else data |> convert_crlf() |> RFC2822.parse() end end defp convert_crlf(text) when is_binary(text) do text |> String.replace("\n", "\r\n") end end
23.267717
98
0.62775
f792a5c06bc4427b3e081d4c6a27617ca902b327
2,004
exs
Elixir
elixir-for-programmers/game/hangman/test/game_test.exs
GimliLongBow/exercises
e06517eacccd37a889c5d68a702de7ffb7f4bf37
[ "MIT" ]
2
2017-05-19T18:31:38.000Z
2017-05-19T18:31:41.000Z
elixir-for-programmers/game/hangman/test/game_test.exs
GimliLongBow/exercises
e06517eacccd37a889c5d68a702de7ffb7f4bf37
[ "MIT" ]
null
null
null
elixir-for-programmers/game/hangman/test/game_test.exs
GimliLongBow/exercises
e06517eacccd37a889c5d68a702de7ffb7f4bf37
[ "MIT" ]
null
null
null
defmodule GameTest do use ExUnit.Case doctest Hangman.Game alias Hangman.Game test "new_game returns structure" do game = Game.new_game() assert game.turns_left == 7 assert game.game_state == :initializing assert length(game.letters) > 0 end test "new_game returns only a string" do game = Game.new_game() letters = game.letters |> List.to_string assert Regex.match?(~r/^[a-z]+$/, letters) == true end test "state hasn't changed for :won and lost game" do for state <- [ :won, :lost ] do game = Game.new_game |> Map.put(:game_state, state) assert { ^game, _ } = Game.make_move(game, "blah") end end test "first occurrence of a letter is not already used" do game = Game.new_game { game, _tally } = Game.make_move(game, "x") assert game.game_state != :already_used end test "second occurrence of a letter is already used" do game = Game.new_game { game, _tally } = Game.make_move(game, "x") assert game.game_state != :already_used { game, _tally } = Game.make_move(game, "x") assert game.game_state == :already_used end test "good guess is recognized" do game = Game.new_game("wibble") { game, _ } = Game.make_move(game, "w") assert game.game_state == :good_guess assert game.turns_left == 7 end test "a guessed word is a won game" do game = Game.new_game("wibble") { game, _ } = Game.make_move(game, "w") assert game.game_state == :good_guess assert game.turns_left == 7 { game, _ } = Game.make_move(game, "i") assert game.game_state == :good_guess assert game.turns_left == 7 { game, _ } = Game.make_move(game, "b") assert game.game_state == :good_guess assert game.turns_left == 7 { game, _ } = Game.make_move(game, "l") assert game.game_state == :good_guess assert game.turns_left == 7 { game, _ } = Game.make_move(game, "e") assert game.game_state == :won assert game.turns_left == 7 end end
27.452055
60
0.643214
f792b8a790f8dd4ab842c276e362b4df9aa6c4e6
454
exs
Elixir
test/models/user_test.exs
hackersguildco/flaggy
5c52879f0f8541bba5603e6a678d9527e76507db
[ "MIT" ]
1
2018-04-23T14:55:58.000Z
2018-04-23T14:55:58.000Z
test/models/user_test.exs
hackersguildco/flaggy
5c52879f0f8541bba5603e6a678d9527e76507db
[ "MIT" ]
null
null
null
test/models/user_test.exs
hackersguildco/flaggy
5c52879f0f8541bba5603e6a678d9527e76507db
[ "MIT" ]
null
null
null
defmodule Flaggy.UserTest do use Flaggy.ModelCase alias Flaggy.User @valid_attrs %{image: "some content", name: "some content", token: "some content"} @invalid_attrs %{} test "changeset with valid attributes" do changeset = User.changeset(%User{}, @valid_attrs) assert changeset.valid? end test "changeset with invalid attributes" do changeset = User.changeset(%User{}, @invalid_attrs) refute changeset.valid? end end
23.894737
84
0.713656
f792be7536feadd947511eb197ce74c492389d6d
82
exs
Elixir
test/lib/spender_web/views/layout_view_test.exs
LittleKidogo/Budgetinization
eae6dd62208ec7fb43c8c212f40611f8635205d5
[ "MIT" ]
2
2018-02-27T06:29:59.000Z
2018-06-09T16:53:49.000Z
test/lib/spender_web/views/layout_view_test.exs
LittleKidogo/Budgetinization
eae6dd62208ec7fb43c8c212f40611f8635205d5
[ "MIT" ]
94
2018-02-22T06:03:19.000Z
2018-06-28T14:30:31.000Z
test/lib/spender_web/views/layout_view_test.exs
LittleKidogo/Budgetinization
eae6dd62208ec7fb43c8c212f40611f8635205d5
[ "MIT" ]
1
2020-03-04T19:41:06.000Z
2020-03-04T19:41:06.000Z
defmodule SpenderWeb.LayoutViewTest do use SpenderWeb.ConnCase, async: true end
20.5
38
0.829268
f792c427d1bc733109fcde0824ca89ab0ce4d5d0
10,200
exs
Elixir
test/nys_etl/commcare/api_test.exs
RatioPBC/epi-viaduct-nys
99fb637785ea207aee5449fa01fa59dd18ec8bf2
[ "MIT" ]
2
2021-06-22T21:01:49.000Z
2021-11-04T18:36:48.000Z
test/nys_etl/commcare/api_test.exs
RatioPBC/epi-viaduct-nys
99fb637785ea207aee5449fa01fa59dd18ec8bf2
[ "MIT" ]
null
null
null
test/nys_etl/commcare/api_test.exs
RatioPBC/epi-viaduct-nys
99fb637785ea207aee5449fa01fa59dd18ec8bf2
[ "MIT" ]
null
null
null
defmodule NYSETL.Commcare.ApiTest do use NYSETL.DataCase, async: false alias NYSETL.Commcare alias NYSETL.Test describe "get_county_list" do setup :set_mox_from_context setup :mock_county_list test "returns list of counties" do {:ok, counties, :cache_skip} = Commcare.Api.get_county_list() assert length(counties) == 65 assert counties |> Enum.map(& &1["fixture_type"]) |> Enum.uniq() == ["county_list"] end test "caches the results" do name = {:global, "A#{:rand.uniform()}"} Supervisor.start_link([Commcare.Api.cache_spec(name)], strategy: :one_for_all) # Always a hit since starting the cache pre-caches the list {:ok, counties, :cache_hit} = Commcare.Api.get_county_list(:cache_enabled, name) assert hd(counties)["fields"]["domain"] == "ny-allegany-cdcms" end end describe "post_case" do test "handles success response" do NYSETL.HTTPoisonMock |> expect(:post, fn "http://commcare.test.host/a/uk-midsomer-cdcms/receiver/", "<xml>" = _body, _headers -> {:ok, %{status_code: 201, body: Test.Fixtures.commcare_submit_response(:success)}} end) assert {:ok, response} = Commcare.Api.post_case("<xml>", Test.Fixtures.test_county_1_domain()) assert response.body =~ "submit_success" end test "handles error response" do NYSETL.HTTPoisonMock |> expect(:post, fn "http://commcare.test.host/a/uk-midsomer-cdcms/receiver/", "<xml>" = _body, _headers -> {:ok, %{status_code: 201, body: Test.Fixtures.commcare_submit_response(:error)}} end) assert {:error, response} = Commcare.Api.post_case("<xml>", Test.Fixtures.test_county_1_domain()) assert response.body =~ "submit_error" end test "handles rate limit response" do NYSETL.HTTPoisonMock |> expect(:post, fn "http://commcare.test.host/a/uk-midsomer-cdcms/receiver/", "<xml>" = _body, _headers -> {:ok, %{status_code: 429, body: Test.Fixtures.commcare_submit_response(:error)}} end) assert {:error, :rate_limited} = Commcare.Api.post_case("<xml>", Test.Fixtures.test_county_1_domain()) end test "handles other status codes that aren't HttpPoison errors" do NYSETL.HTTPoisonMock |> expect(:post, fn "http://commcare.test.host/a/uk-midsomer-cdcms/receiver/", "<xml>" = _body, _headers -> {:ok, %{status_code: 202, body: "some other semi-successful error"}} end) assert {:error, %{status_code: 202, body: "some other semi-successful error"}} = Commcare.Api.post_case("<xml>", Test.Fixtures.test_county_1_domain()) end test "handles error responses" do NYSETL.HTTPoisonMock |> expect(:post, fn "http://commcare.test.host/a/uk-midsomer-cdcms/receiver/", "<xml>" = _body, _headers -> {:error, %{status_code: 500, body: "definitely an error"}} end) assert {:error, %{status_code: 500, body: "definitely an error"}} = Commcare.Api.post_case("<xml>", Test.Fixtures.test_county_1_domain()) end end describe "get_case" do test "gets a case using case_id" do url = "http://commcare.test.host/a/nj-covid-camden/api/v0.5/case/12345678-1234-1234-1234-123456789012/?format=json&child_cases__full=true" NYSETL.HTTPoisonMock |> stub(:get, fn ^url, _headers, _opts -> body = Test.Fixtures.case_response("nj-covid-camden", "12345678-1234-1234-1234-123456789012") {:ok, %{body: body, status_code: 200, request_url: url}} end) assert {:ok, response} = Commcare.Api.get_case(commcare_case_id: Test.Fixtures.commcare_case_id(), county_domain: Test.Fixtures.county_domain()) response |> Map.get("case_id") |> assert_eq(Test.Fixtures.commcare_case_id()) end test "returns {:error, :not_found} when the case is not found" do url = 
"http://commcare.test.host/a/nj-covid-camden/api/v0.5/case/non-existent-case/?format=json&child_cases__full=true" NYSETL.HTTPoisonMock |> stub(:get, fn ^url, _headers, _opts -> {:ok, %{body: "", status_code: 404, request_url: url}} end) Commcare.Api.get_case(commcare_case_id: "non-existent-case", county_domain: Test.Fixtures.county_domain()) |> assert_eq({:error, :not_found}) end test "returns {:error, :rate_limited} when the result is :ok but the status code is not 429" do url = "http://commcare.test.host/a/nj-covid-camden/api/v0.5/case/12345678-1234-1234-1234-123456789012/?format=json&child_cases__full=true" NYSETL.HTTPoisonMock |> stub(:get, fn ^url, _headers, _opts -> {:ok, %{body: "Too many!", status_code: 429, request_url: url}} end) assert {:error, :rate_limited} = Commcare.Api.get_case(commcare_case_id: Test.Fixtures.commcare_case_id(), county_domain: Test.Fixtures.county_domain()) end test "returns {:error, _} when the result is :ok but the status code is not 404 or 200" do url = "http://commcare.test.host/a/nj-covid-camden/api/v0.5/case/12345678-1234-1234-1234-123456789012/?format=json&child_cases__full=true" response = %{body: "Error!", status_code: 500, request_url: url} NYSETL.HTTPoisonMock |> stub(:get, fn ^url, _headers, _opts -> {:ok, response} end) assert {:error, ^response} = Commcare.Api.get_case(commcare_case_id: Test.Fixtures.commcare_case_id(), county_domain: Test.Fixtures.county_domain()) end test "returns {:error, _} when the result is :error " do url = "http://commcare.test.host/a/nj-covid-camden/api/v0.5/case/12345678-1234-1234-1234-123456789012/?format=json&child_cases__full=true" response = %{body: "Error!", status_code: 501, request_url: url} NYSETL.HTTPoisonMock |> stub(:get, fn ^url, _headers, _opts -> {:error, response} end) assert {:error, ^response} = Commcare.Api.get_case(commcare_case_id: Test.Fixtures.commcare_case_id(), county_domain: Test.Fixtures.county_domain()) end end describe "get_cases" do setup do NYSETL.HTTPoisonMock |> stub(:get, fn "http://commcare.test.host/a/nj-covid-camden/api/v0.5/case/?child_cases__full=true&type=patient&limit=100&offset=0", _headers, _options -> {:ok, %{status_code: 200, body: ~s|{"objects":["case has children"],"meta":{"next":null}}|}} "http://commcare.test.host/a/nj-covid-camden/api/v0.5/case/?server_date_modified_start=2013-09-29&type=patient&limit=100&offset=0", _headers, _options -> {:ok, %{status_code: 200, body: ~s|{"objects":["case after 2013-09-29T10:40Z"],"meta":{"next":null}}|}} "http://commcare.test.host/a/nj-covid-camden/api/v0.5/case/?type=patient&limit=100&offset=" <> offset = url, _headers, _opts -> body = Test.Fixtures.cases_response("nj-covid-camden", "patient", offset) {:ok, %{body: body, status_code: 200, request_url: url}} end) :ok end test "gets a page of cases for the given type from offset" do assert {:ok, response} = Commcare.Api.get_cases(county_domain: Test.Fixtures.county_domain(), type: "patient") response |> Map.get("next_offset") |> assert_eq(100) response |> Map.get("objects") |> Enum.map(fn case -> case["case_id"] end) |> assert_eq(["commcare_case_id_1", "commcare_case_id_2", "commcare_case_id_3"]) end test "gets a page of cases for the given type after server modified at" do assert {:ok, response} = Commcare.Api.get_cases(county_domain: Test.Fixtures.county_domain(), type: "patient", modified_since: ~D[2013-09-29]) response |> Map.get("objects") |> assert_eq(["case after 2013-09-29T10:40Z"]) end test "optionally includes child cases" do assert {:ok, response} = 
Commcare.Api.get_cases(county_domain: Test.Fixtures.county_domain(), type: "patient", full: true) response |> Map.get("objects") |> assert_eq(["case has children"]) end test "when there are no more pages to fetch, next_offset is nil" do assert {:ok, response} = Commcare.Api.get_cases(county_domain: Test.Fixtures.county_domain(), type: "patient", offset: 100) response |> Map.get("next_offset") |> assert_eq(nil) response |> Map.get("objects") |> Enum.map(fn case -> case["case_id"] end) |> assert_eq(["commcare_case_id_4"]) end end describe "get_transfer_cases" do test "gets a case using external_id" do NYSETL.HTTPoisonMock |> stub(:get, fn "http://commcare.test.host/a/nj-covid-camden/api/v0.5/case/?external_id=1234567" = url, _headers, _opts -> body = Test.Fixtures.case_external_id_response("nj-covid-camden", "1234567") {:ok, %{body: body, status_code: 200, request_url: url}} end) assert {:ok, [response]} = Commcare.Api.get_transfer_cases(external_id: Test.Fixtures.external_id(), county_domain: Test.Fixtures.county_domain()) response |> Map.get("properties") |> Map.get("external_id") |> assert_eq(Test.Fixtures.external_id()) end end describe "get_case_list" do setup do NYSETL.HTTPoisonMock |> stub(:get, fn "http://commcare.test.host/a/nj-covid-camden/api/v0.5/case/?owner_id=1234567890" = url, _headers, _options -> body = Test.Fixtures.case_list_response("nj-covid-camden", "1234567890") {:ok, %{body: body, status_code: 200, request_url: url}} end) :ok end test "gets list of cases for given county" do assert {:ok, response} = Commcare.Api.get_case_list(owner_id: Test.Fixtures.owner_id(), county_domain: Test.Fixtures.county_domain()) response |> Jason.decode!() |> Map.get("objects") |> length() |> assert_eq(2) end end describe "ping" do setup do NYSETL.HTTPoisonMock |> stub(:get, fn "http://commcare.test.host/accounts/login/" = _url, _headers, _options -> {:ok, %{body: "", status_code: 200}} end) :ok end test "returns an ok tuple for a successful request to commcare" do assert {:ok, _} = Commcare.Api.ping() end end end
40.15748
150
0.654902
f7930e533663e615af5acd5c721fbb957cccb849
428
exs
Elixir
apps/alert_processor/priv/repo/migrations/20170822155325_create_alerts.exs
mbta/alerts_concierge
d8e643445ef06f80ca273f2914c6959daea146f6
[ "MIT" ]
null
null
null
apps/alert_processor/priv/repo/migrations/20170822155325_create_alerts.exs
mbta/alerts_concierge
d8e643445ef06f80ca273f2914c6959daea146f6
[ "MIT" ]
21
2021-03-12T17:05:30.000Z
2022-02-16T21:48:35.000Z
apps/alert_processor/priv/repo/migrations/20170822155325_create_alerts.exs
mbta/alerts_concierge
d8e643445ef06f80ca273f2914c6959daea146f6
[ "MIT" ]
1
2021-12-09T15:09:53.000Z
2021-12-09T15:09:53.000Z
defmodule AlertProcessor.Repo.Migrations.CreateAlerts do use Ecto.Migration def change do create table(:alerts, primary_key: false) do add :id, :binary_id, primary_key: true add :alert_id, :string add :last_modified, :utc_datetime add :data, :map timestamps(type: :utc_datetime) end create index(:alerts, [:last_modified]) create unique_index(:alerts, [:alert_id]) end end
23.777778
56
0.686916
f793145d7a65f41ec90c756c765088174efbba3f
1,153
exs
Elixir
config/config.exs
ppraisethesun/logger_papertrail_backend
c8ae5af08306dfe788b2fd59aa373e4ed2bc69f6
[ "MIT" ]
53
2015-12-26T22:52:21.000Z
2022-01-25T03:56:48.000Z
config/config.exs
ppraisethesun/logger_papertrail_backend
c8ae5af08306dfe788b2fd59aa373e4ed2bc69f6
[ "MIT" ]
21
2015-10-02T13:15:46.000Z
2021-01-12T19:14:34.000Z
config/config.exs
ppraisethesun/logger_papertrail_backend
c8ae5af08306dfe788b2fd59aa373e4ed2bc69f6
[ "MIT" ]
11
2016-08-20T22:59:59.000Z
2021-12-12T18:04:16.000Z
# This file is responsible for configuring your application # and its dependencies with the aid of the Mix.Config module. use Mix.Config # This configuration is loaded before any dependency and is restricted # to this project. If another project depends on this project, this # file won't be loaded nor affect the parent project. For this reason, # if you want to provide default values for your application for # 3rd-party users, it should be done in your "mix.exs" file. # You can configure for your application as: # # config :logger_papertrail_backend, key: :value # # And access this configuration in your application as: # # Application.get_env(:logger_papertrail_backend, :key) # # Or configure a 3rd-party app: # # config :logger, level: :info # # It is also possible to import configuration files, relative to this # directory. For example, you can emulate configuration per environment # by uncommenting the line below and defining dev.exs, test.exs and such. # Configuration from the imported file will override the ones defined # here (which is why it is important to import them last). # # import_config "#{Mix.env}.exs"
37.193548
73
0.75889
f7931d40cb18ef760ea84698d8a397c3af25f034
3,381
ex
Elixir
lib/radiator_web/controllers/admin/episode_controller.ex
optikfluffel/radiator
b1a1b966296fa6bf123e3a2455009ff52099ace6
[ "MIT" ]
1
2021-03-02T16:59:40.000Z
2021-03-02T16:59:40.000Z
lib/radiator_web/controllers/admin/episode_controller.ex
optikfluffel/radiator
b1a1b966296fa6bf123e3a2455009ff52099ace6
[ "MIT" ]
null
null
null
lib/radiator_web/controllers/admin/episode_controller.ex
optikfluffel/radiator
b1a1b966296fa6bf123e3a2455009ff52099ace6
[ "MIT" ]
null
null
null
defmodule RadiatorWeb.Admin.EpisodeController do use RadiatorWeb, :controller alias Radiator.Directory alias Radiator.Storage alias Directory.Episode alias Directory.Editor plug :assign_podcast when action in [:new, :create, :update] defp assign_podcast(conn, _) do assign(conn, :podcast, Directory.get_podcast!(conn.params["podcast_id"])) end def new(conn, _params) do changeset = Editor.Manager.change_episode(%Episode{}) render(conn, "new.html", changeset: changeset) end def create(conn, %{"episode" => episode_params}) do podcast = conn.assigns[:podcast] episode_params = case process_upload(conn, podcast, episode_params) do {:ok, enclosure_url, enclosure_type, enclosure_size} -> episode_params |> Map.put("enclosure_url", enclosure_url) |> Map.put("enclosure_type", enclosure_type) |> Map.put("enclosure_length", enclosure_size) _ -> episode_params end case Editor.Manager.create_episode(podcast, episode_params) do {:ok, episode} -> conn |> put_flash(:info, "episode created successfully.") |> redirect( to: Routes.admin_network_podcast_episode_path( conn, :show, podcast.network_id, podcast, episode ) ) {:error, %Ecto.Changeset{} = changeset} -> render(conn, "new.html", changeset: changeset) end end def show(conn, %{"id" => id}) do episode = Directory.get_episode!(id) |> Radiator.Repo.preload(:chapters) render(conn, "show.html", episode: episode) end def edit(conn, %{"id" => id}) do episode = Directory.get_episode!(id) changeset = Editor.Manager.change_episode(episode) render(conn, "edit.html", episode: episode, changeset: changeset) end def update(conn, %{"id" => id, "episode" => episode_params}) do episode = Directory.get_episode!(id) episode_params = case process_upload(conn, conn.assigns[:podcast], episode_params) do {:ok, enclosure_url, enclosure_type, enclosure_size} -> episode_params |> Map.put("enclosure_url", enclosure_url) |> Map.put("enclosure_type", enclosure_type) |> Map.put("enclosure_length", enclosure_size) _ -> episode_params end case Editor.Manager.update_episode(episode, episode_params) do {:ok, episode} -> conn |> put_flash(:info, "episode updated successfully.") |> redirect( to: Routes.admin_network_podcast_episode_path( conn, :show, episode.podcast.network_id, episode.podcast, episode ) ) {:error, %Ecto.Changeset{} = changeset} -> render(conn, "edit.html", episode: episode, changeset: changeset) end end def process_upload(_conn, podcast, params) do if upload = params["enclosure"] do {:ok, %File.Stat{size: size}} = File.stat(upload.path) path = Storage.file_path(podcast, upload.filename) Storage.upload_file(upload.path, path, upload.content_type) enclosure_url = Storage.file_url(podcast, upload.filename) {:ok, enclosure_url, upload.content_type, size} else :noupload end end end
29.146552
77
0.619639
f7932eda6d2e7b28b02bf953f16d13b8368bf70e
11,474
ex
Elixir
lib/eqrcode/matrix.ex
maltoe/eqrcode
cde803e209eb4840750a24c394a0bb397117f96a
[ "MIT" ]
null
null
null
lib/eqrcode/matrix.ex
maltoe/eqrcode
cde803e209eb4840750a24c394a0bb397117f96a
[ "MIT" ]
null
null
null
lib/eqrcode/matrix.ex
maltoe/eqrcode
cde803e209eb4840750a24c394a0bb397117f96a
[ "MIT" ]
null
null
null
defmodule EQRCode.Matrix do @moduledoc false alias EQRCode.SpecTable import Bitwise @derive {Inspect, only: [:version, :error_correction_level, :modules, :mask]} defstruct [:version, :error_correction_level, :modules, :mask, :matrix] @type coordinate :: {non_neg_integer(), non_neg_integer()} @type matrix :: term @type t :: %__MODULE__{version: SpecTable.version(), error_correction_level: SpecTable.error_correction_level(), modules: integer, matrix: matrix} @alignments %{ 1 => [], 2 => [6, 18], 3 => [6, 22], 4 => [6, 26], 5 => [6, 30], 6 => [6, 34], 7 => [6, 22, 38], 8 => [6, 24, 42], 9 => [6, 26, 46], 10 => [6, 28, 50], 11 => [6, 30, 54], 12 => [6, 32, 58], 13 => [6, 34, 62], 14 => [6, 26, 46, 66], 15 => [6, 26, 48, 70], 16 => [6, 26, 50, 74], 17 => [6, 30, 54, 78], 18 => [6, 30, 56, 82], 19 => [6, 30, 58, 86], 20 => [6, 34, 62, 90], 21 => [6, 28, 50, 72, 94], 22 => [6, 26, 50, 74, 98], 23 => [6, 30, 54, 78, 102], 24 => [6, 28, 54, 80, 106], 25 => [6, 32, 58, 84, 110], 26 => [6, 30, 58, 86, 114], 27 => [6, 34, 62, 90, 118], 28 => [6, 26, 50, 74, 98, 122], 29 => [6, 30, 54, 78, 102, 126], 30 => [6, 26, 52, 78, 104, 130], 31 => [6, 30, 56, 82, 108, 134], 32 => [6, 34, 60, 86, 112, 138], 33 => [6, 30, 58, 86, 114, 142], 34 => [6, 34, 62, 90, 118, 146], 35 => [6, 30, 54, 78, 102, 126, 150], 36 => [6, 24, 50, 76, 102, 128, 154], 37 => [6, 28, 54, 80, 106, 132, 158], 38 => [6, 32, 58, 84, 110, 136, 162], 39 => [6, 26, 54, 82, 110, 138, 166], 40 => [6, 30, 58, 86, 114, 142, 170] } @finder_pattern Code.eval_string(""" [ 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1 ] """) |> elem(0) @alignment_pattern Code.eval_string(""" [ 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, ] """) |> elem(0) @doc """ Initialize the matrix. """ @spec new(SpecTable.version(), SpecTable.error_correction_level()) :: t def new(version, error_correction_level \\ :l) do modules = (version - 1) * 4 + 21 matrix = Tuple.duplicate(nil, modules) |> Tuple.duplicate(modules) %__MODULE__{version: version, error_correction_level: error_correction_level, modules: modules, matrix: matrix} end @doc """ Draw the finder patterns, three at a time. """ @spec draw_finder_patterns(t) :: t def draw_finder_patterns(%__MODULE__{matrix: matrix, modules: modules} = m) do z = modules - 7 matrix = [{0, 0}, {z, 0}, {0, z}] |> Stream.flat_map(&shape(&1, {7, 7})) |> Stream.zip(Stream.cycle(@finder_pattern)) |> Enum.reduce(matrix, fn {coordinate, v}, acc -> update(acc, coordinate, v) end) %{m | matrix: matrix} end @doc """ Draw the seperators. """ @spec draw_seperators(t) :: t def draw_seperators(%__MODULE__{matrix: matrix, modules: modules} = m) do z = modules - 8 matrix = [ {{0, 7}, {1, 8}}, {{0, z}, {1, 8}}, {{7, z}, {8, 1}}, {{7, 0}, {8, 1}}, {{z, 0}, {8, 1}}, {{z, 7}, {1, 8}} ] |> Stream.flat_map(fn {a, b} -> shape(a, b) end) |> Enum.reduce(matrix, &update(&2, &1, 0)) %{m | matrix: matrix} end @doc """ Draw the alignment patterns. 
""" @spec draw_alignment_patterns(t) :: t def draw_alignment_patterns(%__MODULE__{matrix: matrix, version: version} = m) do matrix = for( x <- @alignments[version], y <- @alignments[version], do: {x, y} ) |> Stream.filter(&available?(matrix, &1)) |> Stream.map(fn {x, y} -> {x - 2, y - 2} end) |> Stream.flat_map(&shape(&1, {5, 5})) |> Stream.zip(Stream.cycle(@alignment_pattern)) |> Enum.reduce(matrix, fn {coordinate, v}, acc -> update(acc, coordinate, v) end) %{m | matrix: matrix} end @doc """ Draw the timing patterns. """ @spec draw_timing_patterns(t) :: t def draw_timing_patterns(%__MODULE__{matrix: matrix, modules: modules} = m) do z = modules - 13 matrix = [{z, 1}, {1, z}] |> Stream.flat_map(&shape({6, 6}, &1)) |> Stream.zip(Stream.cycle([1, 0])) |> Enum.reduce(matrix, fn {coordinate, v}, acc -> update(acc, coordinate, v) end) %{m | matrix: matrix} end @doc """ Draw the dark module. """ @spec draw_dark_module(t) :: t def draw_dark_module(%__MODULE__{matrix: matrix, modules: modules} = m) do matrix = update(matrix, {modules - 8, 8}, 1) %{m | matrix: matrix} end @doc """ Draw the reserved format information areas. """ @spec draw_reserved_format_areas(t) :: t def draw_reserved_format_areas(%__MODULE__{matrix: matrix, modules: modules} = m) do z = modules - 8 matrix = [{{0, 8}, {1, 9}}, {{z, 8}, {1, 8}}, {{8, 0}, {9, 1}}, {{8, z}, {8, 1}}] |> Stream.flat_map(fn {a, b} -> shape(a, b) end) |> Enum.reduce(matrix, &update(&2, &1, :reserved)) %{m | matrix: matrix} end @doc """ Draw the reserved version information areas. """ @spec draw_reserved_version_areas(t) :: t def draw_reserved_version_areas(%__MODULE__{version: version} = m) when version < 7, do: m def draw_reserved_version_areas(%__MODULE__{matrix: matrix, modules: modules} = m) do z = modules - 11 matrix = [{{0, z}, {3, 6}}, {{z, 0}, {6, 3}}] |> Stream.flat_map(fn {a, b} -> shape(a, b) end) |> Enum.reduce(matrix, &update(&2, &1, :reserved)) %{m | matrix: matrix} end @doc """ Draw the data bits with mask. """ @spec draw_data_with_mask(t, binary) :: t def draw_data_with_mask(%__MODULE__{matrix: matrix, modules: modules} = m, data) do candidate = Stream.unfold(modules - 1, fn -1 -> nil 8 -> {8, 5} n -> {n, n - 2} end) |> Stream.zip(Stream.cycle([:up, :down])) |> Stream.flat_map(fn {z, path} -> path(path, {modules - 1, z}) end) |> Stream.filter(&available?(matrix, &1)) |> Stream.zip(EQRCode.Encode.bits(data)) {mask, _, matrix} = Stream.map(0b000..0b111, fn mask -> matrix = Enum.reduce(candidate, matrix, fn {coordinate, v}, acc -> update(acc, coordinate, v ^^^ EQRCode.Mask.mask(mask, coordinate)) end) {mask, EQRCode.Mask.score(matrix), matrix} end) |> Enum.min_by(&elem(&1, 1)) %{m | matrix: matrix, mask: mask} end @doc """ Draw the data bits with mask 0. """ @spec draw_data_with_mask0(t, binary) :: t def draw_data_with_mask0(%__MODULE__{matrix: matrix, modules: modules} = m, data) do matrix = Stream.unfold(modules - 1, fn -1 -> nil 8 -> {8, 5} n -> {n, n - 2} end) |> Stream.zip(Stream.cycle([:up, :down])) |> Stream.flat_map(fn {z, path} -> path(path, {modules - 1, z}) end) |> Stream.filter(&available?(matrix, &1)) |> Stream.zip(EQRCode.Encode.bits(data)) |> Enum.reduce(matrix, fn {coordinate, v}, acc -> update(acc, coordinate, v ^^^ EQRCode.Mask.mask(0, coordinate)) end) %{m | matrix: matrix, mask: 0} end defp path(:up, {x, y}), do: for( i <- x..0, j <- y..(y - 1), do: {i, j} ) defp path(:down, {x, y}), do: for( i <- 0..x, j <- y..(y - 1), do: {i, j} ) @doc """ Fill the reserved format information areas. 
""" @spec draw_format_areas(t) :: t def draw_format_areas(%__MODULE__{matrix: matrix, modules: modules, mask: mask, error_correction_level: ecl} = m) do ecc_l = SpecTable.error_corretion_bits(ecl) data = EQRCode.ReedSolomon.bch_encode(<<ecc_l::2, mask::3>>) matrix = [ {{8, 0}, {9, 1}}, {{7, 8}, {1, -6}}, {{modules - 1, 8}, {1, -6}}, {{8, modules - 8}, {8, 1}} ] |> Stream.flat_map(fn {a, b} -> shape(a, b) end) |> Stream.filter(&reserved?(matrix, &1)) |> Stream.zip(Stream.cycle(data)) |> Enum.reduce(matrix, fn {coordinate, v}, acc -> put(acc, coordinate, v) end) %{m | matrix: matrix} end @doc """ Fill the reserved version information areas. """ @spec draw_version_areas(t) :: t def draw_version_areas(%__MODULE__{version: version} = m) when version < 7, do: m def draw_version_areas(%__MODULE__{matrix: matrix, modules: modules, version: version} = m) do version_information_bits = SpecTable.version_information_bits(version) data = EQRCode.Encode.bits(<<version_information_bits::18>>) z = modules - 9 matrix = [ {{z, 5}, {1, -1}}, {{z, 4}, {1, -1}}, {{z, 3}, {1, -1}}, {{z, 2}, {1, -1}}, {{z, 1}, {1, -1}}, {{z, 0}, {1, -1}}, {{5, z}, {-1, 1}}, {{4, z}, {-1, 1}}, {{3, z}, {-1, 1}}, {{2, z}, {-1, 1}}, {{1, z}, {-1, 1}}, {{0, z}, {-1, 1}} ] |> Stream.flat_map(fn {a, b} -> shape(a, b) end) |> Stream.filter(&reserved?(matrix, &1)) |> Stream.zip(Stream.cycle(data)) |> Enum.reduce(matrix, fn {coordinate, v}, acc -> put(acc, coordinate, v) end) %{m | matrix: matrix} end defp reserved?(matrix, {x, y}) do get_in(matrix, [Access.elem(x), Access.elem(y)]) == :reserved end defp put(matrix, {x, y}, value) do put_in(matrix, [Access.elem(x), Access.elem(y)], value) end @doc """ Draw the quite zone. """ @spec draw_quite_zone(t) :: t def draw_quite_zone(%__MODULE__{matrix: matrix, modules: modules} = m) do zone = Tuple.duplicate(0, modules + 4) matrix = Enum.reduce(0..(modules - 1), matrix, fn i, acc -> update_in(acc, [Access.elem(i)], fn row -> Tuple.insert_at(row, 0, 0) |> Tuple.insert_at(0, 0) |> Tuple.append(0) |> Tuple.append(0) end) end) |> Tuple.insert_at(0, zone) |> Tuple.insert_at(0, zone) |> Tuple.append(zone) |> Tuple.append(zone) %{m | matrix: matrix} end @doc """ Given the starting point {x, y} and {width, height} returns the coordinates of the shape. Example: iex> EQRCode.Matrix.shape({0, 0}, {3, 3}) [{0, 0}, {0, 1}, {0, 2}, {1, 0}, {1, 1}, {1, 2}, {2, 0}, {2, 1}, {2, 2}] """ @spec shape(coordinate, {integer, integer}) :: [coordinate] def shape({x, y}, {w, h}) do for i <- x..(x + h - 1), j <- y..(y + w - 1), do: {i, j} end defp update(matrix, {x, y}, value) do update_in(matrix, [Access.elem(x), Access.elem(y)], fn nil -> value val -> val end) end defp available?(matrix, {x, y}) do get_in(matrix, [Access.elem(x), Access.elem(y)]) == nil end @doc """ Get matrix size. """ @spec size(t()) :: integer() def size(%__MODULE__{matrix: matrix}) do matrix |> Tuple.to_list() |> Enum.count() end end
27.714976
148
0.505404
f7933b6193d2bf50a4547827b12880a7e5777997
2,555
ex
Elixir
clients/document_ai/lib/google_api/document_ai/v1beta2/model/google_cloud_documentai_v1beta1_document_page_visual_element.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
null
null
null
clients/document_ai/lib/google_api/document_ai/v1beta2/model/google_cloud_documentai_v1beta1_document_page_visual_element.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
1
2020-12-18T09:25:12.000Z
2020-12-18T09:25:12.000Z
clients/document_ai/lib/google_api/document_ai/v1beta2/model/google_cloud_documentai_v1beta1_document_page_visual_element.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
1
2020-10-04T10:12:44.000Z
2020-10-04T10:12:44.000Z
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta1DocumentPageVisualElement do @moduledoc """ Detected non-text visual elements e.g. checkbox, signature etc. on the page. ## Attributes * `detectedLanguages` (*type:* `list(GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta1DocumentPageDetectedLanguage.t)`, *default:* `nil`) - A list of detected languages together with confidence. * `layout` (*type:* `GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta1DocumentPageLayout.t`, *default:* `nil`) - Layout for VisualElement. * `type` (*type:* `String.t`, *default:* `nil`) - Type of the VisualElement. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :detectedLanguages => list( GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta1DocumentPageDetectedLanguage.t() ), :layout => GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta1DocumentPageLayout.t(), :type => String.t() } field(:detectedLanguages, as: GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta1DocumentPageDetectedLanguage, type: :list ) field(:layout, as: GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta1DocumentPageLayout ) field(:type) end defimpl Poison.Decoder, for: GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta1DocumentPageVisualElement do def decode(value, options) do GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta1DocumentPageVisualElement.decode( value, options ) end end defimpl Poison.Encoder, for: GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta1DocumentPageVisualElement do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
36.5
212
0.748337
f7933d1d0bbd0ceaa94bce6052c1ddbd88e81990
993
exs
Elixir
exercise_2.1.exs
bschmeck/sicp-elixir
4e6f959f506d0cb4e2692177cbdad1c87779cf7d
[ "MIT" ]
2
2019-08-26T13:17:27.000Z
2020-09-24T13:16:07.000Z
exercise_2.1.exs
bschmeck/sicp-elixir
4e6f959f506d0cb4e2692177cbdad1c87779cf7d
[ "MIT" ]
null
null
null
exercise_2.1.exs
bschmeck/sicp-elixir
4e6f959f506d0cb4e2692177cbdad1c87779cf7d
[ "MIT" ]
null
null
null
defmodule Rational do def make(n, d) when d < 0, do: make(-n, -d) def make(n, d) do g = gcd(abs(n), abs(d)) [div(n, g) | div(d, g)] end def numer(rat), do: hd(rat) def denom(rat), do: tl(rat) def string(rat), do: "#{Rational.numer rat} / #{Rational.denom rat}" def print(rat), do: IO.puts string(rat) defp gcd(a, 0), do: a defp gcd(a, b), do: gcd(b, rem(a, b)) end ExUnit.start defmodule RationalTests do use ExUnit.Case, async: true test "it reduces a positive numerator and positive denominator" do assert Rational.make(3, 6) == Rational.make(1, 2) end test "it reduces a positive numerator and negative denominator" do assert Rational.make(3, -6) == Rational.make(-1, 2) end test "it reduces a negative numerator and positive denominator" do assert Rational.make(-3, 6) == Rational.make(-1, 2) end test "it reduces a negative numerator and negative denominator" do assert Rational.make(-3, -6) == Rational.make(1, 2) end end
28.371429
70
0.655589
f793478939120ebb2ce3ab510e659639f7ef412a
436
ex
Elixir
lib/apeekee.ex
KamilZielinski/apeekee
6f9373f92d596dd942e7449f41be7f694ec0fc50
[ "MIT" ]
1
2020-05-01T11:29:53.000Z
2020-05-01T11:29:53.000Z
lib/apeekee.ex
KamilZielinski/apeekee
6f9373f92d596dd942e7449f41be7f694ec0fc50
[ "MIT" ]
3
2019-10-07T22:38:05.000Z
2019-10-12T01:23:15.000Z
lib/apeekee.ex
KamilZielinski/apeekee
6f9373f92d596dd942e7449f41be7f694ec0fc50
[ "MIT" ]
null
null
null
defmodule Apeekee.Plug do import Plug.Conn alias Apeekee.Key alias Apeekee.Auth def init(_), do: [] def call(conn, _) do case Key.get_auth_key(conn) do {:ok, key} -> case Auth.auth_by_key(conn, key) do {:ok, user} -> Auth.on_success(conn, user) {:error, error} -> Auth.on_failure(conn, error) end {:error, error} -> Auth.on_failure(conn, error) end end end
20.761905
57
0.584862
f7935b9b6bdc67cd38ce9f6a3af128865348d937
1,516
ex
Elixir
lib/usho_web.ex
agleb/usho
e5971569c67e53049c8ce3ccf24d52df7e185026
[ "MIT" ]
null
null
null
lib/usho_web.ex
agleb/usho
e5971569c67e53049c8ce3ccf24d52df7e185026
[ "MIT" ]
null
null
null
lib/usho_web.ex
agleb/usho
e5971569c67e53049c8ce3ccf24d52df7e185026
[ "MIT" ]
null
null
null
defmodule UshoWeb do @moduledoc """ The entrypoint for defining your web interface, such as controllers, views, channels and so on. This can be used in your application as: use UshoWeb, :controller use UshoWeb, :view The definitions below will be executed for every view, controller, etc, so keep them short and clean, focused on imports, uses and aliases. Do NOT define functions inside the quoted expressions below. Instead, define any helper function in modules and import those modules here. """ def controller do quote do use Phoenix.Controller, namespace: UshoWeb import Plug.Conn import UshoWeb.Gettext alias UshoWeb.Router.Helpers, as: Routes end end def view do quote do use Phoenix.View, root: "lib/usho_web/templates", namespace: UshoWeb # Import convenience functions from controllers import Phoenix.Controller, only: [get_flash: 1, get_flash: 2, view_module: 1] import UshoWeb.ErrorHelpers import UshoWeb.Gettext alias UshoWeb.Router.Helpers, as: Routes end end def router do quote do use Phoenix.Router import Plug.Conn import Phoenix.Controller end end def channel do quote do use Phoenix.Channel import UshoWeb.Gettext end end @doc """ When used, dispatch to the appropriate controller/view/etc. """ defmacro __using__(which) when is_atom(which) do apply(__MODULE__, which, []) end end
22.626866
83
0.684697
f79388e289c03f48bb1deacf115db68578205128
2,436
exs
Elixir
src/mbs/test/functional_tests/command/build/ls_test.exs
visciang/mbs
3f218b82f3097c03149b6a4a440804056d94c387
[ "MIT" ]
null
null
null
src/mbs/test/functional_tests/command/build/ls_test.exs
visciang/mbs
3f218b82f3097c03149b6a4a440804056d94c387
[ "MIT" ]
1
2021-12-15T07:22:59.000Z
2021-12-15T07:47:56.000Z
src/mbs/test/functional_tests/command/build/ls_test.exs
visciang/mbs
3f218b82f3097c03149b6a4a440804056d94c387
[ "MIT" ]
null
null
null
defmodule Test.Command.Build.Ls do use ExUnit.Case, async: false import ExUnit.CaptureIO alias Test.Utils @test_component_a_id "test_component_a" @test_toolchain_a_id "test_toolchain_a" @component_a_dir Path.absname(Path.join(Utils.test_project_dir(), @test_component_a_id)) @toolchain_a_dir Path.absname(Path.join(Utils.test_project_dir(), @test_toolchain_a_id)) test "ls" do msg = capture_io(fn -> assert :ok == MBS.Main.run(["build", "ls"], Utils.test_project_dir()) end) expected_output = ~r""" #{@test_component_a_id} \(component\) #{@test_toolchain_a_id} \(toolchain\) """ assert msg =~ expected_output end test "ls --verbose (single target)" do msg = capture_io(fn -> assert :ok == MBS.Main.run(["build", "ls", "--verbose", @test_component_a_id], Utils.test_project_dir()) end) @component_a_dir = Path.absname(Path.join(Utils.test_project_dir(), @test_component_a_id)) expected_output = ~r""" #{@test_component_a_id} \(component\): dir: #{@component_a_dir} timeout: infinity toolchain: #{@test_toolchain_a_id} targets: - #{@component_a_dir}/#{@test_component_a_id}.target_1 - #{@component_a_dir}/#{@test_component_a_id}.target_2 files: - #{@component_a_dir}/.mbs-build.json - #{@component_a_dir}/file_1.txt """ assert msg =~ expected_output end test "ls --verbose" do msg = capture_io(fn -> assert :ok == MBS.Main.run(["build", "ls", "--verbose"], Utils.test_project_dir()) end) expected_output = ~r""" #{@test_component_a_id} \(component\): dir: #{@component_a_dir} timeout: infinity toolchain: #{@test_toolchain_a_id} targets: - #{@component_a_dir}/#{@test_component_a_id}.target_1 - #{@component_a_dir}/#{@test_component_a_id}.target_2 files: - #{@component_a_dir}/.mbs-build.json - #{@component_a_dir}/file_1.txt #{@test_toolchain_a_id} \(toolchain\): dir: #{@toolchain_a_dir} timeout: infinity dockerfile: #{@toolchain_a_dir}/Dockerfile steps: - step_1 - step_2 files: - #{@toolchain_a_dir}/.mbs-toolchain.json - #{@toolchain_a_dir}/Dockerfile - #{@toolchain_a_dir}/toolchain.sh """ assert msg =~ expected_output end end
27.681818
114
0.621511
f7938d6e74aa470350901d321643f5eb688a0a0f
2,685
ex
Elixir
lib/user_manager/authorize/authorize_user_validate_token.ex
Alezrik/user_manager
ef30f87587f652842b335b38dd2222873dbcb56b
[ "MIT" ]
3
2017-04-02T11:17:31.000Z
2017-09-08T09:12:11.000Z
lib/user_manager/authorize/authorize_user_validate_token.ex
Alezrik/user_manager
ef30f87587f652842b335b38dd2222873dbcb56b
[ "MIT" ]
15
2017-02-11T01:08:54.000Z
2017-02-22T09:45:41.000Z
lib/user_manager/authorize/authorize_user_validate_token.ex
Alezrik/user_manager
ef30f87587f652842b335b38dd2222873dbcb56b
[ "MIT" ]
1
2018-06-19T04:10:53.000Z
2018-06-19T04:10:53.000Z
defmodule UserManager.Authorize.AuthorizeUserValidateToken do @moduledoc false use GenStage require Logger def start_link(_) do GenStage.start_link(__MODULE__, [], name: __MODULE__) end def init(_) do {:producer_consumer, [], subscribe_to: [UserManager.Authorize.AuthorizeUserWorkflowProducer]} end @doc""" decode and verify user token ## Examples iex>name = Faker.Name.first_name <> Faker.Name.last_name iex>email = Faker.Internet.email iex>{:notify, _user} = UserManager.UserManagerApi.create_user(name, "secretpassword", email) iex>{:notify, response} = UserManager.UserManagerApi.authenticate_user(name, "secretpassword", :browser) iex>token = Map.fetch!(response.response_parameters, "authenticate_token") iex>msg = {:authorize_token, token, [], true, nil} iex>{:noreply, response, state} = UserManager.Authorize.AuthorizeUserValidateToken.handle_events([msg], nil, []) iex>Enum.at(Tuple.to_list(Enum.at(response, 0)),0) :validate_permissions iex>msg = {:authorize_token, "fskafsakjfkasfd", [], true, nil} iex>UserManager.Authorize.AuthorizeUserValidateToken.handle_events([msg], nil, []) {:noreply, [], []} iex>token = "eyJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJhdWQiOiJVc2VyOjQiLCJleHAiOjE0ODkzMDMyMTksImlhdCI6MTQ4NjcxMTIxOSwiaXNzIjoiU29tZW9uZSIsImp0aSI6Ijg0NGUwY2EzLWM4ZWUtNDQ3Mi1iMzYxLWVhODdjNGUzYjU3NCIsInBlbSI6eyJkZWZhdWx0IjoxfSwic3ViIjoiVXNlcjo0IiwidHlwIjoiYnJvd3NlciJ9.nA3-dkFNqTW1GYO8x1v9zTQoUk6ddyK2FqgZPZk9k6lO_iIOQx6We35ItLEeRAZO_5lv9JR4WWizQ7J7p8HRcA" iex>msg = {:authorize_token, token, [], true, nil} iex>UserManager.Authorize.AuthorizeUserValidateToken.handle_events([msg], nil, []) {:noreply, [], []} """ def handle_events(events, _from, state) do process_events = events |> Flow.from_enumerable |> Flow.flat_map(fn e -> process_event(e) end) |> Enum.to_list {:noreply, process_events, state} end defp process_event({:authorize_token, token, permission_list, require_all, notify}) do case Guardian.decode_and_verify(token) do {:error, :token_not_found} -> UserManager.Notifications.NotificationResponseProcessor.process_notification(:authorize, :token_not_found, %{}, notify) [] {:error, reason} -> UserManager.Notifications.NotificationResponseProcessor.process_notification(:authorize, :token_decode_error, UserManager.Notifications.NotificationMetadataHelper.build_token_decode_error(reason), notify) [] {:ok, data} -> [{:validate_permissions, data, permission_list, require_all, notify}] end end end
49.722222
360
0.730726
f7939fbdd09ba7f883777663d0ccd7971f1378fc
1,556
ex
Elixir
clients/android_enterprise/lib/google_api/android_enterprise/v1/model/web_apps_list_response.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
null
null
null
clients/android_enterprise/lib/google_api/android_enterprise/v1/model/web_apps_list_response.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
1
2020-12-18T09:25:12.000Z
2020-12-18T09:25:12.000Z
clients/android_enterprise/lib/google_api/android_enterprise/v1/model/web_apps_list_response.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
1
2020-10-04T10:12:44.000Z
2020-10-04T10:12:44.000Z
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.

defmodule GoogleApi.AndroidEnterprise.V1.Model.WebAppsListResponse do
  @moduledoc """


  ## Attributes

  *   `webApp` (*type:* `list(GoogleApi.AndroidEnterprise.V1.Model.WebApp.t)`, *default:* `nil`) - The manifest describing a web app.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :webApp => list(GoogleApi.AndroidEnterprise.V1.Model.WebApp.t())
        }

  field(:webApp, as: GoogleApi.AndroidEnterprise.V1.Model.WebApp, type: :list)
end

defimpl Poison.Decoder, for: GoogleApi.AndroidEnterprise.V1.Model.WebAppsListResponse do
  def decode(value, options) do
    GoogleApi.AndroidEnterprise.V1.Model.WebAppsListResponse.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.AndroidEnterprise.V1.Model.WebAppsListResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
33.106383
133
0.750643
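The Poison protocol implementations above allow a raw response body to be decoded into the struct. A sketch, where `json` stands in for the HTTP response body (a placeholder, not part of the original file); nested entries may remain plain maps unless a nested `as:` template is supplied:

    response =
      Poison.decode!(json, as: %GoogleApi.AndroidEnterprise.V1.Model.WebAppsListResponse{})

    response.webApp  # list of web app entries, or nil when the field is absent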
f793e224b27a28fc94b449904eb3880ef637a0df
3,791
exs
Elixir
test/hangman/pass_unit_test.exs
brpandey/elixir-hangman
458502af766b42e492ebb9ca543fc8b855687b09
[ "MIT" ]
1
2016-12-19T00:10:34.000Z
2016-12-19T00:10:34.000Z
test/hangman/pass_unit_test.exs
brpandey/elixir-hangman
458502af766b42e492ebb9ca543fc8b855687b09
[ "MIT" ]
null
null
null
test/hangman/pass_unit_test.exs
brpandey/elixir-hangman
458502af766b42e492ebb9ca543fc8b855687b09
[ "MIT" ]
null
null
null
defmodule Hangman.Pass.UnitTest do use ExUnit.Case, async: true alias Hangman.{Reduction, Counter, Pass, Words} setup context do case context[:case_key] do :start -> start_round_setup() :guessing -> guessing_round_setup() end end def start_round_setup do #### START ROUND 1 pass_key = {"bernard", 1, 1} context = {:start, 8} guessed = [] reduce_key = Reduction.Options.reduce_key(context, guessed) tally = Counter.new(%{ "e" => 19600, "s" => 16560, "i" => 15530, "a" => 14490, "r" => 14211, "n" => 12186, "t" => 11870, "o" => 11462, "l" => 11026, "d" => 8046, "c" => 7815, "u" => 7377, "g" => 6009, "m" => 5793, "p" => 5763, "h" => 5111, "b" => 4485, "y" => 3395, "f" => 2897, "k" => 2628, "w" => 2313, "v" => 2156, "z" => 783, "x" => 662, "q" => 422, "j" => 384 }) pass_receipt = %Pass{size: 28558, tally: tally, last_word: "", possible: ""} [pass: pass_receipt, pass_key: pass_key, reduce_key: reduce_key] end def guessing_round_setup do # Run the start round before we do the guessing round so that the proper state is setup # setup start round [pass: pass_receipt, pass_key: pass_key, reduce_key: reduce_key] = start_round_setup() # run the start round {^pass_key, ^pass_receipt} = Pass.result(:start, pass_key, reduce_key) #### GUESSING ROUND 2 pass_key = {"bernard", 1, 2} guessed = ["e"] regex_key = ~r/^[^e][^e][^e][^e][^e][^e][^e]e$/ tally = Counter.new(%{ "a" => 1215, "i" => 1154, "l" => 940, "o" => 855, "t" => 807, "s" => 689, "r" => 688, "n" => 662, "u" => 548, "c" => 527, "b" => 425, "p" => 387, "m" => 380, "d" => 348, "g" => 280, "h" => 257, "k" => 228, "f" => 169, "v" => 155, "y" => 127, "z" => 112, "w" => 111, "q" => 35, "x" => 24, "j" => 18 }) pass_receipt = %Pass{last_word: "", size: 1833, tally: tally} [pass: pass_receipt, pass_key: pass_key, exclusion: guessed, regex_key: regex_key] end @tag case_key: :start test "pass start", %{pass: pass_receipt, pass_key: pass_key, reduce_key: reduce_key} do # Assert pass reduce results!!! assert {^pass_key, ^pass_receipt} = Pass.result(:start, pass_key, reduce_key) end @tag case_key: :guessing test "pass guessing", %{ pass: pass_data, pass_key: pass_key, exclusion: exclusion, regex_key: regex_key } do # We just perform the computation from pass result :guessing # Below is the code contained in the reduce and store routine # in Reduction.Engine.Worker # NOTE: Yes we are testing the actual literal code in reduction worker # So yes this is too tight coupling to the implementation details # Since these are the implementation details! But since # Reduction.Engine worker is a process as is Pass.Cache this makes it # easier to test this important "KERNEL" code # Request word list data from Pass data = %Words{} = Pass.Reduction.words(pass_key) # REDUCE # Create new Words abstraction after filtering out failed word matches new_data = %Words{} = data |> Words.filter(regex_key) # STORE # Write to cache pass_receipt = %Pass{} = Pass.Reduction.store(pass_key, new_data, exclusion) # IO.puts "pass receipt #{inspect pass_receipt}" # Assert pass reduce results!!! assert ^pass_receipt = pass_data end end
25.965753
91
0.54181
f793f2600ae49cd8dfac885367f65963bf267488
1,269
exs
Elixir
mix.exs
iStefo/OK
cb98f2ee1be66e7af11d6a103470dd5f138f335b
[ "Apache-2.0" ]
582
2015-10-21T06:51:08.000Z
2022-03-22T09:21:12.000Z
mix.exs
iStefo/OK
cb98f2ee1be66e7af11d6a103470dd5f138f335b
[ "Apache-2.0" ]
61
2015-10-21T07:09:19.000Z
2021-06-20T10:16:39.000Z
mix.exs
iStefo/OK
cb98f2ee1be66e7af11d6a103470dd5f138f335b
[ "Apache-2.0" ]
24
2015-10-21T06:59:02.000Z
2021-05-02T12:09:10.000Z
defmodule OK.Mixfile do
  use Mix.Project

  def project do
    [
      app: :ok,
      version: "2.3.0",
      elixir: "~> 1.1",
      elixirc_paths: elixirc_paths(Mix.env()),
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      description: description(),
      docs: [
        main: "readme",
        source_url: "https://github.com/CrowdHailer/OK",
        extras: ["README.md"],
        groups_for_functions: [
          Guards: & &1[:guard]
        ]
      ],
      package: package()
    ]
  end

  def application do
    [applications: [:logger]]
  end

  defp elixirc_paths(:test), do: ["lib", "test/integration.ex"]
  defp elixirc_paths(_), do: ["lib"]

  defp deps do
    [
      {:ex_doc, ">= 0.0.0", only: :dev},
      {:dialyxir, "~> 1.0.0-rc.3", only: [:test, :dev], runtime: false}
    ]
  end

  defp description do
    """
    Elegant error/exception handling in Elixir, with result monads.
    """
  end

  defp package do
    # These are the default files included in the package
    [
      files: ["lib", "mix.exs", "README*", "LICENSE*"],
      maintainers: ["Peter Saxton"],
      licenses: ["Apache 2.0"],
      links: %{"GitHub" => "https://github.com/CrowdHailer/OK"}
    ]
  end
end
22.660714
71
0.551615
f7940a5cde7794b5dcbb55c13f196c4222c86478
4,447
exs
Elixir
mix.exs
wjdix/oban
b68bcf02d2942c6ef7a98f8ea6ec80912beea47c
[ "Apache-2.0" ]
null
null
null
mix.exs
wjdix/oban
b68bcf02d2942c6ef7a98f8ea6ec80912beea47c
[ "Apache-2.0" ]
null
null
null
mix.exs
wjdix/oban
b68bcf02d2942c6ef7a98f8ea6ec80912beea47c
[ "Apache-2.0" ]
null
null
null
defmodule Oban.MixProject do use Mix.Project @version "2.3.3" def project do [ app: :oban, version: @version, elixir: "~> 1.8", elixirc_paths: elixirc_paths(Mix.env()), start_permanent: Mix.env() == :prod, deps: deps(), aliases: aliases(), preferred_cli_env: [ bench: :test, ci: :test, "test.setup": :test ], # Hex package: package(), description: """ Robust job processing, backed by modern PostgreSQL. """, # Dialyzer dialyzer: [ plt_add_apps: [:ex_unit], plt_core_path: "_build/#{Mix.env()}", flags: [:error_handling, :race_conditions, :underspecs] ], # Docs name: "Oban", docs: [ main: "Oban", source_ref: "v#{@version}", source_url: "https://github.com/sorentwo/oban", extra_section: "GUIDES", formatters: ["html"], extras: extras() ++ pro_extras() ++ web_extras(), groups_for_extras: groups_for_extras() ] ] end def application do [ mod: {Oban.Application, []}, extra_applications: [:logger] ] end defp elixirc_paths(:test), do: ["lib", "test/support"] defp elixirc_paths(_env), do: ["lib"] defp extras do [ "CHANGELOG.md", "guides/installation.md", "guides/troubleshooting.md", "guides/writing_plugins.md", "guides/upgrading/v2.0.md", "guides/release_configuration.md", "guides/recipes/recursive-jobs.md", "guides/recipes/reliable-scheduling.md", "guides/recipes/reporting-progress.md", "guides/recipes/expected-failures.md", "guides/recipes/splitting-queues.md" ] end defp pro_extras do if File.exists?("../oban_pro") do [ "../oban_pro/guides/pro/overview.md": [filename: "pro_overview"], "../oban_pro/guides/pro/installation.md": [filename: "pro_installation"], "../oban_pro/guides/plugins/lifeline.md": [title: "Lifeline Plugin"], "../oban_pro/guides/plugins/dynamic_cron.md": [title: "Dynamic Cron Plugin"], "../oban_pro/guides/plugins/dynamic_pruner.md": [title: "Dynamic Pruner Plugin"], "../oban_pro/guides/plugins/reprioritizer.md": [title: "Reprioritizer Plugin"], "../oban_pro/guides/workers/batch.md": [title: "Batch Worker"], "../oban_pro/guides/workers/workflow.md": [title: "Workflow Worker"], "../oban_pro/CHANGELOG.md": [filename: "pro-changelog", title: "Changelog"] ] else [] end end defp web_extras do if File.exists?("../oban_web") do [ "../oban_web/guides/web/overview.md": [filename: "web_overview"], "../oban_web/guides/web/installation.md": [filename: "web_installation"], "../oban_web/guides/web/troubleshooting.md": [filename: "web_troubleshooting"], "../oban_web/CHANGELOG.md": [filename: "web-changelog", title: "Changelog"] ] else [] end end defp groups_for_extras do [ Guides: ~r{guides/[^\/]+\.md}, Recipes: ~r{guides/recipes/.?}, Extras: ~r{^CHANGELOG.md}, "Upgrade Guides": ~r{guides/upgrading/.*}, "Oban Pro": ~r{oban_pro/.?}, "Oban Web": ~r{oban_web/.?} ] end defp package do [ maintainers: ["Parker Selbert"], licenses: ["Apache-2.0"], links: %{ Website: "https://getoban.pro", Changelog: "https://github.com/sorentwo/oban/blob/master/CHANGELOG.md", GitHub: "https://github.com/sorentwo/oban" } ] end defp deps do [ {:ecto_sql, ">= 3.4.3"}, {:jason, "~> 1.1"}, {:postgrex, "~> 0.14"}, {:telemetry, "~> 0.4"}, {:stream_data, "~> 0.4", only: [:test, :dev]}, {:tzdata, "~> 1.0", only: [:test, :dev]}, {:benchee, "~> 1.0", only: [:test, :dev], runtime: false}, {:credo, "~> 1.4", only: [:test, :dev], runtime: false}, {:dialyxir, "~> 1.0", only: [:test, :dev], runtime: false}, {:ex_doc, "~> 0.20", only: [:test, :dev], runtime: false}, {:nimble_parsec, "~> 1.0", only: [:test, :dev], runtime: false, override: true} ] end defp aliases do [ bench: "run bench/bench_helper.exs", "test.setup": ["ecto.create", 
"ecto.migrate"], ci: [ "format --check-formatted", "credo --strict", "test --raise", "dialyzer" ] ] end end
28.324841
89
0.559478
f7942e7cad7bd010204478fee7266a8701c206f4
965
exs
Elixir
config/config.exs
mapmeld/superfund-me
8bd1aeb78504e6ae068cf57dbefca05bebbb2b07
[ "MIT" ]
null
null
null
config/config.exs
mapmeld/superfund-me
8bd1aeb78504e6ae068cf57dbefca05bebbb2b07
[ "MIT" ]
3
2016-09-09T21:09:17.000Z
2017-09-13T17:55:20.000Z
config/config.exs
Georeactor/superfund-me
8bd1aeb78504e6ae068cf57dbefca05bebbb2b07
[ "MIT" ]
null
null
null
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config

# Configures the endpoint
config :superfundme, Superfundme.Endpoint,
  url: [host: "localhost"],
  root: Path.dirname(__DIR__),
  secret_key_base: "H3qm2S1laMtHK0X3x4QVZB7liXyNfmt9fzcoZcLzoV3G9Q3hobb8CMmUFBYNz9Wv",
  render_errors: [accepts: ~w(html json)],
  pubsub: [name: Superfundme.PubSub, adapter: Phoenix.PubSub.PG2]

# Configures Elixir's Logger
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:request_id]

# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"

# Configure phoenix generators
config :phoenix, :generators,
  migration: true,
  binary_id: false
32.166667
86
0.761658
f79462e966f051fb6f0c5d545920a4a51300d58b
84
ex
Elixir
example_json_parse/lib/core.ex
cuevacreativa/Domo
5f2f5ff3cb57dfe774408dcae6ccb5b79d1a3089
[ "MIT" ]
null
null
null
example_json_parse/lib/core.ex
cuevacreativa/Domo
5f2f5ff3cb57dfe774408dcae6ccb5b79d1a3089
[ "MIT" ]
null
null
null
example_json_parse/lib/core.ex
cuevacreativa/Domo
5f2f5ff3cb57dfe774408dcae6ccb5b79d1a3089
[ "MIT" ]
null
null
null
defmodule Core do
  @moduledoc """
  Represents the core model of the app
  """
end
14
38
0.678571
f794971e78b434a84249f8036376059f72c53a56
556
ex
Elixir
lib/portunes_core/snapshot.ex
portunes/portunes-core
24112f9f593275611e96d59cc0f0e4117ed0d21f
[ "Apache-2.0" ]
1
2021-02-25T16:49:20.000Z
2021-02-25T16:49:20.000Z
lib/portunes_core/snapshot.ex
portunes/portunes-core
24112f9f593275611e96d59cc0f0e4117ed0d21f
[ "Apache-2.0" ]
null
null
null
lib/portunes_core/snapshot.ex
portunes/portunes-core
24112f9f593275611e96d59cc0f0e4117ed0d21f
[ "Apache-2.0" ]
null
null
null
defmodule PortunesCore.Snapshot do
  @moduledoc """
  Contains a snapshot of configuration from a remote configuration repository and all of its
  associated metadata. The meaning of this metadata is up to the `Portunes.Repository` adapter
  used.
  """
  use TypedStruct

  typedstruct do
    @typedoc "A remote repository snapshot"

    field(:repo_name, String.t(), enforce: true)
    field(:version, String.t(), enforce: true, default: "")
    field(:properties, map(), enforce: true)
    field(:metadata, map, enforce: true, default: %{})
  end
end
27.8
92
0.708633
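A construction sketch for the snapshot struct above; every value is illustrative only, and the fields with defaults could be omitted:

    snapshot = %PortunesCore.Snapshot{
      repo_name: "app-config",
      version: "42",
      properties: %{"feature_x" => true},
      metadata: %{}
    }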
f794c3d4f383746e3822c6ff3b7f3c2ff355d0d3
613
ex
Elixir
apps/core/lib/core/conduit/broker.ex
michaeljguarino/forge
50ee583ecb4aad5dee4ef08fce29a8eaed1a0824
[ "Apache-2.0" ]
59
2021-09-16T19:29:39.000Z
2022-03-31T20:44:24.000Z
apps/core/lib/core/conduit/broker.ex
svilenkov/plural
ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026
[ "Apache-2.0" ]
111
2021-08-15T09:56:37.000Z
2022-03-31T23:59:32.000Z
apps/core/lib/core/conduit/broker.ex
svilenkov/plural
ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026
[ "Apache-2.0" ]
4
2021-12-13T09:43:01.000Z
2022-03-29T18:08:44.000Z
defmodule Core.Conduit.Broker do
  use Core.Conduit.Base, otp_app: :core

  pipeline :in_tracking do
    plug Conduit.Plug.CorrelationId
    plug Conduit.Plug.LogIncoming
  end

  pipeline :error_handling do
    plug Core.Conduit.Plug.DeadLetter, broker: __MODULE__, publish_to: :error
    plug Core.Conduit.Plug.Retry, attempts: 3
  end

  pipeline :deserialize do
    plug Conduit.Plug.Parse, content_type: "application/x-erlang-binary"
  end

  incoming Core.Conduit do
    pipe_through [:in_tracking, :error_handling, :deserialize]
    subscribe :message, WebhookSubscriber, from: "plural.webhook"
  end
end
25.541667
77
0.750408
f794cf9a50ab6117c4ad3ec3b8d3c9f93e1befdd
732
ex
Elixir
lib/brando/villain/tags/t.ex
univers-agency/brando
69c3c52498a3f64518da3522cd9f27294a52cc68
[ "Apache-2.0" ]
1
2020-04-26T09:53:02.000Z
2020-04-26T09:53:02.000Z
lib/brando/villain/tags/t.ex
univers-agency/brando
69c3c52498a3f64518da3522cd9f27294a52cc68
[ "Apache-2.0" ]
198
2019-08-20T16:16:07.000Z
2020-07-03T15:42:07.000Z
lib/brando/villain/tags/t.ex
univers-agency/brando
69c3c52498a3f64518da3522cd9f27294a52cc68
[ "Apache-2.0" ]
null
null
null
defmodule Brando.Villain.Tags.T do
  @moduledoc false
  @behaviour Liquex.Tag

  import NimbleParsec

  alias Liquex.Parser.Literal
  alias Liquex.Parser.Field
  alias Liquex.Parser.Tag

  @impl true
  def parse() do
    ignore(Tag.open_tag())
    |> ignore(string("t"))
    |> ignore(Literal.whitespace())
    |> unwrap_and_tag(Field.identifier(), :language)
    |> ignore(Literal.whitespace())
    |> unwrap_and_tag(Literal.quoted_string(), :string)
    |> ignore(Tag.close_tag())
  end

  @impl true
  def render([language: language, string: string], context) do
    ctx_language = Map.get(context.variables, "language")

    if language == ctx_language do
      {[string], context}
    else
      {[], context}
    end
  end
end
22.875
62
0.669399
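In template terms, the parser above accepts a tag of the form {% t <language> "<string>" %}, and render/2 emits the string only when the tag's language equals the "language" variable in the render context. An illustrative fragment, assuming Liquex-style {% %} delimiters (which Tag.open_tag/0 and Tag.close_tag/0 provide):

    {% t en "Read more" %}
    {% t no "Les mer" %}

With "language" set to "en" in the context variables, only the first tag produces output.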
f794d89780753006c94e6943973e191469b55f85
1,574
exs
Elixir
apps/neoscan_node/mix.exs
vincentgeneste/neo-scan
4a654575331eeb3eb12d4fd61696a7bd6dbca3ce
[ "MIT" ]
75
2017-07-23T02:45:32.000Z
2021-12-13T11:04:17.000Z
apps/neoscan_node/mix.exs
vincentgeneste/neo-scan
4a654575331eeb3eb12d4fd61696a7bd6dbca3ce
[ "MIT" ]
252
2017-07-13T19:36:00.000Z
2021-07-28T18:40:00.000Z
apps/neoscan_node/mix.exs
vincentgeneste/neo-scan
4a654575331eeb3eb12d4fd61696a7bd6dbca3ce
[ "MIT" ]
87
2017-07-23T02:45:34.000Z
2022-03-02T14:54:27.000Z
defmodule NeoscanNode.Mixfile do
  use Mix.Project

  def project do
    [
      app: :neoscan_node,
      version: "2.0.0",
      build_path: "../../_build",
      config_path: "../../config/config.exs",
      deps_path: "../../deps",
      lockfile: "../../mix.lock",
      elixir: "~> 1.6",
      elixirc_options: [
        warnings_as_errors: true
      ],
      test_coverage: [
        tool: ExCoveralls
      ],
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.detail": :test,
        "coveralls.post": :test,
        "coveralls.html": :test
      ],
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  # Configuration for the OTP application
  #
  # Type "mix help compile.app" for more information
  def application do
    # Specify extra applications you'll use from Erlang/Elixir
    [
      extra_applications: [:logger, :httpoison],
      mod: {NeoscanNode.Application, []}
    ]
  end

  # Dependencies can be Hex packages:
  #
  #   {:my_dep, "~> 0.3.0"}
  #
  # Or git/path repositories:
  #
  #   {:my_dep, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
  #
  # To depend on another app inside the umbrella:
  #
  #   {:my_app, in_umbrella: true}
  #
  # Type "mix help deps" for more examples and options
  defp deps do
    [
      {:hackney, "~> 1.13", override: true},
      {:poison, "~> 3.1"},
      {:httpoison, "~> 0.11 or ~> 0.12 or ~> 0.13"},
      {:neo_node, in_umbrella: true},
      {:excoveralls, "~> 0.8", only: :test}
    ]
  end
end
24.215385
79
0.557814
f7951d6bb68ce998fe5aa1cf792b78b1a3ffc62f
791
exs
Elixir
test/scenic/primitive/style/cap_test.exs
mikeover/scenic
4b61c4996ed2d06b8cdf94f88c8a0522160e10b5
[ "Apache-2.0" ]
null
null
null
test/scenic/primitive/style/cap_test.exs
mikeover/scenic
4b61c4996ed2d06b8cdf94f88c8a0522160e10b5
[ "Apache-2.0" ]
null
null
null
test/scenic/primitive/style/cap_test.exs
mikeover/scenic
4b61c4996ed2d06b8cdf94f88c8a0522160e10b5
[ "Apache-2.0" ]
null
null
null
#
#  Created by Boyd Multerer on June 18, 2018.
#  Copyright © 2018 Kry10 Industries. All rights reserved.
#

defmodule Scenic.Primitive.Style.CapTest do
  use ExUnit.Case, async: true
  doctest Scenic

  alias Scenic.Primitive.Style
  alias Scenic.Primitive.Style.Cap

  # ============================================================================
  # verify - various forms

  test "verify works" do
    assert Cap.verify(:butt)
    assert Cap.verify(:round)
    assert Cap.verify(:square)
  end

  test "verify rejects invalid values" do
    refute Cap.verify("butt")
    refute Cap.verify(123)
  end

  test "verify! works" do
    assert Cap.verify!(:butt)
  end

  test "verify! raises an error" do
    assert_raise Style.FormatError, fn ->
      Cap.verify!("butt")
    end
  end
end
21.378378
80
0.618205
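The tests above pin down :butt, :round and :square as the accepted cap values. In practice the style is passed as an option on a stroked primitive when building a Scenic graph; a sketch under that assumption (the graph and primitive helpers come from Scenic's public API, not from this test file):

    graph =
      Scenic.Graph.build()
      |> Scenic.Primitives.line({{0, 0}, {100, 100}}, stroke: {8, :white}, cap: :round)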
f79534d1a047cf94624f7df4ec79a955dac5dd59
1,939
ex
Elixir
clients/source_repo/lib/google_api/source_repo/v1/model/mirror_config.ex
GoNZooo/elixir-google-api
cf3ad7392921177f68091f3d9001f1b01b92f1cc
[ "Apache-2.0" ]
null
null
null
clients/source_repo/lib/google_api/source_repo/v1/model/mirror_config.ex
GoNZooo/elixir-google-api
cf3ad7392921177f68091f3d9001f1b01b92f1cc
[ "Apache-2.0" ]
null
null
null
clients/source_repo/lib/google_api/source_repo/v1/model/mirror_config.ex
GoNZooo/elixir-google-api
cf3ad7392921177f68091f3d9001f1b01b92f1cc
[ "Apache-2.0" ]
1
2018-07-28T20:50:50.000Z
2018-07-28T20:50:50.000Z
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an &quot;AS IS&quot; BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This class is auto generated by the swagger code generator program. # https://github.com/swagger-api/swagger-codegen.git # Do not edit the class manually. defmodule GoogleApi.SourceRepo.V1.Model.MirrorConfig do @moduledoc """ Configuration to automatically mirror a repository from another hosting service, for example GitHub or BitBucket. ## Attributes - deployKeyId (String): ID of the SSH deploy key at the other hosting service. Removing this key from the other service would deauthorize Google Cloud Source Repositories from mirroring. Defaults to: `null`. - url (String): URL of the main repository at the other hosting service. Defaults to: `null`. - webhookId (String): ID of the webhook listening to updates to trigger mirroring. Removing this webook from the other hosting service will stop Google Cloud Source Repositories from receiving notifications, and thereby disabling mirroring. Defaults to: `null`. """ defstruct [ :"deployKeyId", :"url", :"webhookId" ] end defimpl Poison.Decoder, for: GoogleApi.SourceRepo.V1.Model.MirrorConfig do def decode(value, _options) do value end end defimpl Poison.Encoder, for: GoogleApi.SourceRepo.V1.Model.MirrorConfig do def encode(value, options) do GoogleApi.SourceRepo.V1.Deserializer.serialize_non_nil(value, options) end end
38.78
263
0.763796
f7954c72ab00ee841ce2b95765a910b2d04a6066
2,306
ex
Elixir
clients/plus/lib/google_api/plus/v1/model/comment_feed.ex
GoNZooo/elixir-google-api
cf3ad7392921177f68091f3d9001f1b01b92f1cc
[ "Apache-2.0" ]
null
null
null
clients/plus/lib/google_api/plus/v1/model/comment_feed.ex
GoNZooo/elixir-google-api
cf3ad7392921177f68091f3d9001f1b01b92f1cc
[ "Apache-2.0" ]
null
null
null
clients/plus/lib/google_api/plus/v1/model/comment_feed.ex
GoNZooo/elixir-google-api
cf3ad7392921177f68091f3d9001f1b01b92f1cc
[ "Apache-2.0" ]
1
2018-07-28T20:50:50.000Z
2018-07-28T20:50:50.000Z
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an &quot;AS IS&quot; BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This class is auto generated by the swagger code generator program. # https://github.com/swagger-api/swagger-codegen.git # Do not edit the class manually. defmodule GoogleApi.Plus.V1.Model.CommentFeed do @moduledoc """ ## Attributes - etag (String): ETag of this response for caching purposes. Defaults to: `null`. - id (String): The ID of this collection of comments. Defaults to: `null`. - items (List[Comment]): The comments in this page of results. Defaults to: `null`. - kind (String): Identifies this resource as a collection of comments. Value: \&quot;plus#commentFeed\&quot;. Defaults to: `null`. - nextLink (String): Link to the next page of activities. Defaults to: `null`. - nextPageToken (String): The continuation token, which is used to page through large result sets. Provide this value in a subsequent request to return the next page of results. Defaults to: `null`. - title (String): The title of this collection of comments. Defaults to: `null`. - updated (DateTime): The time at which this collection of comments was last updated. Formatted as an RFC 3339 timestamp. Defaults to: `null`. """ defstruct [ :"etag", :"id", :"items", :"kind", :"nextLink", :"nextPageToken", :"title", :"updated" ] end defimpl Poison.Decoder, for: GoogleApi.Plus.V1.Model.CommentFeed do import GoogleApi.Plus.V1.Deserializer def decode(value, options) do value |> deserialize(:"items", :list, GoogleApi.Plus.V1.Model.Comment, options) end end defimpl Poison.Encoder, for: GoogleApi.Plus.V1.Model.CommentFeed do def encode(value, options) do GoogleApi.Plus.V1.Deserializer.serialize_non_nil(value, options) end end
37.193548
200
0.724631
f79565025d857398516c9d06f658949561c182b5
3,845
exs
Elixir
.credo.exs
se-apc/sqlite_ecto2
c88d930b6007ec2d0c3d8556ace780d722886570
[ "MIT" ]
null
null
null
.credo.exs
se-apc/sqlite_ecto2
c88d930b6007ec2d0c3d8556ace780d722886570
[ "MIT" ]
null
null
null
.credo.exs
se-apc/sqlite_ecto2
c88d930b6007ec2d0c3d8556ace780d722886570
[ "MIT" ]
null
null
null
%{configs: [ %{name: "default", files: %{ included: ["lib/", "test/", "integration/"], excluded: [~r"/_build/", ~r"/deps/"] }, requires: [], check_for_updates: false, # You can customize the parameters of any check by adding a second element # to the tuple. # # To disable a check put `false` as second element: # # {Credo.Check.Design.DuplicatedCode, false} # checks: [ {Credo.Check.Consistency.ExceptionNames}, {Credo.Check.Consistency.LineEndings}, {Credo.Check.Consistency.MultiAliasImportRequireUse}, {Credo.Check.Consistency.ParameterPatternMatching}, {Credo.Check.Consistency.SpaceAroundOperators}, {Credo.Check.Consistency.SpaceInParentheses}, {Credo.Check.Consistency.TabsOrSpaces}, {Credo.Check.Design.AliasUsage, false}, {Credo.Check.Design.DuplicatedCode, excluded_macros: []}, # Disabled for now as those are checked by Code Climate {Credo.Check.Design.TagTODO, false}, {Credo.Check.Design.TagFIXME, false}, {Credo.Check.Readability.FunctionNames}, {Credo.Check.Readability.LargeNumbers}, {Credo.Check.Readability.MaxLineLength, false}, {Credo.Check.Readability.ModuleAttributeNames}, {Credo.Check.Readability.ModuleDoc}, {Credo.Check.Readability.ModuleNames}, {Credo.Check.Readability.ParenthesesInCondition}, {Credo.Check.Readability.PredicateFunctionNames}, {Credo.Check.Readability.PreferImplicitTry, false}, {Credo.Check.Readability.RedundantBlankLines}, {Credo.Check.Readability.Semicolons}, {Credo.Check.Readability.SinglePipe, false}, # ^^ Ecto does this quite a bit and we want to follow their # code format closely, so silence this warning. {Credo.Check.Readability.SpaceAfterCommas}, {Credo.Check.Readability.Specs, false}, {Credo.Check.Readability.StringSigils, false}, # ^^ Ecto does this quite a bit and we want to follow their # code format closely, so silence this warning. {Credo.Check.Readability.TrailingBlankLine}, {Credo.Check.Readability.TrailingWhiteSpace}, {Credo.Check.Readability.VariableNames}, {Credo.Check.Readability.RedundantBlankLines}, {Credo.Check.Refactor.ABCSize, false}, {Credo.Check.Refactor.CondStatements}, {Credo.Check.Refactor.CyclomaticComplexity}, {Credo.Check.Refactor.DoubleBooleanNegation, false}, {Credo.Check.Refactor.FunctionArity, max_arity: 8}, {Credo.Check.Refactor.MatchInCondition}, {Credo.Check.Refactor.PipeChainStart, false}, {Credo.Check.Refactor.NegatedConditionsInUnless}, {Credo.Check.Refactor.NegatedConditionsWithElse}, {Credo.Check.Refactor.Nesting}, {Credo.Check.Refactor.UnlessWithElse}, {Credo.Check.Refactor.VariableRebinding, false}, {Credo.Check.Warning.BoolOperationOnSameValues}, {Credo.Check.Warning.IExPry}, {Credo.Check.Warning.IoInspect, false}, {Credo.Check.Warning.NameRedeclarationByAssignment, false}, {Credo.Check.Warning.NameRedeclarationByCase, false}, {Credo.Check.Warning.NameRedeclarationByDef, false}, {Credo.Check.Warning.NameRedeclarationByFn, false}, {Credo.Check.Warning.OperationOnSameValues, false}, # Disabled because of p.x == p.x in Ecto queries {Credo.Check.Warning.OperationWithConstantResult}, {Credo.Check.Warning.UnusedEnumOperation}, {Credo.Check.Warning.UnusedFileOperation}, {Credo.Check.Warning.UnusedKeywordOperation}, {Credo.Check.Warning.UnusedListOperation}, {Credo.Check.Warning.UnusedPathOperation}, {Credo.Check.Warning.UnusedRegexOperation}, {Credo.Check.Warning.UnusedStringOperation}, {Credo.Check.Warning.UnusedTupleOperation}, ] } ]}
41.344086
78
0.70039
f7957d665a8192b0d0f9028253eddb7932742018
4,567
exs
Elixir
test/poison/parser_test.exs
CleverBytes/poison
eb48f76c2d3172c908035311c79ce21ac6fa16af
[ "CC0-1.0" ]
null
null
null
test/poison/parser_test.exs
CleverBytes/poison
eb48f76c2d3172c908035311c79ce21ac6fa16af
[ "CC0-1.0" ]
null
null
null
test/poison/parser_test.exs
CleverBytes/poison
eb48f76c2d3172c908035311c79ce21ac6fa16af
[ "CC0-1.0" ]
null
null
null
defmodule Poison.ParserTest do use ExUnit.Case, async: true import Poison.Parser alias Poison.SyntaxError test "numbers" do assert_raise SyntaxError, "Unexpected end of input at position 1", fn -> parse!("-") end assert_raise SyntaxError, "Unexpected token at position 1: -", fn -> parse!("--1") end assert_raise SyntaxError, "Unexpected token at position 1: 1", fn -> parse!("01") end assert_raise SyntaxError, "Unexpected token at position 0: .", fn -> parse!(".1") end assert_raise SyntaxError, "Unexpected end of input at position 2", fn -> parse!("1.") end assert_raise SyntaxError, "Unexpected end of input at position 2", fn -> parse!("1e") end assert_raise SyntaxError, "Unexpected end of input at position 5", fn -> parse!("1.0e+") end assert parse!("0") == 0 assert parse!("1") == 1 assert parse!("-0") == 0 assert parse!("-1") == -1 assert parse!("0.1") == 0.1 assert parse!("-0.1") == -0.1 assert parse!("0e0") == 0 assert parse!("0E0") == 0 assert parse!("1e0") == 1 assert parse!("1E0") == 1 assert parse!("1.0e0") == 1.0 assert parse!("1e+0") == 1 assert parse!("1.0e+0") == 1.0 assert parse!("0.1e1") == 0.1e1 assert parse!("0.1e-1") == 0.1e-1 assert parse!("99.99e99") == 99.99e99 assert parse!("-99.99e-99") == -99.99e-99 assert parse!("123456789.123456789e123") == 123456789.123456789e123 end test "strings" do assert_raise SyntaxError, "Unexpected end of input at position 1", fn -> parse!(~s(")) end assert_raise SyntaxError, "Unexpected end of input at position 2", fn -> parse!(~s("\\")) end assert_raise SyntaxError, "Unexpected token at position 1: k", fn -> parse!(~s("\\k")) end assert_raise SyntaxError, "Unexpected end of input at position 1", fn -> parse!(<<34, 128, 34>>) end assert_raise SyntaxError, "Unexpected end of input at position 7", fn -> parse!(~s("\\u2603\\")) end assert_raise SyntaxError, "Unexpected end of input at position 39", fn -> parse!(~s("Here's a snowman for you: ☃. 
Good day!)) end assert_raise SyntaxError, "Unexpected end of input at position 2", fn -> parse!(~s("𝄞)) end assert parse!(~s("\\"\\\\\\/\\b\\f\\n\\r\\t")) == ~s("\\/\b\f\n\r\t) assert parse!(~s("\\u2603")) == "☃" assert parse!(~s("\\u2028\\u2029")) == "\u2028\u2029" assert parse!(~s("\\uD834\\uDD1E")) == "𝄞" assert parse!(~s("\\uD834\\uDD1E")) == "𝄞" assert parse!(~s("\\uD799\\uD799")) == "힙힙" assert parse!(~s("✔︎")) == "✔︎" end test "objects" do assert_raise SyntaxError, "Unexpected end of input at position 1", fn -> parse!("{") end assert_raise SyntaxError, "Unexpected token at position 1: ,", fn -> parse!("{,") end assert_raise SyntaxError, "Unexpected token at position 6: }", fn -> parse!(~s({"foo"})) end assert_raise SyntaxError, "Unexpected token at position 14: }", fn -> parse!(~s({"foo": "bar",})) end assert parse!("{}") == %{} assert parse!(~s({"foo": "bar"})) == %{"foo" => "bar"} expected = %{"foo" => "bar", "baz" => "quux"} assert parse!(~s({"foo": "bar", "baz": "quux"})) == expected expected = %{"foo" => %{"bar" => "baz"}} assert parse!(~s({"foo": {"bar": "baz"}})) == expected end test "arrays" do assert_raise SyntaxError, "Unexpected end of input at position 1", fn -> parse!("[") end assert_raise SyntaxError, "Unexpected token at position 1: ,", fn -> parse!("[,") end assert_raise SyntaxError, "Unexpected token at position 3: ]", fn -> parse!("[1,]") end assert parse!("[]") == [] assert parse!("[1, 2, 3]") == [1, 2, 3] assert parse!(~s(["foo", "bar", "baz"])) == ["foo", "bar", "baz"] assert parse!(~s([{"foo": "bar"}])) == [%{"foo" => "bar"}] end test "whitespace" do assert_raise SyntaxError, "Unexpected end of input at position 0", fn -> parse!("") end assert_raise SyntaxError, "Unexpected end of input at position 4", fn -> parse!(" ") end assert parse!(" [ ] ") == [] assert parse!(" { } ") == %{} assert parse!(" [ 1 , 2 , 3 ] ") == [1, 2, 3] expected = %{"foo" => "bar", "baz" => "quux"} assert parse!(~s( { "foo" : "bar" , "baz" : "quux" } )) == expected end test "atom keys" do hash = :erlang.phash2(:crypto.strong_rand_bytes(8)) assert_raise ArgumentError, fn -> parse!(~s({"key#{hash}": null}), keys: :atoms!) end assert parse!(~s({"foo": "bar"}), keys: :atoms) == %{foo: "bar"} assert parse!(~s({"foo": "bar"}), keys: :atoms!) == %{foo: "bar"} end end
44.77451
133
0.579593
f7959655bec6282e17f8944b1df2e1663da19a7a
1,119
exs
Elixir
config/config.exs
SekiT/brainfux
aded7df53c885391a18a5f9e0ad4622a40901d68
[ "WTFPL" ]
4
2016-10-11T13:10:57.000Z
2016-10-19T07:24:31.000Z
config/config.exs
SekiT/brainfux
aded7df53c885391a18a5f9e0ad4622a40901d68
[ "WTFPL" ]
null
null
null
config/config.exs
SekiT/brainfux
aded7df53c885391a18a5f9e0ad4622a40901d68
[ "WTFPL" ]
null
null
null
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config

# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.

# You can configure for your application as:
#
#     config :brainfux, key: :value
#
# And access this configuration in your application as:
#
#     Application.get_env(:brainfux, :key)
#
# Or configure a 3rd-party app:
#
#     config :logger, level: :info
#

# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{Mix.env}.exs"
36.096774
73
0.751564
f795a69ccd7100a34e70e338f47de2154b239861
99
ex
Elixir
test/support/memory_journal.ex
revelrylabs/journal
82450c22a5f77e152a0f91ba37345bc411e002e0
[ "MIT" ]
1
2019-08-09T06:01:42.000Z
2019-08-09T06:01:42.000Z
test/support/memory_journal.ex
revelrylabs/journal
82450c22a5f77e152a0f91ba37345bc411e002e0
[ "MIT" ]
3
2019-02-23T23:31:16.000Z
2019-03-07T15:22:23.000Z
test/support/memory_journal.ex
revelrylabs/journal
82450c22a5f77e152a0f91ba37345bc411e002e0
[ "MIT" ]
1
2020-03-05T21:17:21.000Z
2020-03-05T21:17:21.000Z
defmodule Journal.Memory do
  use Journal, otp_app: :journal, adapter: Journal.Adapters.Memory
end
24.75
66
0.79798
f795bbd168cd23e1583ff7db1e0432b853a1d586
6,613
ex
Elixir
lib/hxl/eval/base.ex
drowzy/hcl
afb65a3fe4c86d90ba02ab8402d99019bb6509bc
[ "Apache-2.0" ]
18
2021-09-28T23:15:46.000Z
2021-12-21T15:04:40.000Z
lib/hxl/eval/base.ex
drowzy/hxl
afb65a3fe4c86d90ba02ab8402d99019bb6509bc
[ "Apache-2.0" ]
4
2021-09-28T14:50:16.000Z
2022-03-29T13:22:49.000Z
lib/hxl/eval/base.ex
drowzy/hcl
afb65a3fe4c86d90ba02ab8402d99019bb6509bc
[ "Apache-2.0" ]
null
null
null
defmodule HXL.Evaluator.Base do use HXL.Evaluator alias HXL.Ast.{ AccessOperation, Attr, Binary, Block, Body, Comment, Conditional, ForExpr, FunctionCall, Identifier, Literal, Object, TemplateExpr, Tuple, Unary } @impl true def eval(%Body{statements: stmts}, ctx) do Enum.reduce(stmts, ctx, fn x, acc -> case eval(x, acc) do {{k, v}, acc} -> %{acc | document: Map.put(acc.document, ctx.key_encoder.(k), v)} {map, acc} when is_map(map) -> %{acc | document: Map.merge(acc.document, map)} {:ignore, acc} -> acc end end) end def eval(%Block{body: body, type: type, labels: labels}, ctx) do # Build a nested structure from type + labels. # Given the a block: # a "b" "c" { # d = 1 # } # The following structure should be created: # # { # "a" => %{ # "b" => %{ # "d" => 1 # } # } # } block_scope = [type | labels] |> Enum.map(ctx.key_encoder) |> scope([]) |> Enum.reverse() block_ctx = eval(body, %{ctx | document: %{}}) {put_in(ctx.document, block_scope, block_ctx.document), ctx} end def eval(%Attr{name: name, expr: expr}, ctx) do {value, ctx} = eval(expr, ctx) st = Map.put(ctx.symbol_table, name, value) {{name, value}, %{ctx | symbol_table: st}} end def eval(%Comment{}, ctx) do {:ignore, ctx} end def eval(%Unary{expr: expr, operator: op}, ctx) do {value, ctx} = eval(expr, ctx) {eval_unary_op(op, value), ctx} end def eval(%Binary{left: left, operator: op, right: right}, ctx) do {left_value, ctx} = eval(left, ctx) {right_value, ctx} = eval(right, ctx) value = eval_bin_op(op, left_value, right_value) {value, ctx} end def eval(%Literal{value: value}, ctx) do {ast_value_to_value(value), ctx} end def eval(%Identifier{name: name}, ctx) do id_value = Map.fetch!(ctx.symbol_table, name) {id_value, ctx} end def eval(%TemplateExpr{delimiter: _, lines: lines}, ctx) do {Enum.join(lines, "\n"), ctx} end def eval(%Tuple{values: values}, ctx) do {values, ctx} = Enum.reduce(values, {[], ctx}, fn value, {list, ctx} -> {value, ctx} = eval(value, ctx) {[value | list], ctx} end) {Enum.reverse(values), ctx} end def eval(%Object{kvs: kvs}, ctx) do Enum.reduce(kvs, {%{}, ctx}, fn {k, v}, {state, ctx} -> {value, ctx} = eval(v, ctx) state = Map.put(state, k, value) {state, ctx} end) end def eval(%Conditional{predicate: pred, then: then, else: else_}, ctx) do if pred |> eval(ctx) |> elem(0) do eval(then, ctx) else eval(else_, ctx) end end def eval(%FunctionCall{name: name, arity: arity, args: args}, %{functions: funcs} = ctx) do case Map.get(funcs, name) do nil -> raise ArgumentError, message: "FunctionCalls cannot be used without providing a function with the same arity in #{__MODULE__}.eval/2. 
Got: #{name}/#{arity}" func when not is_function(func, arity) -> raise ArgumentError, message: "FunctionCall arity missmatch Expected: #{name}/#{arity} got: arity=#{:erlang.fun_info(func)[:arity]}" func -> {args, ctx} = Enum.reduce(args, {[], ctx}, fn arg, {acc, ctx} -> {eval_arg, ctx} = eval(arg, ctx) {[eval_arg | acc], ctx} end) {Kernel.apply(func, Enum.reverse(args)), ctx} end end def eval( %ForExpr{ enumerable: enum, conditional: conditional, enumerable_type: e_t, keys: keys, body: body }, ctx ) do {enum, ctx} = eval(enum, ctx) {acc, reducer} = closure(keys, conditional, body, ctx) for_into = case e_t do :for_tuple -> &Function.identity/1 :for_object -> &Enum.into(&1, %{}) end iterated = enum |> Enum.reduce(acc, reducer) |> elem(0) |> Enum.reverse() |> for_into.() {iterated, ctx} end def eval(%AccessOperation{expr: expr, operation: op, key: key}, ctx) do {expr_value, ctx} = eval(expr, ctx) access_fn = eval_op(op, key, ctx) {Kernel.get_in(expr_value, List.wrap(access_fn)), ctx} end def eval({k, v}, ctx) do {k_value, ctx} = eval(k, ctx) {v_value, ctx} = eval(v, ctx) {{k_value, v_value}, ctx} end def eval_op(:index_access, index_expr, ctx) do {index, _} = eval(index_expr, ctx) Access.at(index) end def eval_op(:attr_access, attr, _ctx) do Access.key!(attr) end def eval_op(op, attrs, ctx) when op in [:attr_splat, :full_splat] do accs = for {op, key} <- attrs, do: eval_op(op, key, ctx) access_map(accs) end defp access_map(ops) do fn :get, data, next when is_list(data) -> data |> Enum.map(&get_in(&1, ops)) |> Enum.map(next) end end defp ast_value_to_value({_, value}), do: value defp eval_unary_op(:!, expr), do: !expr defp eval_unary_op(op, expr), do: apply(Kernel, op, [expr]) defp eval_bin_op(:&&, left, right) do left && right end defp eval_bin_op(:||, left, right) do left || right end defp eval_bin_op(op, left, right) do apply(Kernel, op, [left, right]) end def scope([key], acc) do [key | acc] end def scope([key | rest], acc) do acc = [Access.key(key, %{}) | acc] scope(rest, acc) end defp closure([key], conditional, body, ctx) do conditional_fn = closure_cond(conditional) reducer = fn v, {acc, ctx} -> ctx = %{ctx | symbol_table: Map.put(ctx.symbol_table, key, v)} acc = if conditional_fn.(ctx) do {value, _} = eval(body, ctx) [value | acc] else acc end {acc, ctx} end {{[], ctx}, reducer} end defp closure([index, value], conditional, body, ctx) do conditional_fn = closure_cond(conditional) reducer = fn v, {acc, i, ctx} -> st = ctx.symbol_table |> Map.put(index, i) |> Map.put(value, v) ctx = %{ctx | symbol_table: st} acc = if conditional_fn.(ctx) do {value, _} = eval(body, ctx) [value | acc] else acc end {acc, i + 1, ctx} end {{[], 0, ctx}, reducer} end defp closure_cond(nil), do: fn _ctx -> true end defp closure_cond(expr) do fn ctx -> expr |> eval(ctx) |> elem(0) end end end
22.191275
138
0.553455
f795be67e34bd79eb0ee323717d685dcf5c8f86d
1,411
ex
Elixir
alcarin_api/apps/alcarin/test/support/data_case.ex
alcarin-org/alcarin-elixir
a04d4e043790a7773745e0fba7098e1c06362896
[ "MIT" ]
null
null
null
alcarin_api/apps/alcarin/test/support/data_case.ex
alcarin-org/alcarin-elixir
a04d4e043790a7773745e0fba7098e1c06362896
[ "MIT" ]
3
2018-05-26T10:36:22.000Z
2018-05-26T13:48:36.000Z
alcarin_api/apps/alcarin/test/support/data_case.ex
alcarin-org/alcarin-elixir
a04d4e043790a7773745e0fba7098e1c06362896
[ "MIT" ]
null
null
null
defmodule Alcarin.DataCase do
  @moduledoc """
  This module defines the setup for tests requiring
  access to the application's data layer.

  You may define functions here to be used as helpers in
  your tests.

  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      alias Alcarin.Repo

      import Ecto
      import Ecto.Changeset
      import Ecto.Query
      import Alcarin.DataCase

      use ExUnitProperties
    end
  end

  setup tags do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Alcarin.Repo)

    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(Alcarin.Repo, {:shared, self()})
    end

    :ok
  end

  @doc """
  A helper that transforms changeset errors to a map of messages.

      assert {:error, changeset} = Accounts.create_user(%{password: "short"})
      assert "password is too short" in errors_on(changeset).password
      assert %{password: ["password is too short"]} = errors_on(changeset)

  """
  def errors_on(changeset) do
    Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
      Enum.reduce(opts, message, fn {key, value}, acc ->
        String.replace(acc, "%{#{key}}", to_string(value))
      end)
    end)
  end
end
25.196429
77
0.67966
f795e7bf3cf5b76f1115cea0d6292d66a9ede634
1,443
ex
Elixir
lib/aph/main.ex
tometoproject/tometo
ed91069b11a020723edb9a143de29d9bac86a2b0
[ "BlueOak-1.0.0", "Apache-2.0" ]
8
2019-09-26T13:59:25.000Z
2020-03-30T21:26:48.000Z
lib/aph/main.ex
tometoproject/tometo
ed91069b11a020723edb9a143de29d9bac86a2b0
[ "BlueOak-1.0.0", "Apache-2.0" ]
39
2019-11-16T02:24:28.000Z
2020-01-14T16:40:28.000Z
lib/aph/main.ex
tometoproject/tometo
ed91069b11a020723edb9a143de29d9bac86a2b0
[ "BlueOak-1.0.0", "Apache-2.0" ]
2
2019-12-16T07:55:14.000Z
2020-06-11T04:14:00.000Z
defmodule Aph.Main do
  @moduledoc """
  The context for Avatars.
  """

  import Ecto.Query, warn: false

  alias Aph.Repo
  alias Aph.Main.Avatar

  def list_avatars do
    Repo.all(Avatar)
  end

  def get_avatar(id), do: Repo.get!(Avatar, id)

  def create_avatar(attrs \\ %{}, pic1, pic2) do
    with {:ok, avatar} <- %Avatar{} |> Avatar.changeset(attrs) |> Repo.insert(),
         :ok <- File.cp(pic1.path, elem(avatar_picture_path(avatar.id), 0)),
         :ok <- File.cp(pic2.path, elem(avatar_picture_path(avatar.id), 1)) do
      {:ok, avatar}
    else
      {:error, _reason} = error -> error
    end
  end

  def update_avatar(%Avatar{} = avatar, attrs \\ %{}, pic1, pic2) do
    case avatar |> Avatar.changeset(attrs) |> Repo.update() do
      {:ok, avatar} ->
        if !is_bitstring(pic1) do
          :ok = File.cp(pic1.path, elem(avatar_picture_path(avatar.id), 0))
        end

        if !is_bitstring(pic2) do
          :ok = File.cp(pic2.path, elem(avatar_picture_path(avatar.id), 1))
        end

        {:ok, avatar}

      {:error, _reason} = error ->
        error
    end
  end

  def delete_avatar(%Avatar{} = avatar) do
    Repo.delete(avatar)
  end

  def change_avatar(%Avatar{} = avatar) do
    Avatar.changeset(avatar, %{})
  end

  def avatar_picture_path(id) do
    pic1 = ["priv/static", "av#{id}-1.png"] |> Path.join()
    pic2 = ["priv/static", "av#{id}-2.png"] |> Path.join()
    {pic1, pic2}
  end
end
24.457627
80
0.593902
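For the context above, avatar_picture_path/1 is the single source of truth for where both picture variants live, which is what create_avatar/3 and update_avatar/4 rely on when copying uploaded files:

    Aph.Main.avatar_picture_path(7)
    # => {"priv/static/av7-1.png", "priv/static/av7-2.png"}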
f79642f5d52e5a2dc225cda8e7246bd17b91a85a
284
ex
Elixir
lib/actions/deploy_contract.ex
alexfilatov/near_api
85d92f0ad4fae3a1135adba6a48a1c284870bed8
[ "MIT" ]
7
2021-12-27T19:02:30.000Z
2022-03-30T14:08:46.000Z
lib/actions/deploy_contract.ex
alexfilatov/near_api
85d92f0ad4fae3a1135adba6a48a1c284870bed8
[ "MIT" ]
4
2021-12-29T21:16:02.000Z
2022-01-01T22:07:17.000Z
lib/actions/deploy_contract.ex
alexfilatov/near_api
85d92f0ad4fae3a1135adba6a48a1c284870bed8
[ "MIT" ]
1
2022-02-21T18:17:36.000Z
2022-02-21T18:17:36.000Z
defmodule NearApi.Actions.DeployContract do
  @moduledoc """
  Transaction Actions DeployContract
  """
  @type t :: %__MODULE__{action_code: integer, code: integer}

  use Borsh,
    schema: [
      action_code: :u8,
      code: :u8
    ]

  defstruct [:code, action_code: 1]
end
16.705882
61
0.651408
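A construction sketch for the action struct above; the code value is illustrative, and action_code falls back to its declared default of 1:

    action = %NearApi.Actions.DeployContract{code: 0}
    action.action_code
    # => 1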
f7964815b30dd8b89c7d44ea1767efd069256bb1
606
ex
Elixir
lib/blockchain.ex
esdrasedu/ex_coin
e468977a17295daffef1ce9db0eabf0057b591f1
[ "MIT" ]
null
null
null
lib/blockchain.ex
esdrasedu/ex_coin
e468977a17295daffef1ce9db0eabf0057b591f1
[ "MIT" ]
null
null
null
lib/blockchain.ex
esdrasedu/ex_coin
e468977a17295daffef1ce9db0eabf0057b591f1
[ "MIT" ]
null
null
null
defmodule ExCoin.Blockchain do
  @moduledoc """
  Blockchain is a monotonically increasing chain of blocks in which subsequent blocks are
  verified based on the data of preceding blocks in the chain.

  - address: a unique base58 encoded value representing a public_key in the network
  - ledger: a mapping of address and balances
  - transaction: a struct representing a single atomic modification to the ledger with some extra data to prove validity
  - block: a list of ordered transactions with some extra data to prove validity
  """

  defstruct [:address, :difficulty, :transaction, :block, :ledger]
end
43.285714
146
0.775578
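A construction sketch for the struct above, with placeholder values for every field:

    chain = %ExCoin.Blockchain{
      address: "1ExampleBase58Address",
      difficulty: 4,
      ledger: %{"1ExampleBase58Address" => 100},
      transaction: nil,
      block: []
    }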
f7964a85882b1994ab5cc3a8586e60ba084c30e3
140
ex
Elixir
web/controllers/page_controller.ex
mehmetboraezer/elephant
2e23af1b632fbef0a1371acd3ae27bd063e1212e
[ "Apache-2.0" ]
null
null
null
web/controllers/page_controller.ex
mehmetboraezer/elephant
2e23af1b632fbef0a1371acd3ae27bd063e1212e
[ "Apache-2.0" ]
null
null
null
web/controllers/page_controller.ex
mehmetboraezer/elephant
2e23af1b632fbef0a1371acd3ae27bd063e1212e
[ "Apache-2.0" ]
null
null
null
defmodule Elephant.PageController do
  use Elephant.Web, :controller

  def index(conn, _params) do
    render conn, "index.html"
  end
end
17.5
36
0.735714
f79665d1d5ea19c5e6d7ac5ae47e7f2a7bc66864
1,334
exs
Elixir
config/config.exs
taylonr/random-tweets
b49da6231565c89ce23a13e40eb7070a9f54344b
[ "MIT" ]
4
2016-11-24T08:40:44.000Z
2019-05-24T13:09:14.000Z
config/config.exs
taylonr/random-tweets
b49da6231565c89ce23a13e40eb7070a9f54344b
[ "MIT" ]
null
null
null
config/config.exs
taylonr/random-tweets
b49da6231565c89ce23a13e40eb7070a9f54344b
[ "MIT" ]
null
null
null
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config

# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.

# You can configure for your application as:
#
#     config :random_tweets, key: :value

config :quantum, cron: [
  # Every minute
  #"30 13 1,15 * *": {RandomTweets.CLI, :process}
  "10 18 2,*/9 * *": {RandomTweets.CLI, :process},
  "25,55 * * * *": {RandomTweets.Heartbeat, :alive}
]

#
# And access this configuration in your application as:
#
#     Application.get_env(:random_tweets, :key)
#
# Or configure a 3rd-party app:
#
#     config :logger, level: :info
#

# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{Mix.env}.exs"
36.054054
73
0.724138
f7969bd3b7a79f377f69a796a5d41f569092c3a0
919
exs
Elixir
mix.exs
thiamsantos/conceal
3d636d3f043314afd6565c2f67ad2781063c87e0
[ "Apache-2.0" ]
2
2020-03-02T00:48:45.000Z
2020-03-05T17:58:19.000Z
mix.exs
thiamsantos/conceal
3d636d3f043314afd6565c2f67ad2781063c87e0
[ "Apache-2.0" ]
null
null
null
mix.exs
thiamsantos/conceal
3d636d3f043314afd6565c2f67ad2781063c87e0
[ "Apache-2.0" ]
null
null
null
defmodule Conceal.MixProject do
  use Mix.Project

  def project do
    [
      app: :conceal,
      version: "0.1.1",
      elixir: "~> 1.7",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      description: "Easy encrypt data with AES-CBC-256",
      package: package(),
      name: "Conceal",
      docs: docs()
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end

  defp package do
    [
      maintainers: ["Thiago Santos"],
      licenses: ["Apache 2.0"],
      links: %{"GitHub" => "https://github.com/thiamsantos/conceal"}
    ]
  end

  defp docs do
    [
      main: "Conceal",
      source_url: "https://github.com/thiamsantos/conceal"
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:ex_doc, "~> 0.21.3", only: :dev, runtime: false}
    ]
  end
end
19.553191
68
0.569097
f796d2d4e9560f136da75f4e00e3bb585a40ebe9
447
ex
Elixir
lib/bitcoin/protocol/types/var_string.ex
anthdm/bitcoin-elixir
5ca9f8bf4e9b2b38527670e80568a85e0aa612c0
[ "Apache-2.0" ]
81
2017-04-20T17:42:59.000Z
2022-02-08T03:49:22.000Z
lib/bitcoin/protocol/types/var_string.ex
anthdm/bitcoin-elixir
5ca9f8bf4e9b2b38527670e80568a85e0aa612c0
[ "Apache-2.0" ]
2
2017-09-07T13:58:57.000Z
2018-01-13T10:38:07.000Z
lib/bitcoin/protocol/types/var_string.ex
anthdm/bitcoin-elixir
5ca9f8bf4e9b2b38527670e80568a85e0aa612c0
[ "Apache-2.0" ]
22
2017-08-16T14:19:44.000Z
2021-12-22T04:36:57.000Z
defmodule Bitcoin.Protocol.Types.VarString do

  alias Bitcoin.Protocol.Types.VarInteger

  @spec parse_stream(binary) :: {binary, binary}
  def parse_stream(payload) do
    {string_size, payload} = VarInteger.parse_stream(payload)

    << data :: bytes-size(string_size), payload :: binary >> = payload

    {data, payload}
  end

  @spec serialize(binary) :: binary
  def serialize(str), do: (byte_size(str) |> VarInteger.serialize) <> str

end
24.833333
73
0.709172
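A round-trip sketch for the module above. For short strings the Bitcoin var_int prefix is a single length byte, so assuming VarInteger follows that encoding:

    encoded = Bitcoin.Protocol.Types.VarString.serialize("abc")
    # => <<3, "abc">>

    Bitcoin.Protocol.Types.VarString.parse_stream(encoded <> "rest")
    # => {"abc", "rest"}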
f796d526d360cbc5a1e4d5d7dd630ddbb63d1f31
2,102
exs
Elixir
test/json_schema_test_suite/draft6/boolean_schema_test.exs
hrzndhrn/json_xema
955eab7b0919d144b38364164d90275201c89474
[ "MIT" ]
54
2019-03-10T19:51:07.000Z
2021-12-23T07:31:09.000Z
test/json_schema_test_suite/draft6/boolean_schema_test.exs
hrzndhrn/json_xema
955eab7b0919d144b38364164d90275201c89474
[ "MIT" ]
36
2018-05-20T09:13:20.000Z
2021-03-14T15:22:03.000Z
test/json_schema_test_suite/draft6/boolean_schema_test.exs
hrzndhrn/json_xema
955eab7b0919d144b38364164d90275201c89474
[ "MIT" ]
3
2019-04-12T09:08:51.000Z
2019-12-04T01:23:56.000Z
defmodule JsonSchemaTestSuite.Draft6.BooleanSchemaTest do use ExUnit.Case import JsonXema, only: [valid?: 2] describe ~s|boolean schema 'true'| do setup do %{schema: JsonXema.new(true)} end test ~s|number is valid|, %{schema: schema} do assert valid?(schema, 1) end test ~s|string is valid|, %{schema: schema} do assert valid?(schema, "foo") end test ~s|boolean true is valid|, %{schema: schema} do assert valid?(schema, true) end test ~s|boolean false is valid|, %{schema: schema} do assert valid?(schema, false) end test ~s|null is valid|, %{schema: schema} do assert valid?(schema, nil) end test ~s|object is valid|, %{schema: schema} do assert valid?(schema, %{"foo" => "bar"}) end test ~s|empty object is valid|, %{schema: schema} do assert valid?(schema, %{}) end test ~s|array is valid|, %{schema: schema} do assert valid?(schema, ["foo"]) end test ~s|empty array is valid|, %{schema: schema} do assert valid?(schema, []) end end describe ~s|boolean schema 'false'| do setup do %{schema: JsonXema.new(false)} end test ~s|number is invalid|, %{schema: schema} do refute valid?(schema, 1) end test ~s|string is invalid|, %{schema: schema} do refute valid?(schema, "foo") end test ~s|boolean true is invalid|, %{schema: schema} do refute valid?(schema, true) end test ~s|boolean false is invalid|, %{schema: schema} do refute valid?(schema, false) end test ~s|null is invalid|, %{schema: schema} do refute valid?(schema, nil) end test ~s|object is invalid|, %{schema: schema} do refute valid?(schema, %{"foo" => "bar"}) end test ~s|empty object is invalid|, %{schema: schema} do refute valid?(schema, %{}) end test ~s|array is invalid|, %{schema: schema} do refute valid?(schema, ["foo"]) end test ~s|empty array is invalid|, %{schema: schema} do refute valid?(schema, []) end end end
23.355556
59
0.598002
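The test module above exercises JSON Schema boolean schemas; a condensed sketch using the same JsonXema calls that appear in the record:

schema_true = JsonXema.new(true)
schema_false = JsonXema.new(false)

# The `true` schema accepts any value, the `false` schema rejects any value.
JsonXema.valid?(schema_true, %{"foo" => "bar"})   # => true
JsonXema.valid?(schema_false, %{"foo" => "bar"})  # => false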
f797094d7a2a1e9078b8b28e6db559fe9f569790
2,346
ex
Elixir
web/models/card.ex
flyingalex/another-phoenix-trello
c8844c4f4ad64df2f809a3f87637a416561c71b6
[ "MIT" ]
null
null
null
web/models/card.ex
flyingalex/another-phoenix-trello
c8844c4f4ad64df2f809a3f87637a416561c71b6
[ "MIT" ]
null
null
null
web/models/card.ex
flyingalex/another-phoenix-trello
c8844c4f4ad64df2f809a3f87637a416561c71b6
[ "MIT" ]
null
null
null
defmodule PhoenixTrello.Card do use PhoenixTrello.Web, :model alias PhoenixTrello.{Repo, Card, Comment, CardMember} @derive {Jason.Encoder, only: [:id, :name, :list_id, :description, :position, :tags, :comments, :card_members, :members]} schema "cards" do field :name, :string field :description, :string field :position, :integer field :tags, {:array, :string} belongs_to :list, PhoenixTrello.List has_many :comments, Comment has_many :card_members, CardMember has_many :members, through: [:card_members, :user] timestamps() end @required_fields ~w(name list_id)a @optional_fields ~w(description position tags)a @doc """ Creates a changeset based on the `model` and `params`. If no params are provided, an invalid changeset is returned with no validation performed. """ def changeset(model, params \\ %{}) do model |> cast(params, List.flatten(@required_fields, @optional_fields)) |> validate_required(@required_fields) |> calculate_position() end def update_changeset(model, params \\ %{}) do model |> cast(params, List.flatten(@required_fields, @optional_fields)) |> validate_required(@required_fields) end defp calculate_position(current_changeset) do model = current_changeset.data query = from(c in Card, select: c.position, where: c.list_id == ^(model.list_id), order_by: [desc: c.position], limit: 1) case Repo.one(query) do nil -> put_change(current_changeset, :position, 1024) position -> put_change(current_changeset, :position, position + 1024) end end def preload_all(query \\ %Card{}) do comments_query = from c in Comment, order_by: [desc: c.inserted_at], preload: :user from c in query, preload: [:members, [comments: ^comments_query]] end def get_by_user_and_board(query \\ %Card{}, card_id, user_id, board_id) do from c in query, left_join: co in assoc(c, :comments), left_join: cu in assoc(co, :user), left_join: me in assoc(c, :members), join: l in assoc(c, :list), join: b in assoc(l, :board), join: ub in assoc(b, :user_boards), where: ub.user_id == ^user_id and b.id == ^board_id and c.id == ^card_id, preload: [comments: {co, user: cu }, members: me] end end
30.868421
123
0.660699
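A minimal changeset sketch for the Card schema above; update_changeset/2 is used because, unlike changeset/2, it does not query the Repo to compute a position, and the field values are placeholders:

alias PhoenixTrello.Card

card = %Card{name: "Old name", list_id: 1}
changeset = Card.update_changeset(card, %{"name" => "New name", "tags" => ["urgent"]})
# changeset.valid? => true, since :name and :list_id satisfy the required fields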
f79717b428a2b1ae42f111eae121ae8f5d0606a6
2,352
ex
Elixir
clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p3beta1_output_config.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
null
null
null
clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p3beta1_output_config.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
1
2020-12-18T09:25:12.000Z
2020-12-18T09:25:12.000Z
clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p3beta1_output_config.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
1
2020-10-04T10:12:44.000Z
2020-10-04T10:12:44.000Z
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p3beta1OutputConfig do @moduledoc """ The desired output location and metadata. ## Attributes * `batchSize` (*type:* `integer()`, *default:* `nil`) - The max number of response protos to put into each output JSON file on Google Cloud Storage. The valid range is [1, 100]. If not specified, the default value is 20. For example, for one pdf file with 100 pages, 100 response protos will be generated. If `batch_size` = 20, then 5 json files each containing 20 response protos will be written under the prefix `gcs_destination`.`uri`. Currently, batch_size only applies to GcsDestination, with potential future support for other output configurations. * `gcsDestination` (*type:* `GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p3beta1GcsDestination.t`, *default:* `nil`) - The Google Cloud Storage location to write the output(s) to. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :batchSize => integer(), :gcsDestination => GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p3beta1GcsDestination.t() } field(:batchSize) field(:gcsDestination, as: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p3beta1GcsDestination) end defimpl Poison.Decoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p3beta1OutputConfig do def decode(value, options) do GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p3beta1OutputConfig.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p3beta1OutputConfig do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
46.117647
559
0.760629
f79720705634e7b4f7a39d50085bd9ff373ee67c
323
exs
Elixir
priv/repo/migrations/20170323041850_create_accounts_user.exs
KZeillmann/MenuPlanner
56bd6afca8e321261720dc290f180f4944b8b483
[ "MIT" ]
null
null
null
priv/repo/migrations/20170323041850_create_accounts_user.exs
KZeillmann/MenuPlanner
56bd6afca8e321261720dc290f180f4944b8b483
[ "MIT" ]
null
null
null
priv/repo/migrations/20170323041850_create_accounts_user.exs
KZeillmann/MenuPlanner
56bd6afca8e321261720dc290f180f4944b8b483
[ "MIT" ]
null
null
null
defmodule MenuPlanner.Repo.Migrations.CreateMenuPlanner.Accounts.User do use Ecto.Migration def change do create table(:accounts_users) do add :email, :string add :name, :string add :password_hash, :string timestamps() end create unique_index(:accounts_users, [:email]) end end
20.1875
72
0.693498
f79738a622fa8acfa6528c67c8163eb67a3d9bcd
15,197
ex
Elixir
lib/cssex.ex
mnussbaumer/cssex
da3b0add68d766faacb459c14a8eb43ca32aa9f4
[ "MIT" ]
13
2021-03-16T15:02:08.000Z
2021-09-22T02:40:25.000Z
lib/cssex.ex
mnussbaumer/cssex
da3b0add68d766faacb459c14a8eb43ca32aa9f4
[ "MIT" ]
3
2021-03-19T20:14:19.000Z
2021-03-29T13:34:17.000Z
lib/cssex.ex
mnussbaumer/cssex
da3b0add68d766faacb459c14a8eb43ca32aa9f4
[ "MIT" ]
null
null
null
defmodule CSSEx do @moduledoc """ CSSEx is a CSS pre-processor inspired by SASS/SCSS. A complete description of what it can do can be found in the project's README.md [hex](https://hexdocs.pm/cssex/readme.html) / [github](https://github.com/mnussbaumer/cssex) file. This module is the file watcher responsible for automatically processing your CSSEx files into proper CSS files. Refer to the [usage](https://hexdocs.pm/cssex/readme.html#usage) section for details. """ @behaviour :gen_statem require Logger @timeout 15_000 defstruct entry_points: [], pretty_print: false, file_watch: false, watchers: %{}, no_start: false, dependency_graph: %{}, monitors: %{}, reply_to: [], reprocess: [] @type t :: %__MODULE__{ entry_points: list(Keyword.t()), pretty_print: boolean, file_watch: boolean, no_start: boolean } @doc """ Generate a `%CSSEx{}` struct from a keyword list or a map. Its only relevant use case is to "parse" app config environment values. You can also pass a directory as the last argument where it will be joined to the paths in the `:entry_points`. Whatever the final path it will be expanded when this config is passed as the argument to `start_link/1` """ @spec make_config(Keyword.t() | Map.t(), base_dir :: String.t()) :: %__MODULE__{} def make_config(opts, dir \\ nil) when is_list(opts) do Enum.reduce(opts, %__MODULE__{}, fn {k, v}, acc -> case k do :entry_points -> new_entries = Enum.map(v, fn {orig, final} -> case dir do nil -> {orig, final} _ -> {Path.join([dir, orig]), Path.join([dir, final])} end end) struct(acc, [{k, new_entries}]) _ -> struct(acc, [{k, v}]) end end) end @doc """ Start a watcher responsible for automatically processing cssex files into css files. Define in the application config something as: ``` config :yourapp_web, CSSEx, entry_points: [ {"../../../../apps/yourapp_web/assets/cssex/app.cssex", "../../../../apps/yourapp_web/assets/css/app.css"} ] ``` With as many `:entry_points` as necessary specified as tuples of `{"source", "dest"}` Then, ``` Application.get_env(:yourapp_web, CSSEx) |> CSSEx.make_config(Application.app_dir(:your_app_web)) |> CSSEx.start_link() ``` Or add it to a supervision tree. Refer to the README.md file. 
""" @spec start_link(%__MODULE__{}) :: {:ok, pid} | {:error, term} def start_link(%__MODULE__{} = config) do :gen_statem.start_link(__MODULE__, config, []) end @impl :gen_statem def callback_mode(), do: :handle_event_function @impl :gen_statem def init(%__MODULE__{} = config) do {:ok, :starting, config, [{:next_event, :internal, :prepare}]} end @impl :gen_statem # parse and set up the correct paths in case they're relative and substitute the entry_points field with those updated, trigger the :setup event def handle_event(:internal, :prepare, _, %{entry_points: entries} = data) do cwd = File.cwd!() new_entries = Enum.reduce(entries, %{}, fn {path, final}, acc -> expanded_base = assemble_path(path, cwd) expanded_final = assemble_path(final, cwd) Map.put(acc, expanded_base, expanded_final) end) {:keep_state, %{data | entry_points: new_entries}, [{:next_event, :internal, :setup}]} end # create the basic depedency graph, in this case it will just be for the entry points base paths, trigger the :start event def handle_event(:internal, :setup, _, %{entry_points: entries, dependency_graph: dg} = data) do new_dg = Enum.reduce(entries, dg, fn {path, _}, acc -> Map.put(acc, path, [path]) end) {:keep_state, %{data | dependency_graph: new_dg}, [{:next_event, :internal, :synch_watchers}, {:next_event, :internal, :start}]} end # for each entry point check if it exists, if it does start a parser under a monitor, if it not log an error def handle_event(:internal, :start, _, %{entry_points: entries} = data) do self_pid = self() new_monitors = Enum.reduce(entries, %{}, fn {path, final}, monitors_acc -> case File.exists?(path) do true -> {_pid, monitor} = Process.spawn(__MODULE__, :parse_file, [path, final, self_pid], [:monitor]) Map.put(monitors_acc, monitor, path) false -> Logger.error("CSSEx Watcher: Couldn't find entry point #{path}") monitors_acc end end) {:next_state, :processing, %{data | monitors: new_monitors}, [{:next_event, :internal, :set_status}, @timeout]} end def handle_event( :internal, {:process, file_path}, _, %{entry_points: entries, monitors: monitors, reprocess: reprocess} = data ) do case Map.get(monitors, file_path) do nil -> self_pid = self() final_file = Map.get(entries, file_path) {_pid, monitor} = Process.spawn(__MODULE__, :parse_file, [file_path, final_file, self_pid], [:monitor]) new_monitors = Map.put(monitors, monitor, file_path) new_reprocess = Enum.filter(reprocess, fn path -> file_path == path end) {:next_state, :processing, %{data | monitors: new_monitors, reprocess: new_reprocess}, [@timeout]} _ -> case file_path in reprocess do true -> {:keep_state_and_data, [{:next_event, :internal, :set_status}]} false -> {:keep_state, %{data | reprocess: [file_path | reprocess]}, [{:next_event, :internal, :set_status}]} end end end def handle_event(:internal, {:post_process, parser}, _, _data) do case parser do %CSSEx.Parser{valid?: true, warnings: [], file: file} -> Logger.info( IO.ANSI.green() <> "CSSEx PROCESSED file :: #{file}\n" <> IO.ANSI.default_color() ) {:keep_state_and_data, [{:next_event, :internal, :set_status}]} %CSSEx.Parser{valid?: true, warnings: warnings, file: file} -> Enum.each(warnings, fn warning -> Logger.warn("CSSEx warning when processing #{file} ::\n\n #{warning}\n") end) {:keep_state_and_data, [{:next_event, :internal, :set_status}]} {original_file, %CSSEx.Parser{valid?: false, error: error}} -> Logger.error("CSSEx ERROR when processing #{original_file} :: \n\n #{error}\n") {:keep_state_and_data, [{:next_event, :internal, :set_status}]} end end def 
handle_event(:internal, :set_status, _, %{monitors: monitors} = data) do case monitors == %{} do true -> {:next_state, :ready, data, [{:next_event, :internal, :maybe_reply}]} _ -> {:next_state, :processing, data, []} end end def handle_event(:internal, :maybe_reply, :ready, %{reply_to: reply_to}) do to_reply = Enum.map(reply_to, fn from -> {:reply, from, :ready} end) {:keep_state_and_data, to_reply} end def handle_event(:internal, :maybe_reply, _, _), do: {:keep_state_and_data, []} def handle_event(:internal, {:maybe_process, file_path}, _, %{ dependency_graph: dependency_graph, entry_points: eps }) do events = Enum.reduce(eps, [], fn {entry, _}, acc -> deps = Map.get(dependency_graph, entry, []) case file_path in deps || file_path == entry do true -> [{{:timeout, {:to_process, entry}}, 50, nil} | acc] false -> acc end end) |> Enum.uniq() {:keep_state_and_data, events} end def handle_event({:timeout, {:to_process, entry}}, _, _, _data) do {:keep_state_and_data, [{:next_event, :internal, {:process, entry}}]} end def handle_event( :internal, {:refresh_dependencies, %CSSEx.Parser{valid?: true, file: original_file, dependencies: dependencies}}, _state, %{dependency_graph: d_g} = data ) do new_d_g = clean_up_deps(d_g, original_file, dependencies) {:keep_state, %{data | dependency_graph: new_d_g}, [{:next_event, :internal, :synch_watchers}]} end def handle_event( :internal, {:refresh_dependencies, {original_file, %CSSEx.Parser{file: error_file, dependencies: dependencies}}}, _state, %{dependency_graph: d_g} = data ) do to_clean_up = case error_file do nil -> dependencies _ -> [error_file | dependencies] end new_d_g = clean_up_deps(d_g, original_file, to_clean_up) {:keep_state, %{data | dependency_graph: new_d_g}, [{:next_event, :internal, :synch_watchers}]} end def handle_event(:internal, :synch_watchers, _, %{watchers: watchers} = data) do watch_paths = watch_list(data) new_watchers = synch_watchers(watch_paths, watchers) new_data = maybe_start_watchers(watch_paths, %{data | watchers: new_watchers}) {:keep_state, new_data, []} end def handle_event( :info, {:DOWN, ref, :process, _, _reason}, _, %{monitors: monitors, reprocess: reprocess} = data ) when is_map_key(monitors, ref) do {path, new_monitors} = Map.pop(monitors, ref) new_data = %{data | monitors: new_monitors} case path in reprocess do false -> {:keep_state, new_data, [{:next_event, :internal, :set_status}]} true -> {:keep_state, new_data, [{{:timeout, {:to_process, path}}, 50, nil}]} end end def handle_event(:info, {:DOWN, _, _, _, _}, _, _data), do: {:keep_state_and_data, []} def handle_event(:info, {:parsed, parsed}, _, _data) do {:keep_state_and_data, [ {:next_event, :internal, {:refresh_dependencies, parsed}}, {:next_event, :internal, {:post_process, parsed}} ]} end def handle_event(:info, {:file_event, _worker_pid, {file_path, events}}, _, _data) do case (:modified in events and :closed in events) or :closed in events do false -> {:keep_state_and_data, []} true -> {:keep_state_and_data, [{:next_event, :internal, {:maybe_process, file_path}}]} end end def handle_event(:info, {:file_event, worker_pid, :stop}, _, %{watchers: watchers} = data) do {path, new_watchers} = Map.pop(watchers, worker_pid) {_, final_watchers} = Map.pop(new_watchers, path) new_data = %{data | watchers: final_watchers} {:keep_state, new_data, [{:next_event, :internal, {:retry_watchers, [path]}}]} end def handle_event(:info, {:retry_watchers, paths}, _, data) do new_data = maybe_start_watchers(paths, data) {:keep_state, new_data, []} end def handle_event({:call, 
from}, :status, state, %{reply_to: reply_to} = data) do case state do :ready -> {:keep_state_and_data, [{:reply, from, :ready}]} _ -> {:keep_state, %{data | reply_to: [from | reply_to]}, []} end end @doc false def clean_up_deps(d_graph, original_file, dependencies) do Enum.reduce(dependencies, d_graph, fn dep, acc -> case Map.get(acc, dep) do nil -> Map.put(acc, dep, [original_file]) deps -> case original_file in deps do true -> acc false -> Map.put(acc, dep, [original_file | deps]) end end end) |> Enum.reduce(d_graph, fn {file, deps}, acc -> case original_file do ^file -> Map.put(acc, file, dependencies) parent -> case file in dependencies do true -> case parent in deps do true -> Map.put(acc, file, deps) false -> Map.put(acc, file, [parent | deps]) end false -> case parent in deps do true -> Map.put(acc, file, Enum.filter(deps, fn d -> d != parent end)) false -> acc end end end end) |> Enum.reduce(%{}, fn {file, deps}, acc -> case deps do [] -> acc _ -> Map.put(acc, file, Enum.uniq(deps)) end end) end @doc false def watch_list(%{dependency_graph: dg} = _data) do Enum.reduce(dg, [], fn {k, deps}, acc -> Enum.reduce(deps, [Path.dirname(k) | acc], fn dep, acc_i -> [Path.dirname(dep) | acc_i] end) end) |> Enum.uniq() end @doc false def synch_watchers(paths, watchers) do Enum.reduce(watchers, %{}, fn {k, v}, acc -> case Map.get(acc, k) do nil -> case is_pid(k) do true -> case v in paths do true -> acc |> Map.put(k, v) |> Map.put(v, k) _ -> Process.exit(k, :normal) acc end false -> case k in paths do true -> acc |> Map.put(k, v) |> Map.put(v, k) false -> Process.exit(v, :normal) acc end end _ -> acc end end) end @doc false def maybe_start_watchers(paths, %{dependency_graph: dg, watchers: watchers} = data) do dg_paths = Enum.reduce(dg, [], fn {k, _}, acc -> [Path.dirname(k) | acc] end) |> Enum.uniq() new_watchers = Enum.reduce(paths, watchers, fn path, acc -> case Map.get(watchers, path) do pid when is_pid(pid) -> acc nil -> case path in dg_paths do false -> acc _ -> case File.exists?(path) do false -> Logger.error("CSSEx Watcher: #{path} doesn't exist, retrying in 3secs") Process.send_after(self(), {:retry_watchers, [path]}, 3000) acc true -> {:ok, pid} = FileSystem.start_link(dirs: [path]) FileSystem.subscribe(pid) acc |> Map.put(path, pid) |> Map.put(pid, path) end end end end) %{data | watchers: new_watchers} end @doc false def parse_file(path, final_file, self_pid) do case CSSEx.Parser.parse_file(nil, Path.dirname(path), Path.basename(path), final_file) do {:ok, parser, _} -> send(self_pid, {:parsed, parser}) {:error, parser} -> send(self_pid, {:parsed, {path, parser}}) end end @doc false # TODO check use cases with expand, perhaps it's not warranted? # when it's an absolute path probably not def assemble_path(<<"/", _::binary>> = path, _cwd), do: Path.expand(path) # but here yes, because files through plain "imports" in css/cssex might refer to relative paths, such as those in node_modules but also others using relative paths to indicate their source def assemble_path(path, cwd) do Path.join([cwd, path]) |> Path.expand() end end
31.334021
244
0.580378
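The CSSEx moduledoc above pipes make_config/2 into start_link/1; a hedged sketch of the same flow placed under a supervisor, reusing the :yourapp_web placeholder from the record's own docs:

children = [
  %{
    id: CSSEx,
    start:
      {CSSEx, :start_link,
       [
         :yourapp_web
         |> Application.get_env(CSSEx)
         |> CSSEx.make_config(Application.app_dir(:yourapp_web))
       ]}
  }
]

# Starts the CSSEx file watcher alongside the rest of the application.
Supervisor.start_link(children, strategy: :one_for_one)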
f7975c4f8a2eb187b5bcaa36903d7e20ae43e04d
2,116
ex
Elixir
clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/row_dimensions.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
1
2021-12-20T03:40:53.000Z
2021-12-20T03:40:53.000Z
clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/row_dimensions.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
1
2020-08-18T00:11:23.000Z
2020-08-18T00:44:16.000Z
clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/row_dimensions.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.AdExchangeBuyer.V2beta1.Model.RowDimensions do @moduledoc """ A response may include multiple rows, breaking down along various dimensions. Encapsulates the values of all dimensions for a given row. ## Attributes * `publisherIdentifier` (*type:* `String.t`, *default:* `nil`) - The publisher identifier for this row, if a breakdown by [BreakdownDimension.PUBLISHER_IDENTIFIER](https://developers.google.com/authorized-buyers/apis/reference/rest/v2beta1/bidders.accounts.filterSets#FilterSet.BreakdownDimension) was requested. * `timeInterval` (*type:* `GoogleApi.AdExchangeBuyer.V2beta1.Model.TimeInterval.t`, *default:* `nil`) - The time interval that this row represents. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :publisherIdentifier => String.t() | nil, :timeInterval => GoogleApi.AdExchangeBuyer.V2beta1.Model.TimeInterval.t() | nil } field(:publisherIdentifier) field(:timeInterval, as: GoogleApi.AdExchangeBuyer.V2beta1.Model.TimeInterval) end defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.RowDimensions do def decode(value, options) do GoogleApi.AdExchangeBuyer.V2beta1.Model.RowDimensions.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.RowDimensions do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
42.32
316
0.76276
f797ff12fe2973890a6f331a7c3a2a53ecbbb176
2,173
exs
Elixir
config/prod.exs
wsmoak/elm_time
1f12d8d3267c88a1323a8e5fbae722e1fb392e81
[ "MIT" ]
2
2016-03-08T05:26:58.000Z
2016-05-03T15:54:19.000Z
config/prod.exs
wsmoak/elm_time
1f12d8d3267c88a1323a8e5fbae722e1fb392e81
[ "MIT" ]
null
null
null
config/prod.exs
wsmoak/elm_time
1f12d8d3267c88a1323a8e5fbae722e1fb392e81
[ "MIT" ]
null
null
null
use Mix.Config # For production, we configure the host to read the PORT # from the system environment. Therefore, you will need # to set PORT=80 before running your server. # # You should also configure the url host to something # meaningful, we use this information when generating URLs. # # Finally, we also include the path to a manifest # containing the digested version of static files. This # manifest is generated by the mix phoenix.digest task # which you typically run after static files are built. config :elm_time, ElmTime.Endpoint, http: [port: {:system, "PORT"}], url: [host: "example.com", port: 80], cache_static_manifest: "priv/static/manifest.json" # Do not print debug messages in production config :logger, level: :info # ## SSL Support # # To get SSL working, you will need to add the `https` key # to the previous section and set your `:url` port to 443: # # config :elm_time, ElmTime.Endpoint, # ... # url: [host: "example.com", port: 443], # https: [port: 443, # keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"), # certfile: System.get_env("SOME_APP_SSL_CERT_PATH")] # # Where those two env variables return an absolute path to # the key and cert in disk or a relative path inside priv, # for example "priv/ssl/server.key". # # We also recommend setting `force_ssl`, ensuring no data is # ever sent via http, always redirecting to https: # # config :elm_time, ElmTime.Endpoint, # force_ssl: [hsts: true] # # Check `Plug.SSL` for all available options in `force_ssl`. # ## Using releases # # If you are doing OTP releases, you need to instruct Phoenix # to start the server for all endpoints: # # config :phoenix, :serve_endpoints, true # # Alternatively, you can configure exactly which server to # start per endpoint: # # config :elm_time, ElmTime.Endpoint, server: true # # You will also need to set the application root to `.` in order # for the new static assets to be served after a hot upgrade: # # config :elm_time, ElmTime.Endpoint, root: "." # Finally import the config/prod.secret.exs # which should be versioned separately. import_config "prod.secret.exs"
32.924242
67
0.712839
f7980618105518aa0fb47b9b270f49943d7e1634
341
ex
Elixir
elixir/concurrency-model/state-actor/parallel.ex
lijiansong/lang
e255709da2b12e09dea45f86d54f77a19b96f13b
[ "WTFPL" ]
1
2020-01-09T03:22:09.000Z
2020-01-09T03:22:09.000Z
elixir/concurrency-model/state-actor/parallel.ex
lijiansong/lang
e255709da2b12e09dea45f86d54f77a19b96f13b
[ "WTFPL" ]
null
null
null
elixir/concurrency-model/state-actor/parallel.ex
lijiansong/lang
e255709da2b12e09dea45f86d54f77a19b96f13b
[ "WTFPL" ]
null
null
null
defmodule Parallel do def map(collection, fun) do parent = self() processes = Enum.map(collection, fn(e) -> spawn_link(fn() -> send(parent, {self(), fun.(e)}) end) end) Enum.map(processes, fn(pid) -> receive do {^pid, result} -> result end end) end end
18.944444
45
0.504399
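A usage sketch for the Parallel.map/2 defined above; each element is mapped in its own linked process and, because results are collected by matching on each pid in turn, they come back in input order:

Parallel.map(1..5, fn n -> n * n end)
# => [1, 4, 9, 16, 25]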
f7980b113a3e42605fcdc020a4e6cdb9d545ca14
166
exs
Elixir
config/test.exs
yknx4/event_bus
49027b459afc325ebf71a1e5001fb8718b4e7d80
[ "MIT" ]
557
2018-01-24T13:34:57.000Z
2022-03-31T20:19:09.000Z
config/test.exs
yknx4/event_bus
49027b459afc325ebf71a1e5001fb8718b4e7d80
[ "MIT" ]
62
2018-02-05T05:14:11.000Z
2022-02-26T13:04:24.000Z
config/test.exs
yknx4/event_bus
49027b459afc325ebf71a1e5001fb8718b4e7d80
[ "MIT" ]
37
2018-02-23T16:49:15.000Z
2021-09-03T18:11:06.000Z
use Mix.Config config :event_bus, topics: [:metrics_received, :metrics_summed], ttl: 30_000_000, time_unit: :microsecond, id_generator: EventBus.Util.Base62
20.75
47
0.759036
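The test config above registers two topics for the event_bus library; a hedged sketch of publishing to one of them, assuming the library's EventBus.notify/1 and %EventBus.Model.Event{} struct, with illustrative id and data values:

EventBus.notify(%EventBus.Model.Event{
  id: "metric-1",
  topic: :metrics_received,
  data: %{value: 42}
})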
f7985a81fd27ba1bd37fbb89bd92b28397a22095
1,670
ex
Elixir
clients/dfa_reporting/lib/google_api/dfa_reporting/v35/model/placement_tag.ex
renovate-bot/elixir-google-api
1da34cd39b670c99f067011e05ab90af93fef1f6
[ "Apache-2.0" ]
1
2021-12-20T03:40:53.000Z
2021-12-20T03:40:53.000Z
clients/dfa_reporting/lib/google_api/dfa_reporting/v35/model/placement_tag.ex
swansoffiee/elixir-google-api
9ea6d39f273fb430634788c258b3189d3613dde0
[ "Apache-2.0" ]
1
2020-08-18T00:11:23.000Z
2020-08-18T00:44:16.000Z
clients/dfa_reporting/lib/google_api/dfa_reporting/v35/model/placement_tag.ex
dazuma/elixir-google-api
6a9897168008efe07a6081d2326735fe332e522c
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.DFAReporting.V35.Model.PlacementTag do @moduledoc """ Placement Tag ## Attributes * `placementId` (*type:* `String.t`, *default:* `nil`) - Placement ID * `tagDatas` (*type:* `list(GoogleApi.DFAReporting.V35.Model.TagData.t)`, *default:* `nil`) - Tags generated for this placement. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :placementId => String.t() | nil, :tagDatas => list(GoogleApi.DFAReporting.V35.Model.TagData.t()) | nil } field(:placementId) field(:tagDatas, as: GoogleApi.DFAReporting.V35.Model.TagData, type: :list) end defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V35.Model.PlacementTag do def decode(value, options) do GoogleApi.DFAReporting.V35.Model.PlacementTag.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V35.Model.PlacementTag do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
33.4
132
0.728743
f798627cbd5ed7321f55e6094f01dc7af290d4c0
838
ex
Elixir
apps/rig_inbound_gateway/lib/rig_inbound_gateway/request_logger/console.ex
arana3/reactive-interaction-gateway
793648bcc5b8b05fc53df1f5f97818fb40ca84be
[ "Apache-2.0" ]
null
null
null
apps/rig_inbound_gateway/lib/rig_inbound_gateway/request_logger/console.ex
arana3/reactive-interaction-gateway
793648bcc5b8b05fc53df1f5f97818fb40ca84be
[ "Apache-2.0" ]
132
2018-11-26T14:00:54.000Z
2022-03-11T04:17:54.000Z
apps/rig_inbound_gateway/lib/rig_inbound_gateway/request_logger/console.ex
arana3/reactive-interaction-gateway
793648bcc5b8b05fc53df1f5f97818fb40ca84be
[ "Apache-2.0" ]
null
null
null
defmodule RigInboundGateway.RequestLogger.Console do @moduledoc """ Example request logger implementation. """ @behaviour RigInboundGateway.RequestLogger @impl RigInboundGateway.RequestLogger @spec log_call(Proxy.endpoint(), Proxy.api_definition(), %Plug.Conn{}) :: :ok def log_call( %{"secured" => true} = endpoint, %{"auth_type" => "jwt"} = api_definition, _conn ) do IO.puts("CALL: #{endpoint_desc(endpoint)} => #{api_definition["proxy"]["target_url"]}") :ok end def log_call(endpoint, api_definition, _conn) do IO.puts( "UNAUTHENTICATED CALL: #{endpoint_desc(endpoint)} => #{ api_definition["proxy"]["target_url"] }" ) :ok end defp endpoint_desc(endpoint) do "[#{endpoint["id"]}] #{endpoint["method"]} #{endpoint["path"]}" end end
25.393939
91
0.638425
f79862f69421e97c728d56a249fcb7f3208666fc
179
ex
Elixir
web/views/tp_shared_group_view.ex
zombalo/cgrates_web_jsonapi
47845be4311839fe180cc9f2c7c6795649da4430
[ "MIT" ]
null
null
null
web/views/tp_shared_group_view.ex
zombalo/cgrates_web_jsonapi
47845be4311839fe180cc9f2c7c6795649da4430
[ "MIT" ]
null
null
null
web/views/tp_shared_group_view.ex
zombalo/cgrates_web_jsonapi
47845be4311839fe180cc9f2c7c6795649da4430
[ "MIT" ]
null
null
null
defmodule CgratesWebJsonapi.TpSharedGroupView do use CgratesWebJsonapi.Web, :view use JaSerializer.PhoenixView attributes ~w[tpid tag account strategy rating_subject]a end
25.571429
58
0.832402
f79863e61186ddaf1e00be1c69afd6944868adad
1,904
ex
Elixir
lib/liveview_todos_web.ex
mwindholtz/liveview_todos
1f6a2e576be4a41f49d0bfe2b01da97d268d0d0d
[ "Apache-2.0" ]
null
null
null
lib/liveview_todos_web.ex
mwindholtz/liveview_todos
1f6a2e576be4a41f49d0bfe2b01da97d268d0d0d
[ "Apache-2.0" ]
2
2021-03-09T14:03:26.000Z
2021-05-10T06:02:53.000Z
lib/liveview_todos_web.ex
mwindholtz/liveview_todos
1f6a2e576be4a41f49d0bfe2b01da97d268d0d0d
[ "Apache-2.0" ]
null
null
null
defmodule LiveviewTodosWeb do @moduledoc """ The entrypoint for defining your web interface, such as controllers, views, channels and so on. This can be used in your application as: use LiveviewTodosWeb, :controller use LiveviewTodosWeb, :view The definitions below will be executed for every view, controller, etc, so keep them short and clean, focused on imports, uses and aliases. Do NOT define functions inside the quoted expressions below. Instead, define any helper function in modules and import those modules here. """ def controller do quote do use Phoenix.Controller, namespace: LiveviewTodosWeb import Plug.Conn import LiveviewTodosWeb.Gettext import Phoenix.LiveView.Controller, only: [live_render: 3] alias LiveviewTodosWeb.Router.Helpers, as: Routes end end def view do quote do use Phoenix.View, root: "lib/liveview_todos_web/templates", namespace: LiveviewTodosWeb # Import convenience functions from controllers import Phoenix.Controller, only: [get_flash: 1, get_flash: 2, view_module: 1] # Use all HTML functionality (forms, tags, etc) use Phoenix.HTML import LiveviewTodosWeb.ErrorHelpers import LiveviewTodosWeb.Gettext import Phoenix.LiveView, only: [live_render: 2, live_render: 3, live_link: 1, live_link: 2] alias LiveviewTodosWeb.Router.Helpers, as: Routes end end def router do quote do use Phoenix.Router import Plug.Conn import Phoenix.Controller import Phoenix.LiveView.Router end end def channel do quote do use Phoenix.Channel import LiveviewTodosWeb.Gettext end end @doc """ When used, dispatch to the appropriate controller/view/etc. """ defmacro __using__(which) when is_atom(which) do apply(__MODULE__, which, []) end end
25.72973
97
0.703256
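The entrypoint module above is consumed via `use LiveviewTodosWeb, :view` and friends; a minimal sketch of a view module built on it, with an illustrative module name:

defmodule LiveviewTodosWeb.PageView do
  # Pulls in Phoenix.View, Phoenix.HTML, gettext and routing helpers as defined in view/0 above.
  use LiveviewTodosWeb, :view
end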
f79871bcbdf7b8d375acd126841991de2d2f3a6b
223
ex
Elixir
implementations/elixir/lib/ockam.ex
miedziak/ockam
694587c9650e44a54df6933032810dcd80b88582
[ "Apache-2.0" ]
null
null
null
implementations/elixir/lib/ockam.ex
miedziak/ockam
694587c9650e44a54df6933032810dcd80b88582
[ "Apache-2.0" ]
null
null
null
implementations/elixir/lib/ockam.ex
miedziak/ockam
694587c9650e44a54df6933032810dcd80b88582
[ "Apache-2.0" ]
null
null
null
defmodule Ockam do @on_load :init def init do path = Application.app_dir(:ockam, "priv/ockam") |> String.to_charlist :ok = :erlang.load_nif(path, 0) end def random() do exit(:nif_not_loaded) end end
17.153846
74
0.663677
f798a8725d0b38020022e758eb0935f2da2ce8e6
2,103
ex
Elixir
clients/content/lib/google_api/content/v21/model/account_return_carrier.ex
mcrumm/elixir-google-api
544f22797cec52b3a23dfb6e39117f0018448610
[ "Apache-2.0" ]
null
null
null
clients/content/lib/google_api/content/v21/model/account_return_carrier.ex
mcrumm/elixir-google-api
544f22797cec52b3a23dfb6e39117f0018448610
[ "Apache-2.0" ]
null
null
null
clients/content/lib/google_api/content/v21/model/account_return_carrier.ex
mcrumm/elixir-google-api
544f22797cec52b3a23dfb6e39117f0018448610
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.Content.V21.Model.AccountReturnCarrier do @moduledoc """ The return carrier information. This service is designed for merchants enrolled in the Buy on Google program. ## Attributes * `carrierAccountId` (*type:* `String.t`, *default:* `nil`) - Output only. Immutable. The Google-provided unique carrier ID, used to update the resource. * `carrierAccountName` (*type:* `String.t`, *default:* `nil`) - Name of the carrier account. * `carrierAccountNumber` (*type:* `String.t`, *default:* `nil`) - Number of the carrier account. * `carrierCode` (*type:* `String.t`, *default:* `nil`) - The carrier code enum. Accepts the values FEDEX or UPS. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :carrierAccountId => String.t(), :carrierAccountName => String.t(), :carrierAccountNumber => String.t(), :carrierCode => String.t() } field(:carrierAccountId) field(:carrierAccountName) field(:carrierAccountNumber) field(:carrierCode) end defimpl Poison.Decoder, for: GoogleApi.Content.V21.Model.AccountReturnCarrier do def decode(value, options) do GoogleApi.Content.V21.Model.AccountReturnCarrier.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.Content.V21.Model.AccountReturnCarrier do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
37.553571
157
0.721826
f798bc6b4f2616843d0fd1bb61e0294c4e4ee1d0
716
ex
Elixir
lib/telemetry_metrics/last_value.ex
adkron/telemetry_metrics
bb12a95e82cc6a0ac1571aecd4248b8c3a134655
[ "Apache-2.0" ]
null
null
null
lib/telemetry_metrics/last_value.ex
adkron/telemetry_metrics
bb12a95e82cc6a0ac1571aecd4248b8c3a134655
[ "Apache-2.0" ]
null
null
null
lib/telemetry_metrics/last_value.ex
adkron/telemetry_metrics
bb12a95e82cc6a0ac1571aecd4248b8c3a134655
[ "Apache-2.0" ]
null
null
null
defmodule Telemetry.Metrics.LastValue do @moduledoc """ Defines a specification of last value metric. """ alias Telemetry.Metrics defstruct [ :name, :event_name, :measurement, :tags, :tag_values, :description, :unit, :reporter_options ] @type t :: %__MODULE__{ name: Metrics.normalized_metric_name(), event_name: :telemetry.event_name(), measurement: Metrics.measurement(), tags: Metrics.tags(), tag_values: (:telemetry.event_metadata() -> :telemetry.event_metadata()), description: Metrics.description(), unit: Metrics.unit(), reporter_options: Metrics.reporter_options() } end
23.866667
83
0.622905
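The struct above is normally not built by hand; a hedged sketch assuming Telemetry.Metrics.last_value/2, the usual constructor in the telemetry_metrics library, with an illustrative VM metric name:

Telemetry.Metrics.last_value(
  "vm.memory.total",
  unit: {:byte, :kilobyte},
  description: "Total VM memory, reported as a last-value gauge"
)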
f798c93dc203b179864db6017e5c75ccd62319a9
4,093
ex
Elixir
clients/fact_check_tools/lib/google_api/fact_check_tools/v1alpha1/model/google_factchecking_factchecktools_v1alpha1_claim_review_markup_page.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
1
2021-12-20T03:40:53.000Z
2021-12-20T03:40:53.000Z
clients/fact_check_tools/lib/google_api/fact_check_tools/v1alpha1/model/google_factchecking_factchecktools_v1alpha1_claim_review_markup_page.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
1
2020-08-18T00:11:23.000Z
2020-08-18T00:44:16.000Z
clients/fact_check_tools/lib/google_api/fact_check_tools/v1alpha1/model/google_factchecking_factchecktools_v1alpha1_claim_review_markup_page.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.FactCheckTools.V1alpha1.Model.GoogleFactcheckingFactchecktoolsV1alpha1ClaimReviewMarkupPage do @moduledoc """ Holds one or more instances of `ClaimReview` markup for a webpage. ## Attributes * `claimReviewAuthor` (*type:* `GoogleApi.FactCheckTools.V1alpha1.Model.GoogleFactcheckingFactchecktoolsV1alpha1ClaimReviewAuthor.t`, *default:* `nil`) - Info about the author of this claim review. Similar to the above, semantically these are page-level fields, and each `ClaimReview` on this page will contain the same values. * `claimReviewMarkups` (*type:* `list(GoogleApi.FactCheckTools.V1alpha1.Model.GoogleFactcheckingFactchecktoolsV1alpha1ClaimReviewMarkup.t)`, *default:* `nil`) - A list of individual claim reviews for this page. Each item in the list corresponds to one `ClaimReview` element. * `name` (*type:* `String.t`, *default:* `nil`) - The name of this `ClaimReview` markup page resource, in the form of `pages/{page_id}`. Except for update requests, this field is output-only and should not be set by the user. * `pageUrl` (*type:* `String.t`, *default:* `nil`) - The URL of the page associated with this `ClaimReview` markup. While every individual `ClaimReview` has its own URL field, semantically this is a page-level field, and each `ClaimReview` on this page will use this value unless individually overridden. Corresponds to `ClaimReview.url` * `publishDate` (*type:* `String.t`, *default:* `nil`) - The date when the fact check was published. Similar to the URL, semantically this is a page-level field, and each `ClaimReview` on this page will contain the same value. Corresponds to `ClaimReview.datePublished` * `versionId` (*type:* `String.t`, *default:* `nil`) - The version ID for this markup. Except for update requests, this field is output-only and should not be set by the user. 
""" use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :claimReviewAuthor => GoogleApi.FactCheckTools.V1alpha1.Model.GoogleFactcheckingFactchecktoolsV1alpha1ClaimReviewAuthor.t() | nil, :claimReviewMarkups => list( GoogleApi.FactCheckTools.V1alpha1.Model.GoogleFactcheckingFactchecktoolsV1alpha1ClaimReviewMarkup.t() ) | nil, :name => String.t() | nil, :pageUrl => String.t() | nil, :publishDate => String.t() | nil, :versionId => String.t() | nil } field(:claimReviewAuthor, as: GoogleApi.FactCheckTools.V1alpha1.Model.GoogleFactcheckingFactchecktoolsV1alpha1ClaimReviewAuthor ) field(:claimReviewMarkups, as: GoogleApi.FactCheckTools.V1alpha1.Model.GoogleFactcheckingFactchecktoolsV1alpha1ClaimReviewMarkup, type: :list ) field(:name) field(:pageUrl) field(:publishDate) field(:versionId) end defimpl Poison.Decoder, for: GoogleApi.FactCheckTools.V1alpha1.Model.GoogleFactcheckingFactchecktoolsV1alpha1ClaimReviewMarkupPage do def decode(value, options) do GoogleApi.FactCheckTools.V1alpha1.Model.GoogleFactcheckingFactchecktoolsV1alpha1ClaimReviewMarkupPage.decode( value, options ) end end defimpl Poison.Encoder, for: GoogleApi.FactCheckTools.V1alpha1.Model.GoogleFactcheckingFactchecktoolsV1alpha1ClaimReviewMarkupPage do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
48.72619
341
0.7398
f798d33d466149b5210ba5e8612960dcefc2f07f
2,010
exs
Elixir
test/elixir_boilerplate_web/errors_test.exs
Amirhat/test-elixir-boilerplate
2429a061d0b7b51ac838d15ffc629a667ed9e941
[ "BSD-3-Clause" ]
854
2019-03-18T19:13:58.000Z
2022-03-30T01:47:30.000Z
test/elixir_boilerplate_web/errors_test.exs
Amirhat/test-elixir-boilerplate
2429a061d0b7b51ac838d15ffc629a667ed9e941
[ "BSD-3-Clause" ]
167
2019-03-18T21:23:28.000Z
2022-03-31T19:07:34.000Z
test/elixir_boilerplate_web/errors_test.exs
Amirhat/test-elixir-boilerplate
2429a061d0b7b51ac838d15ffc629a667ed9e941
[ "BSD-3-Clause" ]
63
2019-03-28T14:19:11.000Z
2022-02-15T17:22:14.000Z
defmodule ElixirBoilerplateWeb.ErrorsTest do use ElixirBoilerplate.DataCase, async: true alias ElixirBoilerplateWeb.Errors defmodule UserRole do use Ecto.Schema import Ecto.Changeset embedded_schema do field(:type, :string) timestamps() end def changeset(%__MODULE__{} = user_role, params) do user_role |> cast(params, [:type]) |> validate_required([:type]) |> validate_inclusion(:type, ~w(admin moderator member)) end end defmodule User do use Ecto.Schema import Ecto.Changeset schema "users" do field(:email, :string) field(:nicknames, {:array, :string}) embeds_one(:single_role, UserRole) embeds_many(:multiple_roles, UserRole) timestamps() end def changeset(%__MODULE__{} = user, params) do user |> cast(params, [:email, :nicknames]) |> cast_embed(:single_role) |> cast_embed(:multiple_roles) |> validate_length(:email, is: 10) |> validate_length(:nicknames, min: 1) |> validate_format(:email, ~r/@/) end end test "error_messages/1 without errors should return an empty string" do html = %User{} |> change() |> changeset_to_error_messages() assert html == "" end test "error_messages/1 should render error messages on changeset" do html = %User{} |> User.changeset(%{"email" => "foo", "nicknames" => [], "single_role" => %{"type" => "bar"}, "multiple_roles" => [%{"type" => ""}]}) |> changeset_to_error_messages() assert html =~ "<li>email has invalid format</li>" assert html =~ "<li>email should be 10 characters</li>" assert html =~ "<li>multiple_roles.type can’t be blank</li>" assert html =~ "<li>nicknames should have at least 1 item</li>" assert html =~ "<li>single_role.type is invalid</li>" end defp changeset_to_error_messages(changeset) do changeset |> Errors.error_messages() |> Phoenix.HTML.safe_to_string() end end
25.443038
139
0.634328
f798ffc8760f0bac4996a365948431b1d5e04338
3,284
exs
Elixir
test/matcher_test.exs
btedev/pair2
eb7d8f78a13046461f12ff16372e7dc03175c2cc
[ "MIT" ]
null
null
null
test/matcher_test.exs
btedev/pair2
eb7d8f78a13046461f12ff16372e7dc03175c2cc
[ "MIT" ]
null
null
null
test/matcher_test.exs
btedev/pair2
eb7d8f78a13046461f12ff16372e7dc03175c2cc
[ "MIT" ]
null
null
null
defmodule MatcherTest do use ExUnit.Case alias Pair2.Matcher alias Pair2.MatchRule setup do basic = [%{id: 1, amount: 1.0, date: ~D[2016-01-01]}] # When matching on both mdn and date, a correct match # will require conflict resolution. lcell1 will initially match # to rcell1 but lcell3 is a better fit for it. # lcell1 should ultimately be matched to rcell3. # Correct: # { lcell1, rcell3 } # { lcell3, rcell1 } lcell1 = %{id: "l1", mdn: "1111111111", date: ~D[2016-04-01]} lcell2 = %{id: "l2", mdn: "2222222222", date: ~D[2016-04-01]} lcell3 = %{id: "l3", mdn: "1111111111", date: ~D[2016-04-25]} rcell1 = %{id: "r1", mdn: "1111111111", date: ~D[2016-04-24]} rcell2 = %{id: "r2", mdn: "3333333333", date: ~D[2016-04-02]} rcell3 = %{id: "r3", mdn: "1111111111", date: ~D[2016-04-01]} {:ok, basic: basic, lcells: [lcell1, lcell2, lcell3], rcells: [rcell1, rcell2, rcell3]} end test "it requires at least one rule to be indexed" do assert_raise RuntimeError, "At least one attribute must be indexed", fn -> Matcher.match(nil, nil, [], 1.0) end end test "it matches two lists of maps based on multiple match rules", %{basic: data} do rule_amount = %MatchRule{left_attr: :amount, right_attr: :amount, indexed: true} rule_date = %MatchRule{left_attr: :date, right_attr: :date} {:ok, matches} = Matcher.match(data, data, [rule_amount, rule_date], 1.0) assert 1 == Enum.count(matches) [{_match_l, _match_r, score}] = matches assert 2.0 == score {:ok, matches2} = Matcher.match(data, data, [rule_amount, rule_date], 3.0) assert 0 == Enum.count(matches2) end test "it resolves conflicts when there are multiple match options", %{lcells: lcells, rcells: rcells} do rule_mdn = %MatchRule{left_attr: :mdn, right_attr: :mdn, indexed: true} rule_date = %MatchRule{left_attr: :date, right_attr: :date, min_match: 0.0} {:ok, matches} = Matcher.match(lcells, rcells, [rule_mdn, rule_date], 1.1) assert 2 == Enum.count(matches) {"l3", "r1", s0} = Enum.at(matches, 0) assert s0 > 1.96 && s0 < 1.97 {"l1", "r3", s1} = Enum.at(matches, 1) assert 2.0 == s1 end test "conflict resolution test 1" do matches = %{ "l1" => [{"r1", 3.0}, {"r2", 2.0}], "l2" => [{"r2", 1.0}] } final = Matcher.resolve(matches) assert 2 == Enum.count(final) assert {"l1", "r1", 3.0} == Enum.at(final, 0) assert {"l2", "r2", 1.0} == Enum.at(final, 1) end test "conflict resolution test 2" do matches = %{ "l1" => [{"r2", 2.0}, {"r3", 1.0}], "l2" => [{"r2", 3.0}], "l3" => [{"r3", 2.0}] } final = Matcher.resolve(matches) assert 2 == Enum.count(final) refute Enum.any?(final, fn({l, _r, _s}) -> l == "l1" end) assert Enum.any?(final, fn({l, _r, _s}) -> l == "l2" end) assert Enum.any?(final, fn({l, _r, _s}) -> l == "l3" end) end test "end state test" do matches = %{ "l1" => [{"r2", 2.0}, {"r3", 1.0}], "l2" => [{"r2", 3.0}], "l3" => [] } final = Matcher.resolve(matches) assert 2 == Enum.count(final) end end
34.568421
106
0.564555
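A condensed sketch of the Pair2.Matcher calls exercised in the tests above; at least one rule must be indexed, and the left/right data is illustrative:

alias Pair2.{Matcher, MatchRule}

left = [%{id: "l1", amount: 10.0, date: ~D[2016-01-01]}]
right = [%{id: "r1", amount: 10.0, date: ~D[2016-01-01]}]
rule_amount = %MatchRule{left_attr: :amount, right_attr: :amount, indexed: true}
rule_date = %MatchRule{left_attr: :date, right_attr: :date}

# With identical attribute values the pair matches and a combined score is returned.
{:ok, [{"l1", "r1", _score}]} = Matcher.match(left, right, [rule_amount, rule_date], 1.0)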
f799014fc8e0d4c8da8cb53bd38c25ab42f9d9e6
1,679
ex
Elixir
clients/cloud_iot/lib/google_api/cloud_iot/v1/model/test_iam_permissions_request.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
1
2021-12-20T03:40:53.000Z
2021-12-20T03:40:53.000Z
clients/cloud_iot/lib/google_api/cloud_iot/v1/model/test_iam_permissions_request.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
1
2020-08-18T00:11:23.000Z
2020-08-18T00:44:16.000Z
clients/cloud_iot/lib/google_api/cloud_iot/v1/model/test_iam_permissions_request.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.CloudIot.V1.Model.TestIamPermissionsRequest do @moduledoc """ Request message for `TestIamPermissions` method. ## Attributes * `permissions` (*type:* `list(String.t)`, *default:* `nil`) - The set of permissions to check for the `resource`. Permissions with wildcards (such as '*' or 'storage.*') are not allowed. For more information see [IAM Overview](https://cloud.google.com/iam/docs/overview#permissions). """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :permissions => list(String.t()) | nil } field(:permissions, type: :list) end defimpl Poison.Decoder, for: GoogleApi.CloudIot.V1.Model.TestIamPermissionsRequest do def decode(value, options) do GoogleApi.CloudIot.V1.Model.TestIamPermissionsRequest.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.CloudIot.V1.Model.TestIamPermissionsRequest do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
35.723404
288
0.743895
f79907204f19026b02351e680675f5b72c05d3e4
3,087
ex
Elixir
lib/pow/phoenix/views/view_helpers.ex
abartier/pow
58a3d082da093e2dc7f07825a950ee133204813f
[ "Unlicense", "MIT" ]
null
null
null
lib/pow/phoenix/views/view_helpers.ex
abartier/pow
58a3d082da093e2dc7f07825a950ee133204813f
[ "Unlicense", "MIT" ]
null
null
null
lib/pow/phoenix/views/view_helpers.ex
abartier/pow
58a3d082da093e2dc7f07825a950ee133204813f
[ "Unlicense", "MIT" ]
null
null
null
defmodule Pow.Phoenix.ViewHelpers do @moduledoc """ Module that renders views. By default, the controller views and templates in this library will be used, and the layout view will be based on the module namespace of the Endpoint module. By setting the `:web_module` key in config, the controller and layout views can be used from this context app. So if you set up your endpoint like this: defmodule MyAppWeb.Endpoint do plug Pow.Plug.Session end Only `MyAppWeb.LayoutView` will be used from your app. However, if you set up the endpoint with a `:web_module` key: defmodule MyAppWeb.Endpoint do plug Pow.Plug.Session, web_module: MyAppWeb end The following modules are will be used from your app: * `MyAppWeb.LayoutView` * `MyAppWeb.Pow.RegistrationView` * `MyAppWeb.Pow.SessionView` And also the following templates has to exist in `lib/my_project_web/templates/pow`: * `registration/new.html.eex` * `registration/edit.html.eex` * `session/new.html.eex` """ alias Phoenix.Controller alias Plug.Conn alias Pow.{Config, Plug} @doc """ Sets the view layout based on the pow configuration. """ @spec layout(Conn.t()) :: Conn.t() def layout(conn) do config = Plug.fetch_config(conn) web_module = Config.get(config, :web_module) view = view(conn, web_module) layout = layout(conn, web_module) conn |> Controller.put_view(view) |> Controller.put_layout(layout) end defp view(conn, web_module) do conn |> Controller.view_module() |> build_view_module(web_module) end defp layout(conn, web_module) do conn |> Controller.layout() |> build_layout(web_module || web_base(conn)) end defp web_base(conn) do conn |> Controller.endpoint_module() |> split_module() |> Enum.reverse() |> case do ["Endpoint" | base] -> base base -> base end |> Enum.reverse() end @doc """ Generates the view module atom. """ @spec build_view_module(module(), module() | nil) :: module() def build_view_module(module, nil), do: module def build_view_module(module, web_module) when is_atom(web_module) do build_view_module(module, split_module(web_module)) end def build_view_module(module, base) do base = pow_base(module, base) module |> split_module() |> build_module(base) end defp pow_base(module, base) do [pow_module | _rest] = Module.split(module) base ++ [pow_module] end defp build_layout({view, template}, web_module) when is_atom(web_module) do build_layout({view, template}, split_module(web_module)) end defp build_layout({view, template}, base) do view = view |> split_module() |> build_module(base) {view, template} end defp build_module([_base, "Phoenix" | rest], base) do base |> Enum.concat(rest) |> Module.concat() end defp split_module(nil), do: nil defp split_module(module) when is_atom(module), do: Module.split(module) end
24.895161
79
0.667638
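The namespace rewriting described in the moduledoc above can be checked directly with build_view_module/2; Pow.Phoenix.SessionView and MyAppWeb are assumed names mirroring the example in the record:

Pow.Phoenix.ViewHelpers.build_view_module(Pow.Phoenix.SessionView, MyAppWeb)
# => MyAppWeb.Pow.SessionView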
f7992b0bdc9751a1cf0bbb66e3420a9789824392
1,652
ex
Elixir
lib/harald/protocols.ex
smartrent/harald
158a69bc2b70b3f51d67bd935d223a42a3633d68
[ "MIT" ]
3
2020-08-07T02:09:09.000Z
2020-08-28T12:25:48.000Z
lib/harald/protocols.ex
smartrent/harald
158a69bc2b70b3f51d67bd935d223a42a3633d68
[ "MIT" ]
null
null
null
lib/harald/protocols.ex
smartrent/harald
158a69bc2b70b3f51d67bd935d223a42a3633d68
[ "MIT" ]
null
null
null
defprotocol HCI.Serializable do @doc """ Serialize an HCI data structure as a binary """ def serialize(hci_struct) end defprotocol HCI.Deserializable do @doc """ Deserialize a binary into HCI data structures """ def deserialize(bin) end defprotocol HCI.CommandComplete.ReturnParameters do @doc """ Protocol for handling command return_parameters in CommandComplete event This is mainly to allow us to do function generation at compile time for handling this parsing for specific commands. """ def parse(cc_struct) end defimpl HCI.Deserializable, for: BitString do # Define deserialize/1 for HCI.Command modules for mod <- Harald.HCI.Command.__modules__(), opcode = mod.__opcode__() do def deserialize(unquote(opcode) <> _ = bin) do unquote(mod).deserialize(bin) end end def deserialize(bin) do error = """ Unable to deserialize #{inspect(bin)} If this is unexpected, then be sure that the target deserialized module is defined in the @modules attribute of the appropiate type: * Harald.HCI.Command * Harald.HCI.Event """ {:error, error} end end defimpl HCI.CommandComplete.ReturnParameters, for: Harald.HCI.Event.CommandComplete do def parse(cc) do %{cc | return_parameters: do_parse(cc.opcode, cc.return_parameters)} end # Generate return_parameter parsing function for all available command # modules based on the requirements in Harald.HCI.Command behaviour for mod <- Harald.HCI.Command.__modules__(), opcode = mod.__opcode__() do defp do_parse(unquote(opcode), rp_bin) do unquote(mod).return_parameters(rp_bin) end end end
27.081967
86
0.730024
f7994a46388442b3f4be76ba862f8f6e254d93ab
281
ex
Elixir
ex3.ex
kalashnikov/elixir_script
64ecbe5878d7c7cdc5392a1e6b83b08697c8f9b4
[ "MIT" ]
null
null
null
ex3.ex
kalashnikov/elixir_script
64ecbe5878d7c7cdc5392a1e6b83b08697c8f9b4
[ "MIT" ]
null
null
null
ex3.ex
kalashnikov/elixir_script
64ecbe5878d7c7cdc5392a1e6b83b08697c8f9b4
[ "MIT" ]
null
null
null
defmodule Math do
  def square(x) do
    x * x
  end
end

# Square each element of the list.
IO.puts(inspect(Enum.map([1, 2, 3], &Math.square/1)))

# Product of 1..10, starting from an accumulator of 2.
IO.puts(Enum.reduce(1..10, 2, fn i, acc -> acc * i end))

# Sum of the numbers in 1..100 divisible by 3 or 5,
# using a comprehension with a filter.
sum = Enum.sum(for x <- 1..100, rem(x, 3) == 0 or rem(x, 5) == 0, do: x)
IO.puts(sum)
14.789474
57
0.565836
f7994faa8c98b7b05eab9f232be6011cce31fdd9
6,022
exs
Elixir
.credo.exs
fanhero/spreedly-elixir
ee65326a2c7ffccd4683b1be754e4a7db5857ab3
[ "MIT" ]
8
2018-03-09T18:12:26.000Z
2020-08-25T02:29:12.000Z
.credo.exs
fanhero/spreedly-elixir
ee65326a2c7ffccd4683b1be754e4a7db5857ab3
[ "MIT" ]
17
2017-01-05T17:14:51.000Z
2020-02-04T14:48:02.000Z
.credo.exs
fanhero/spreedly-elixir
ee65326a2c7ffccd4683b1be754e4a7db5857ab3
[ "MIT" ]
5
2017-05-01T18:23:54.000Z
2021-03-19T01:34:26.000Z
# This file contains the configuration for Credo and you are probably reading # this after creating it with `mix credo.gen.config`. # # If you find anything wrong or unclear in this file, please report an # issue on GitHub: https://github.com/rrrene/credo/issues # %{ # # You can have as many configs as you like in the `configs:` field. configs: [ %{ # # Run any exec using `mix credo -C <name>`. If no exec name is given # "default" is used. # name: "default", # # These are the files included in the analysis: files: %{ # # You can give explicit globs or simply directories. # In the latter case `**/*.{ex,exs}` will be used. # included: ["lib/", "src/", "web/", "apps/"], excluded: [~r"/_build/", ~r"/deps/"] }, # # If you create your own checks, you must specify the source files for # them here, so they can be loaded by Credo before running the analysis. # requires: [], # # Credo automatically checks for updates, like e.g. Hex does. # You can disable this behaviour below: # check_for_updates: true, # # If you want to enforce a style guide and need a more traditional linting # experience, you can change `strict` to `true` below: # strict: true, # # If you want to use uncolored output by default, you can change `color` # to `false` below: # color: true, # # You can customize the parameters of any check by adding a second element # to the tuple. # # To disable a check put `false` as second element: # # {Credo.Check.Design.DuplicatedCode, false} # checks: [ {Credo.Check.Consistency.ExceptionNames}, {Credo.Check.Consistency.LineEndings}, {Credo.Check.Consistency.ParameterPatternMatching}, {Credo.Check.Consistency.SpaceAroundOperators}, {Credo.Check.Consistency.SpaceInParentheses}, {Credo.Check.Consistency.TabsOrSpaces}, # For some checks, like AliasUsage, you can only customize the priority # Priority values are: `low, normal, high, higher` # {Credo.Check.Design.AliasUsage, priority: :low}, # For others you can set parameters # If you don't want the `setup` and `test` macro calls in ExUnit tests # or the `schema` macro in Ecto schemas to trigger DuplicatedCode, just # set the `excluded_macros` parameter to `[:schema, :setup, :test]`. # {Credo.Check.Design.DuplicatedCode, excluded_macros: []}, # You can also customize the exit_status of each check. # If you don't want TODO comments to cause `mix credo` to fail, just # set this value to 0 (zero). 
# {Credo.Check.Design.TagTODO, exit_status: 2}, {Credo.Check.Design.TagFIXME}, {Credo.Check.Readability.FunctionNames}, {Credo.Check.Readability.LargeNumbers}, {Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 120}, {Credo.Check.Readability.ModuleAttributeNames}, {Credo.Check.Readability.ModuleDoc, false}, {Credo.Check.Readability.ModuleNames}, {Credo.Check.Readability.ParenthesesOnZeroArityDefs}, {Credo.Check.Readability.ParenthesesInCondition}, {Credo.Check.Readability.PredicateFunctionNames}, {Credo.Check.Readability.PreferImplicitTry}, {Credo.Check.Readability.RedundantBlankLines}, {Credo.Check.Readability.StringSigils}, {Credo.Check.Readability.TrailingBlankLine}, {Credo.Check.Readability.TrailingWhiteSpace}, {Credo.Check.Readability.VariableNames}, {Credo.Check.Readability.Semicolons}, {Credo.Check.Readability.SpaceAfterCommas}, {Credo.Check.Refactor.DoubleBooleanNegation}, {Credo.Check.Refactor.CondStatements}, {Credo.Check.Refactor.CyclomaticComplexity}, {Credo.Check.Refactor.FunctionArity, max_arity: 7}, {Credo.Check.Refactor.LongQuoteBlocks}, {Credo.Check.Refactor.MatchInCondition}, {Credo.Check.Refactor.NegatedConditionsInUnless}, {Credo.Check.Refactor.NegatedConditionsWithElse}, {Credo.Check.Refactor.Nesting}, {Credo.Check.Refactor.PipeChainStart}, {Credo.Check.Refactor.UnlessWithElse}, {Credo.Check.Warning.BoolOperationOnSameValues}, {Credo.Check.Warning.IExPry}, {Credo.Check.Warning.IoInspect}, {Credo.Check.Warning.LazyLogging}, {Credo.Check.Warning.OperationOnSameValues}, {Credo.Check.Warning.OperationWithConstantResult}, {Credo.Check.Warning.UnusedEnumOperation}, {Credo.Check.Warning.UnusedFileOperation}, {Credo.Check.Warning.UnusedKeywordOperation}, {Credo.Check.Warning.UnusedListOperation}, {Credo.Check.Warning.UnusedPathOperation}, {Credo.Check.Warning.UnusedRegexOperation}, {Credo.Check.Warning.UnusedStringOperation}, {Credo.Check.Warning.UnusedTupleOperation}, {Credo.Check.Warning.RaiseInsideRescue}, # Controversial and experimental checks (opt-in, just remove `, false`) # {Credo.Check.Refactor.ABCSize, false}, {Credo.Check.Refactor.AppendSingleItem, false}, {Credo.Check.Refactor.VariableRebinding, false}, {Credo.Check.Warning.MapGetUnsafePass, false}, {Credo.Check.Consistency.MultiAliasImportRequireUse, false}, # Deprecated checks (these will be deleted after a grace period) # {Credo.Check.Readability.Specs, false}, {Credo.Check.Warning.NameRedeclarationByAssignment, false}, {Credo.Check.Warning.NameRedeclarationByCase, false}, {Credo.Check.Warning.NameRedeclarationByDef, false}, {Credo.Check.Warning.NameRedeclarationByFn, false}, # Custom checks can be created using `mix credo.gen.check`. # ] } ] }
39.880795
81
0.652773
f7995ab9fbb8856381156b5a286f064e057d5869
378
ex
Elixir
lib/code_santa/advent_of_code.ex
Blond11516/code-santa
0b5ac9a2bad1ce692e8b9f1b94de8eb497488c74
[ "MIT" ]
null
null
null
lib/code_santa/advent_of_code.ex
Blond11516/code-santa
0b5ac9a2bad1ce692e8b9f1b94de8eb497488c74
[ "MIT" ]
null
null
null
lib/code_santa/advent_of_code.ex
Blond11516/code-santa
0b5ac9a2bad1ce692e8b9f1b94de8eb497488c74
[ "MIT" ]
null
null
null
defmodule CodeSanta.AdventOfCode do @base_url "https://adventofcode.com" @spec puzzle_url(integer(), integer()) :: String.t() def puzzle_url(year, day) do Enum.join([@base_url, Integer.to_string(year), "day", Integer.to_string(day)], "/") end @spec to_absolute_url(String.t()) :: String.t() def to_absolute_url(relative_url), do: @base_url <> relative_url end
31.5
87
0.703704
f7997b17e442227caf63fd07c98e0ce924683fee
46
exs
Elixir
test/consul_test.exs
nsdavidson/consulex
6ac7360c4035555da0d0066c1893a7d4d63b1b0b
[ "Apache-2.0" ]
48
2015-03-27T01:29:57.000Z
2021-03-02T04:09:49.000Z
test/consul_test.exs
nsdavidson/consulex
6ac7360c4035555da0d0066c1893a7d4d63b1b0b
[ "Apache-2.0" ]
7
2015-03-11T20:44:33.000Z
2019-03-13T09:02:52.000Z
test/consul_test.exs
nsdavidson/consulex
6ac7360c4035555da0d0066c1893a7d4d63b1b0b
[ "Apache-2.0" ]
29
2015-03-11T20:41:26.000Z
2020-07-01T05:02:01.000Z
defmodule ConsulTest do use ExUnit.Case end
11.5
23
0.804348
f799c75680910172c72b160fb7e353c8ca877061
405
ex
Elixir
lib/rocketpay/accounts/withdraw.ex
RafaelVsc/phx-rocketpay
91435f0539a961691394809058b97a4e1b58fa84
[ "RSA-MD" ]
2
2021-03-01T09:15:57.000Z
2021-03-02T23:30:57.000Z
lib/rocketpay/accounts/withdraw.ex
luizDorval/Rocketpay
b8cafbbb8b65b53596a350897aeee3dc4dd474ed
[ "MIT" ]
9
2021-02-28T20:29:58.000Z
2021-03-26T01:28:01.000Z
lib/rocketpay/accounts/withdraw.ex
luizDorval/Rocketpay
b8cafbbb8b65b53596a350897aeee3dc4dd474ed
[ "MIT" ]
1
2021-03-28T11:56:37.000Z
2021-03-28T11:56:37.000Z
defmodule Rocketpay.Accounts.Withdraw do alias Rocketpay.Accounts.Operation alias Rocketpay.Repo def call(params) do params |> Operation.call(:withdraw) |> run_transaction() end defp run_transaction(multi) do case Repo.transaction(multi) do {:error, _operation, reason, _changes} -> {:error, reason} {:ok, %{withdraw: account}} -> {:ok, account} end end end
21.315789
64
0.671605
f799d034983315d971b1d92d0b7e435aad38e607
3,357
ex
Elixir
lib/timescaledb/models/model.ex
membraneframework/membrane_timescaledb_reporter
9300a743c41f45bdbc880def2aec55166baf4885
[ "Apache-2.0" ]
2
2020-10-13T18:04:54.000Z
2021-12-15T08:23:28.000Z
lib/timescaledb/models/model.ex
membraneframework/membrane_timescaledb_reporter
9300a743c41f45bdbc880def2aec55166baf4885
[ "Apache-2.0" ]
9
2020-07-30T14:57:46.000Z
2021-12-28T07:37:27.000Z
lib/timescaledb/models/model.ex
membraneframework/membrane_timescaledb_reporter
9300a743c41f45bdbc880def2aec55166baf4885
[ "Apache-2.0" ]
null
null
null
defmodule Membrane.Telemetry.TimescaleDB.Model do @moduledoc """ Module responsible for putting data to TimescaleDB. """ import Ecto.Query alias Membrane.Telemetry.TimescaleDB.Repo alias Membrane.Telemetry.TimescaleDB.Model.{ComponentPath, Element, Measurement, Link} require Logger @doc """ Inserts all given measurements into a database as a batch. Takes a tuple consisting of 3 lists: * `with_paths` - list of measurements with already present `component_path_id` replacing `component_path` * `without_paths` - list of measurements with unknown `component_path_id` but with a present `component_path` fields * `paths_to_insert` - list of components' paths that must be inserted to the database, the inserted records are then used to assign `without_paths` their corresponding `path_id`s Returns number of inserted records and a mapping of newly inserted paths `{component_path => component_path_id}`. """ @spec add_all_measurements({list(), list(), list()}) :: {:ok, non_neg_integer(), map()} def add_all_measurements({with_paths, without_paths, paths_to_insert}) do with {:ok, inserted_paths} <- insert_new_paths(paths_to_insert), new_with_paths = prepare_measurements_without_paths(without_paths, inserted_paths), {total_inserted, _} <- Repo.insert_all("measurements", new_with_paths ++ with_paths) do {:ok, total_inserted, inserted_paths} else other -> {:error, "failed to add measurements #{inspect(other)}"} end end @spec add_measurement(map()) :: {:ok, Measurement.t()} | {:error, Ecto.Changeset.t()} def add_measurement(measurement) do %Measurement{} |> Measurement.changeset(measurement) |> Repo.insert() end @spec add_link(map()) :: {:ok, Link.t()} | {:error, Ecto.Changeset.t()} def add_link(link) do %Link{} |> Link.changeset(link) |> Repo.insert() end @spec add_element_event(map()) :: {:ok, Element.t()} | {:error, Ecto.Changeset.t()} def add_element_event(element) do %Element{} |> Element.changeset(element) |> Repo.insert() end defp insert_new_paths([]) do {:ok, %{}} end defp insert_new_paths(paths_to_insert) do {inserted, paths} = Repo.insert_all( ComponentPath, Enum.map(paths_to_insert, &%{path: &1}), on_conflict: :nothing, returning: true ) # if 'inserted' count is less than the number of paths to insert # that means that we got a conflict and some path is already inserted # in such case just query non inserted paths for their ids already_inserted = if length(paths_to_insert) > inserted do to_query = paths_to_insert |> MapSet.new() |> MapSet.difference(MapSet.new(paths, & &1.path)) |> MapSet.to_list() Repo.all(from(cp in ComponentPath, where: cp.path in ^to_query)) else [] end (paths ++ already_inserted) |> Map.new(fn el -> {el.path, el.id} end) |> then(&{:ok, &1}) end defp prepare_measurements_without_paths(without_paths, inserted_paths) do without_paths |> Enum.map(fn measurement -> measurement |> Map.put(:component_path_id, Map.get(inserted_paths, measurement.component_path)) |> Map.delete(:component_path) end) end end
33.237624
133
0.669348
f799e06c342919cb174c8eab4b2963d12e2931f2
3,284
ex
Elixir
lib/simple_pool/v2/cluster_management_framework/structures/entities/cluster/service/state_entity.ex
noizu/simple_pool
59251a3391ff82152a31626072955b95f83c18ee
[ "MIT" ]
null
null
null
lib/simple_pool/v2/cluster_management_framework/structures/entities/cluster/service/state_entity.ex
noizu/simple_pool
59251a3391ff82152a31626072955b95f83c18ee
[ "MIT" ]
null
null
null
lib/simple_pool/v2/cluster_management_framework/structures/entities/cluster/service/state_entity.ex
noizu/simple_pool
59251a3391ff82152a31626072955b95f83c18ee
[ "MIT" ]
null
null
null
#------------------------------------------------------------------------------- # Author: Keith Brings # Copyright (C) 2020 Noizu Labs, Inc. All rights reserved. #------------------------------------------------------------------------------- defmodule Noizu.SimplePool.V2.ClusterManagement.Cluster.Service.StateEntity do @vsn 1.0 @type t :: %__MODULE__{ identifier: atom, status: atom, state: atom, pending_state: atom, service_definition: Noizu.SimplePool.V2.ClusterManagement.Cluster.Service.Definition.t, status_details: Noizu.SimplePool.V2.ClusterManagement.Cluster.Service.Status.t, instance_definitions: Map.t, instance_statuses: Map.t, health_report: Noizu.SimplePool.V2.ClusterManagement.HealthReport.t, updated_on: DateTime.t, telemetry_handler: any, event_handler: any, state_changed_on: DateTime.t, pending_state_changed_on: DateTime.t, meta: Map.t, vsn: any } defstruct [ identifier: nil, status: :unknown, state: :offline, pending_state: :offline, service_definition: nil, status_details: nil, instance_definitions: %{}, instance_statuses: %{}, health_report: :pending, updated_on: nil, telemetry_handler: nil, event_handler: nil, state_changed_on: nil, pending_state_changed_on: nil, meta: %{}, vsn: @vsn ] def reset(%__MODULE__{} = this, _context, options \\ %{}) do current_time = options[:current_time] || DateTime.utc_now() # @TODO flag service status entries as unknown/pending to force status updates. %__MODULE__{this| status: :warmup, state: :init, pending_state: :online, state_changed_on: current_time, pending_state_changed_on: current_time } end use Noizu.Scaffolding.V2.EntityBehaviour, sref_module: "service-state", entity_table: Noizu.SimplePool.V2.Database.Cluster.Service.StateTable defimpl Noizu.ERP, for: Noizu.SimplePool.V2.ClusterManagement.Cluster.Service.StateEntity do defdelegate id(o), to: Noizu.Scaffolding.V2.ERPResolver defdelegate ref(o), to: Noizu.Scaffolding.V2.ERPResolver defdelegate sref(o), to: Noizu.Scaffolding.V2.ERPResolver defdelegate entity(o, options \\ nil), to: Noizu.Scaffolding.V2.ERPResolver defdelegate entity!(o, options \\ nil), to: Noizu.Scaffolding.V2.ERPResolver defdelegate record(o, options \\ nil), to: Noizu.Scaffolding.V2.ERPResolver defdelegate record!(o, options \\ nil), to: Noizu.Scaffolding.V2.ERPResolver def id_ok(o) do r = id(o) r && {:ok, r} || {:error, o} end def ref_ok(o) do r = ref(o) r && {:ok, r} || {:error, o} end def sref_ok(o) do r = sref(o) r && {:ok, r} || {:error, o} end def entity_ok(o, options \\ %{}) do r = entity(o, options) r && {:ok, r} || {:error, o} end def entity_ok!(o, options \\ %{}) do r = entity!(o, options) r && {:ok, r} || {:error, o} end end end
35.311828
103
0.573691
f799f102e7678a4c3026b29993e582772d493bfb
1,413
ex
Elixir
test/support/data_case.ex
DylanGuedes/batch_processor
2f3122a5f1a31557a39fac67aa62e297c39e8bf5
[ "Apache-2.0" ]
null
null
null
test/support/data_case.ex
DylanGuedes/batch_processor
2f3122a5f1a31557a39fac67aa62e297c39e8bf5
[ "Apache-2.0" ]
3
2018-08-17T13:42:45.000Z
2018-08-17T17:05:11.000Z
test/support/data_case.ex
DylanGuedes/batch_processor
2f3122a5f1a31557a39fac67aa62e297c39e8bf5
[ "Apache-2.0" ]
null
null
null
defmodule DataProcessor.DataCase do @moduledoc """ This module defines the setup for tests requiring access to the application's data layer. You may define functions here to be used as helpers in your tests. Finally, if the test case interacts with the database, it cannot be async. For this reason, every test runs inside a transaction which is reset at the beginning of the test unless the test case is marked as async. """ use ExUnit.CaseTemplate using do quote do alias DataProcessor.Repo import Ecto import Ecto.Changeset import Ecto.Query import DataProcessor.DataCase end end setup tags do :ok = Ecto.Adapters.SQL.Sandbox.checkout(DataProcessor.Repo) unless tags[:async] do Ecto.Adapters.SQL.Sandbox.mode(DataProcessor.Repo, {:shared, self()}) end :ok end @doc """ A helper that transform changeset errors to a map of messages. assert {:error, changeset} = Accounts.create_user(%{password: "short"}) assert "password is too short" in errors_on(changeset).password assert %{password: ["password is too short"]} = errors_on(changeset) """ def errors_on(changeset) do Ecto.Changeset.traverse_errors(changeset, fn {message, opts} -> Enum.reduce(opts, message, fn {key, value}, acc -> String.replace(acc, "%{#{key}}", to_string(value)) end) end) end end
26.166667
77
0.686483
f79a04bf40aa644dae5c65286e7ccecebc997298
2,361
ex
Elixir
lib/ecto/adapter/migration.ex
chulkilee/ecto_sql
ba4627f1d1ad7af849eaf53041b27d7c45a8ccd2
[ "Apache-2.0" ]
null
null
null
lib/ecto/adapter/migration.ex
chulkilee/ecto_sql
ba4627f1d1ad7af849eaf53041b27d7c45a8ccd2
[ "Apache-2.0" ]
null
null
null
lib/ecto/adapter/migration.ex
chulkilee/ecto_sql
ba4627f1d1ad7af849eaf53041b27d7c45a8ccd2
[ "Apache-2.0" ]
null
null
null
defmodule Ecto.Adapter.Migration do @moduledoc """ Specifies the adapter migrations API. """ alias Ecto.Migration.Table alias Ecto.Migration.Index alias Ecto.Migration.Reference @type adapter_meta :: Ecto.Adapter.adapter_meta() @typedoc "All migration commands" @type command :: raw :: String.t() | {:create, Table.t(), [table_subcommand]} | {:create_if_not_exists, Table.t(), [table_subcommand]} | {:alter, Table.t(), [table_subcommand]} | {:drop, Table.t()} | {:drop_if_exists, Table.t()} | {:create, Index.t()} | {:create_if_not_exists, Index.t()} | {:drop, Index.t()} | {:drop_if_exists, Index.t()} @typedoc "All commands allowed within the block passed to `table/2`" @type table_subcommand :: {:add, field :: atom, type :: Ecto.Type.t() | Reference.t(), Keyword.t()} | {:modify, field :: atom, type :: Ecto.Type.t() | Reference.t(), Keyword.t()} | {:remove, field :: atom, type :: Ecto.Type.t() | Reference.t(), Keyword.t()} | {:remove, field :: atom} @typedoc """ A struct that represents a table or index in a database schema. These database objects can be modified through the use of a Data Definition Language, hence the name DDL object. """ @type ddl_object :: Table.t() | Index.t() @doc """ Checks if the adapter supports ddl transaction. """ @callback supports_ddl_transaction? :: boolean @doc """ Executes migration commands. ## Options * `:timeout` - The time in milliseconds to wait for the query call to finish, `:infinity` will wait indefinitely (default: 15000); * `:pool_timeout` - The time in milliseconds to wait for calls to the pool to finish, `:infinity` will wait indefinitely (default: 5000); """ @callback execute_ddl(adapter_meta, command, options :: Keyword.t()) :: {:ok, [{Logger.level, Logger.message, Logger.metadata}]} @doc """ Locks the migrations table and emits the locked versions for callback execution. It returns the result of calling the given function with a list of versions. """ @callback lock_for_migrations(adapter_meta, Ecto.Query.t(), options :: Keyword.t(), fun) :: result when fun: (Ecto.Query.t() -> result), result: var end
34.217391
93
0.625582
f79a0d5dffae26da9cb74119904ed79f12126a07
2,330
ex
Elixir
example_avialia/lib/example_avialia/cargos/shipment.ex
zetaron/Domo
2159163378f1ad8dea5cbc31dea2ed827c9024ab
[ "MIT" ]
null
null
null
example_avialia/lib/example_avialia/cargos/shipment.ex
zetaron/Domo
2159163378f1ad8dea5cbc31dea2ed827c9024ab
[ "MIT" ]
null
null
null
example_avialia/lib/example_avialia/cargos/shipment.ex
zetaron/Domo
2159163378f1ad8dea5cbc31dea2ed827c9024ab
[ "MIT" ]
null
null
null
defmodule ExampleAvialia.Cargos.Shipment do use Ecto.Schema use Domo, skip_defaults: true import Ecto.Changeset import Domo.Changeset alias Ecto.Changeset alias ExampleAvialia.Cargos.ShipmentKind alias ExampleAvialia.Cargos.ShipmentWeight alias ExampleAvialia.Cargos.ShipmentDocument alias ExampleAvialia.SharedKernel schema "shipments" do field :flight, :string field :kind, ShipmentKind field :weight, ShipmentWeight field :documents_count, :integer has_many :documents, ShipmentDocument, on_delete: :delete_all timestamps() end @type t :: %__MODULE__{ flight: SharedKernel.flight_number(), kind: ShipmentKind.value(), weight: ShipmentWeight.value(), documents_count: non_neg_integer(), documents: [ShipmentDocument.t()] } precond t: &validate_shipment/1 defp validate_shipment(shipment) do cond do shipment.documents_count != (real_count = Enum.count(shipment.documents)) -> {:error, "Shipment #{shipment.id} expected to have #{shipment.documents_count} associated documents and has #{real_count}."} match?({:commercial_cargo, _}, shipment.kind) and not match?({:units, _}, shipment.weight) -> {:error, "Commercial shipment must be measured in package units (bags, boxes etc.)"} match?({:passenger_baggage, _}, shipment.kind) and not match?({:kilograms, _}, shipment.weight) -> {:error, "Baggage shipment must be measured in kilograms"} true -> :ok end end def changeset(shipment_or_changeset, attrs) do attribute_fields = typed_fields() |> List.delete(:documents) shipment_or_changeset |> cast(attrs, attribute_fields) |> validate_required(required_fields()) |> cast_assoc(:documents) |> maybe_lift_first_error(:documents) |> validate_type(fields: attribute_fields, maybe_filter_precond_errors: true) end defp maybe_lift_first_error(changeset, key) do if invalid_doc_changeset = Enum.find(changeset.changes[key], &match?(%Changeset{valid?: false}, &1)) do doc_error = invalid_doc_changeset |> Map.get(:errors) |> Keyword.values() |> List.first() |> elem(0) Changeset.add_error(changeset, key, doc_error) else changeset end end end
31.066667
132
0.687983